mirror of https://github.com/simstudioai/sim.git
synced 2026-02-22 03:01:08 -05:00

Compare commits (31 commits; the compare view's Author and Date columns were empty, so only the SHA1s are listed):

9697710569, 68f44b8df4, 9920882dc5, 9ca5254c2b, d7fddb2909, 61c7afc19e, 3c470ab0f8, 2b5e436a2a, e24c824c9a, dcf81372af, ab48787422, 91aa1f9a52, 2979269ac3, cf28822a1c, 86ca984926, e3964624ac, 7c7c0fd955, e37b4a926d, 11f3a14c02, eab01e0272, bbcef7ce5c, 0ee52df5a7, 6421b1a0ca, 61a5c98717, a0afb5d03e, cdacb796a8, 3ce54147e6, 08690b2906, 299cc26694, 48715ff013, ad0d0ed1f1
@@ -454,6 +454,8 @@ Enables AI-assisted field generation.

## Tools Configuration

**Important:** `tools.config.tool` runs during serialization before variable resolution. Put `Number()` and other type coercions in `tools.config.params` instead, which runs at execution time after variables are resolved.

**Preferred:** Use tool names directly as dropdown option IDs to avoid switch cases:

```typescript
// Dropdown options use tool IDs directly
// (The rest of this block was truncated in the diff view; a hedged sketch,
// reusing tool IDs that appear elsewhere on this page:)
options: [
  { label: 'Search', id: 'algolia_search' },
  { label: 'Browse', id: 'algolia_browse_records' },
],

// The selected option ID is already the tool name — no switch case needed
tools: { config: { tool: (params) => params.operation } },
```

@@ -1,261 +0,0 @@
---
description: Add a knowledge base connector for syncing documents from an external source
argument-hint: <service-name> [api-docs-url]
---

# Add Connector Skill

You are an expert at adding knowledge base connectors to Sim. A connector syncs documents from an external source (Confluence, Google Drive, Notion, etc.) into a knowledge base.

## Your Task

When the user asks you to create a connector:
1. Use Context7 or WebFetch to read the service's API documentation
2. Create the connector directory and config
3. Register it in the connector registry

## Directory Structure

Create files in `apps/sim/connectors/{service}/`:

```
connectors/{service}/
├── index.ts       # Barrel export
└── {service}.ts   # ConnectorConfig definition
```

## ConnectorConfig Structure

```typescript
import { createLogger } from '@sim/logger'
import { {Service}Icon } from '@/components/icons'
import { fetchWithRetry } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'

const logger = createLogger('{Service}Connector')

export const {service}Connector: ConnectorConfig = {
  id: '{service}',
  name: '{Service}',
  description: 'Sync documents from {Service} into your knowledge base',
  version: '1.0.0',
  icon: {Service}Icon,

  oauth: {
    required: true,
    provider: '{service}', // Must match OAuthService in lib/oauth/types.ts
    requiredScopes: ['read:...'],
  },

  configFields: [
    // Rendered dynamically by the add-connector modal UI
    // Supports 'short-input' and 'dropdown' types
  ],

  listDocuments: async (accessToken, sourceConfig, cursor) => {
    // Paginate via cursor, extract text, compute SHA-256 hash
    // Return { documents: ExternalDocument[], nextCursor?, hasMore }
  },

  getDocument: async (accessToken, sourceConfig, externalId) => {
    // Return ExternalDocument or null
  },

  validateConfig: async (accessToken, sourceConfig) => {
    // Return { valid: true } or { valid: false, error: 'message' }
  },

  // Optional: map source metadata to semantic tag keys (translated to slots by sync engine)
  mapTags: (metadata) => {
    // Return Record<string, unknown> with keys matching tagDefinitions[].id
  },
}
```

## ConfigField Types

The add-connector modal renders these automatically — no custom UI needed.

```typescript
// Text input
{
  id: 'domain',
  title: 'Domain',
  type: 'short-input',
  placeholder: 'yoursite.example.com',
  required: true,
}

// Dropdown (static options)
{
  id: 'contentType',
  title: 'Content Type',
  type: 'dropdown',
  required: false,
  options: [
    { label: 'Pages only', id: 'page' },
    { label: 'Blog posts only', id: 'blogpost' },
    { label: 'All content', id: 'all' },
  ],
}
```

## ExternalDocument Shape

Every document returned from `listDocuments`/`getDocument` must include:

```typescript
{
  externalId: string                   // Source-specific unique ID
  title: string                        // Document title
  content: string                      // Extracted plain text
  mimeType: 'text/plain'               // Always text/plain (content is extracted)
  contentHash: string                  // SHA-256 of content (change detection)
  sourceUrl?: string                   // Link back to original (stored on document record)
  metadata?: Record<string, unknown>   // Source-specific data (fed to mapTags)
}
```

## Content Hashing (Required)

The sync engine uses content hashes for change detection:

```typescript
async function computeContentHash(content: string): Promise<string> {
  const data = new TextEncoder().encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}
```
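
Putting pagination, text extraction, and hashing together, here is a minimal `listDocuments` sketch. The endpoint path, response fields (`items`, `nextCursor`), the `domain` config field, and the `extractText` helper are hypothetical stand-ins for whatever the real service API provides:

```typescript
listDocuments: async (accessToken, sourceConfig, cursor): Promise<ExternalDocumentList> => {
  // Hypothetical list endpoint — substitute the real service's API
  const url = new URL(`https://${sourceConfig.domain}/api/v1/documents`)
  if (cursor) url.searchParams.set('cursor', cursor)

  const response = await fetchWithRetry(url.toString(), {
    headers: { Authorization: `Bearer ${accessToken}` },
  })
  if (!response.ok) throw new Error(`List failed: ${response.status}`)
  const body = await response.json()

  const documents: ExternalDocument[] = await Promise.all(
    body.items.map(async (item: any) => {
      const content = extractText(item) // hypothetical text-extraction helper
      return {
        externalId: String(item.id),
        title: item.title,
        content,
        mimeType: 'text/plain' as const,
        contentHash: await computeContentHash(content),
        sourceUrl: `https://${sourceConfig.domain}/documents/${item.id}`, // full URL, not relative
        metadata: { labels: item.labels, version: item.version },
      }
    })
  )

  return { documents, nextCursor: body.nextCursor, hasMore: Boolean(body.nextCursor) }
},
```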

## tagDefinitions — Declared Tag Definitions

Declare which tags the connector populates using semantic IDs. Shown in the add-connector modal as opt-out checkboxes.
On connector creation, slots are **dynamically assigned** via `getNextAvailableSlot` — connectors never hardcode slot names.

```typescript
tagDefinitions: [
  { id: 'labels', displayName: 'Labels', fieldType: 'text' },
  { id: 'version', displayName: 'Version', fieldType: 'number' },
  { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
],
```

Each entry has:
- `id`: Semantic key matching a key returned by `mapTags` (e.g. `'labels'`, `'version'`)
- `displayName`: Human-readable name shown in the UI (e.g. "Labels", "Last Modified")
- `fieldType`: `'text'` | `'number'` | `'date'` | `'boolean'` — determines which slot pool to draw from

Users can opt out of specific tags in the modal. Disabled IDs are stored in `sourceConfig.disabledTagIds`.
The assigned mapping (`semantic id → slot`) is stored in `sourceConfig.tagSlotMapping`.

## mapTags — Metadata to Semantic Keys

Maps source metadata to semantic tag keys. Required if `tagDefinitions` is set.
The sync engine calls this automatically and translates semantic keys to actual DB slots using the `tagSlotMapping` stored on the connector.

Return keys must match the `id` values declared in `tagDefinitions`.

```typescript
mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
  const result: Record<string, unknown> = {}

  // Validate arrays before casting — metadata may be malformed
  const labels = Array.isArray(metadata.labels) ? (metadata.labels as string[]) : []
  if (labels.length > 0) result.labels = labels.join(', ')

  // Validate numbers — guard against NaN
  if (metadata.version != null) {
    const num = Number(metadata.version)
    if (!Number.isNaN(num)) result.version = num
  }

  // Validate dates — guard against Invalid Date
  if (typeof metadata.lastModified === 'string') {
    const date = new Date(metadata.lastModified)
    if (!Number.isNaN(date.getTime())) result.lastModified = date
  }

  return result
}
```

## External API Calls — Use `fetchWithRetry`

All external API calls must use `fetchWithRetry` from `@/lib/knowledge/documents/utils` instead of raw `fetch()`. This provides exponential backoff with retries on 429/502/503/504 errors. It returns a standard `Response` — all `.ok`, `.json()`, `.text()` checks work unchanged.

For `validateConfig` (user-facing, called on save), pass `VALIDATE_RETRY_OPTIONS` to cap wait time at ~7s. Background operations (`listDocuments`, `getDocument`) use the built-in defaults (5 retries, ~31s max).

```typescript
import { VALIDATE_RETRY_OPTIONS, fetchWithRetry } from '@/lib/knowledge/documents/utils'

// Background sync — use defaults
const response = await fetchWithRetry(url, {
  method: 'GET',
  headers: { Authorization: `Bearer ${accessToken}` },
})

// validateConfig — tighter retry budget
const response = await fetchWithRetry(url, { ... }, VALIDATE_RETRY_OPTIONS)
```
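
A short `validateConfig` sketch using that tighter retry budget. The probe endpoint and `domain` config field are hypothetical; any cheap authenticated call the real service offers would do:

```typescript
validateConfig: async (accessToken, sourceConfig) => {
  try {
    // Hypothetical lightweight probe endpoint
    const response = await fetchWithRetry(
      `https://${sourceConfig.domain}/api/v1/me`,
      { headers: { Authorization: `Bearer ${accessToken}` } },
      VALIDATE_RETRY_OPTIONS // user-facing: cap wait time at ~7s
    )
    if (!response.ok) {
      return { valid: false, error: `Could not reach {Service} (HTTP ${response.status})` }
    }
    return { valid: true }
  } catch (error) {
    return { valid: false, error: error instanceof Error ? error.message : 'Validation failed' }
  }
},
```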

## sourceUrl

If `ExternalDocument.sourceUrl` is set, the sync engine stores it on the document record. Always construct the full URL (not a relative path).

## Sync Engine Behavior (Do Not Modify)

The sync engine (`lib/knowledge/connectors/sync-engine.ts`) is connector-agnostic. It:
1. Calls `listDocuments` with pagination until `hasMore` is false
2. Compares `contentHash` to detect new/changed/unchanged documents
3. Stores `sourceUrl` and calls `mapTags` on insert/update automatically
4. Handles soft-delete of removed documents

You never need to modify the sync engine when adding a connector.

## OAuth Credential Reuse

Connectors reuse the existing OAuth infrastructure. The `oauth.provider` must match an `OAuthService` from `apps/sim/lib/oauth/types.ts`. Check existing providers before adding a new one.

## Icon

The `icon` field on `ConnectorConfig` is used throughout the UI — in the connector list, the add-connector modal, and as the document icon in the knowledge base table (replacing the generic file type icon for connector-sourced documents). The icon is read from `CONNECTOR_REGISTRY[connectorType].icon` at runtime — no separate icon map to maintain.

If the service already has an icon in `apps/sim/components/icons.tsx` (from a tool integration), reuse it. Otherwise, ask the user to provide the SVG.

## Registering

Add one line to `apps/sim/connectors/registry.ts`:

```typescript
import { {service}Connector } from '@/connectors/{service}'

export const CONNECTOR_REGISTRY: ConnectorRegistry = {
  // ... existing connectors ...
  {service}: {service}Connector,
}
```

## Reference Implementation

See `apps/sim/connectors/confluence/confluence.ts` for a complete example with:
- Multiple config field types (text + dropdown)
- Label fetching and CQL search filtering
- Blogpost + page content types
- `mapTags` mapping labels, version, and dates to semantic keys

## Checklist

- [ ] Created `connectors/{service}/{service}.ts` with full ConnectorConfig
- [ ] Created `connectors/{service}/index.ts` barrel export
- [ ] `oauth.provider` matches an existing OAuthService in `lib/oauth/types.ts`
- [ ] `listDocuments` handles pagination and computes content hashes
- [ ] `sourceUrl` set on each ExternalDocument (full URL, not relative)
- [ ] `metadata` includes source-specific data for tag mapping
- [ ] `tagDefinitions` declared for each semantic key returned by `mapTags`
- [ ] `mapTags` implemented if source has useful metadata (labels, dates, versions)
- [ ] `validateConfig` verifies the source is accessible
- [ ] All external API calls use `fetchWithRetry` (not raw `fetch`)
- [ ] All optional config fields validated in `validateConfig`
- [ ] Icon exists in `components/icons.tsx` (or asked user to provide SVG)
- [ ] Registered in `connectors/registry.ts`

@@ -238,7 +238,7 @@ export const ServiceBlock: BlockConfig = {
  bgColor: '#hexcolor',
  icon: ServiceIcon,
  subBlocks: [ /* see SubBlock Properties */ ],
- tools: { access: ['service_action'], config: { tool: (p) => `service_${p.operation}` } },
+ tools: { access: ['service_action'], config: { tool: (p) => `service_${p.operation}`, params: (p) => ({ /* type coercions here */ }) } },
  inputs: { /* ... */ },
  outputs: { /* ... */ },
}
@@ -246,6 +246,8 @@ export const ServiceBlock: BlockConfig = {

Register in `blocks/registry.ts` (alphabetically).

**Important:** `tools.config.tool` runs during serialization (before variable resolution). Never do `Number()` or other type coercions there — dynamic references like `<Block.output>` will be destroyed. Use `tools.config.params` for type coercions (it runs during execution, after variables are resolved).
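
For illustration, a hedged sketch of what such a coercion in `tools.config.params` might look like — the `hitsPerPage` and `page` field names are hypothetical:

```typescript
tools: {
  access: ['service_search'],
  config: {
    // Serialization time: only pick the tool; leave raw values untouched
    tool: (p) => p.operation,
    // Execution time: references like <Block.output> are already resolved,
    // so type coercions are safe here
    params: (p) => ({
      ...p,
      hitsPerPage: p.hitsPerPage != null ? Number(p.hitsPerPage) : undefined,
      page: p.page != null ? Number(p.page) : undefined,
    }),
  },
},
```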

**SubBlock Properties:**
```typescript
{
  // Truncated in the diff view; a hedged sketch reusing the field shape of the
  // ConfigField examples shown earlier on this page — property names beyond
  // these are assumptions, not confirmed by the source:
  id: 'operation',
  title: 'Operation',
  type: 'dropdown',
  required: true,
}
```

@@ -59,12 +59,6 @@ body {
  --content-gap: 1.75rem;
}

/* Remove custom layout variable overrides to fall back to fumadocs defaults */

/* ============================================
   Navbar Light Mode Styling
   ============================================ */

/* Light mode navbar and search styling */
:root:not(.dark) nav {
  background-color: hsla(0, 0%, 96%, 0.85) !important;
@@ -88,10 +82,6 @@ body {
  -webkit-backdrop-filter: blur(25px) saturate(180%) brightness(0.6) !important;
}

/* ============================================
   Custom Sidebar Styling (Turborepo-inspired)
   ============================================ */

/* Floating sidebar appearance - remove background */
[data-sidebar-container],
#nd-sidebar {
@@ -468,10 +458,6 @@ aside[data-sidebar],
  writing-mode: horizontal-tb !important;
}

/* ============================================
   Code Block Styling (Improved)
   ============================================ */

/* Apply Geist Mono to code elements */
code,
pre,
@@ -532,10 +518,6 @@ pre code .line {
  color: var(--color-fd-primary);
}

/* ============================================
   TOC (Table of Contents) Styling
   ============================================ */

/* Remove the thin border-left on nested TOC items (keeps main indicator only) */
#nd-toc a[style*="padding-inline-start"] {
  border-left: none !important;
@@ -554,10 +536,6 @@ main article,
  padding-bottom: 4rem;
}

/* ============================================
   Center and Constrain Main Content Width
   ============================================ */

/* Main content area - center and constrain like turborepo/raindrop */
/* Note: --sidebar-offset and --toc-offset are now applied at #nd-docs-layout level */
main[data-main] {

@@ -1157,6 +1157,17 @@ export function AirweaveIcon(props: SVGProps<SVGSVGElement>) {
  )
}

export function AlgoliaIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 50 50'>
      <path
        fill='#FFFFFF'
        d='M25,0C11.3,0,0.2,11,0,24.6C-0.2,38.4,11,49.9,24.8,50c4.3,0,8.4-1,12-3c0.4-0.2,0.4-0.7,0.1-1l-2.3-2.1 c-0.5-0.4-1.2-0.5-1.7-0.3c-2.5,1.1-5.3,1.6-8.2,1.6c-11.2-0.1-20.2-9.4-20-20.6C4.9,13.6,13.9,4.7,25,4.7h20.3v36L33.7,30.5 c-0.4-0.3-0.9-0.3-1.2,0.1c-1.8,2.4-4.9,4-8.2,3.7c-4.6-0.3-8.4-4-8.7-8.7c-0.4-5.5,4-10.2,9.4-10.2c4.9,0,9,3.8,9.4,8.6 c0,0.4,0.2,0.8,0.6,1.1l3,2.7c0.3,0.3,0.9,0.1,1-0.3c0.2-1.2,0.3-2.4,0.2-3.6c-0.5-7-6.2-12.7-13.2-13.1c-8.1-0.5-14.8,5.8-15,13.7 c-0.2,7.7,6.1,14.4,13.8,14.5c3.2,0.1,6.2-0.9,8.6-2.7l15,13.3c0.6,0.6,1.7,0.1,1.7-0.7v-48C50,0.4,49.5,0,49,0L25,0 C25,0,25,0,25,0z'
      />
    </svg>
  )
}

export function GoogleBooksIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 478.633 540.068'>

@@ -4407,6 +4418,161 @@ export function DatadogIcon(props: SVGProps<SVGSVGElement>) {
  )
}

export function MicrosoftDataverseIcon(props: SVGProps<SVGSVGElement>) {
  const id = useId()
  const clip0 = `dataverse_clip0_${id}`
  const clip1 = `dataverse_clip1_${id}`
  const clip2 = `dataverse_clip2_${id}`
  const paint0 = `dataverse_paint0_${id}`
  const paint1 = `dataverse_paint1_${id}`
  const paint2 = `dataverse_paint2_${id}`
  const paint3 = `dataverse_paint3_${id}`
  const paint4 = `dataverse_paint4_${id}`
  const paint5 = `dataverse_paint5_${id}`
  const paint6 = `dataverse_paint6_${id}`
  return (
    <svg
      {...props}
      width='96'
      height='96'
      viewBox='0 0 96 96'
      fill='none'
      xmlns='http://www.w3.org/2000/svg'
    >
      <g clipPath={`url(#${clip0})`}>
        <g clipPath={`url(#${clip1})`}>
          <g clipPath={`url(#${clip2})`}>
            <path
              d='M13.8776 21.8242C29.1033 8.13791 49.7501 8.1861 62.955 18.9134C74.9816 28.6836 77.4697 44.3159 70.851 55.7801C64.2321 67.2443 52.5277 70.1455 39.5011 62.6247L31.7286 76.087L31.7234 76.0862C27.4181 83.5324 17.8937 86.0828 10.4437 81.7817C7.45394 80.0556 5.25322 77.4879 3.96665 74.551L3.96096 74.5511C-4.07832 55.7804 0.200745 34.1184 13.8776 21.8242Z'
              fill={`url(#${paint0})`}
            />
            <path
              d='M13.8776 21.8242C29.1033 8.13791 49.7501 8.1861 62.955 18.9134C74.9816 28.6836 77.4697 44.3159 70.851 55.7801C64.2321 67.2443 52.5277 70.1455 39.5011 62.6247L31.7286 76.087L31.7234 76.0862C27.4181 83.5324 17.8937 86.0828 10.4437 81.7817C7.45394 80.0556 5.25322 77.4879 3.96665 74.551L3.96096 74.5511C-4.07832 55.7804 0.200745 34.1184 13.8776 21.8242Z'
              fill={`url(#${paint1})`}
              fillOpacity='0.8'
            />
            <path
              d='M85.4327 14.2231C88.4528 15.9668 90.6686 18.569 91.9494 21.5433L91.9533 21.5444C99.9406 40.2943 95.6533 61.9068 81.9983 74.1814C66.7726 87.8677 46.1257 87.8196 32.9209 77.0923C20.8945 67.3221 18.4062 51.6897 25.0249 40.2256C31.6438 28.7614 43.3482 25.8601 56.3748 33.381L64.1434 19.9255L64.1482 19.9249C68.4516 12.4736 77.9805 9.92084 85.4327 14.2231Z'
              fill={`url(#${paint2})`}
            />
            <path
              d='M85.4327 14.2231C88.4528 15.9668 90.6686 18.569 91.9494 21.5433L91.9533 21.5444C99.9406 40.2943 95.6533 61.9068 81.9983 74.1814C66.7726 87.8677 46.1257 87.8196 32.9209 77.0923C20.8945 67.3221 18.4062 51.6897 25.0249 40.2256C31.6438 28.7614 43.3482 25.8601 56.3748 33.381L64.1434 19.9255L64.1482 19.9249C68.4516 12.4736 77.9805 9.92084 85.4327 14.2231Z'
              fill={`url(#${paint3})`}
              fillOpacity='0.9'
            />
            <path
              d='M39.5041 62.6261C52.5307 70.1469 64.2352 67.2456 70.8541 55.7814C77.2488 44.7055 75.1426 29.7389 64.147 19.9271L56.3791 33.3814L39.5041 62.6261Z'
              fill={`url(#${paint4})`}
            />
            <path
              d='M56.3794 33.3815C43.3528 25.8607 31.6482 28.762 25.0294 40.2262C18.6347 51.3021 20.7409 66.2687 31.7364 76.0806L39.5043 62.6262L56.3794 33.3815Z'
              fill={`url(#${paint5})`}
            />
            <path
              d='M33.3215 56.4453C37.9837 64.5204 48.3094 67.2872 56.3846 62.625C64.4598 57.9628 67.2266 47.6371 62.5643 39.5619C57.9021 31.4867 47.5764 28.72 39.5013 33.3822C31.4261 38.0444 28.6593 48.3701 33.3215 56.4453Z'
              fill={`url(#${paint6})`}
            />
          </g>
        </g>
      </g>
      <defs>
        <radialGradient
          id={paint0}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(46.0001 49.4996) rotate(-148.717) scale(46.2195 47.5359)'
        >
          <stop offset='0.465088' stopColor='#09442A' />
          <stop offset='0.70088' stopColor='#136C6C' />
          <stop offset='1' stopColor='#22918B' />
        </radialGradient>
        <radialGradient
          id={paint1}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(50.0001 32.4996) rotate(123.57) scale(66.0095 46.5498)'
        >
          <stop offset='0.718705' stopColor='#1A7F7C' stopOpacity='0' />
          <stop offset='1' stopColor='#16BBDA' />
        </radialGradient>
        <radialGradient
          id={paint2}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(50.4999 44.5001) rotate(30.75) scale(45.9618 44.5095)'
        >
          <stop offset='0.358097' stopColor='#136C6C' />
          <stop offset='0.789474' stopColor='#42B870' />
          <stop offset='1' stopColor='#76D45E' />
        </radialGradient>
        <radialGradient
          id={paint3}
          cx='0'
          cy='0'
          r='1'
          gradientTransform='matrix(42.5 -36.0002 31.1824 36.8127 49.4998 55.5001)'
          gradientUnits='userSpaceOnUse'
        >
          <stop offset='0.583166' stopColor='#76D45E' stopOpacity='0' />
          <stop offset='1' stopColor='#C8F5B7' />
        </radialGradient>
        <radialGradient
          id={paint4}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(47.5 48) rotate(-58.9042) scale(32.6898)'
        >
          <stop offset='0.486266' stopColor='#22918B' />
          <stop offset='0.729599' stopColor='#42B870' />
          <stop offset='1' stopColor='#43E5CA' />
        </radialGradient>
        <radialGradient
          id={paint5}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(47.3833 49.0077) rotate(119.859) scale(31.1328 29.4032)'
        >
          <stop offset='0.459553' stopColor='#08494E' />
          <stop offset='0.742242' stopColor='#1A7F7C' />
          <stop offset='1' stopColor='#309C61' />
        </radialGradient>
        <radialGradient
          id={paint6}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(52.5 40) rotate(120.784) scale(27.3542)'
        >
          <stop stopColor='#C8F5B7' />
          <stop offset='0.24583' stopColor='#98F0B0' />
          <stop offset='0.643961' stopColor='#52D17C' />
          <stop offset='1' stopColor='#119FC5' />
        </radialGradient>
        <clipPath id={clip0}>
          <rect width='96' height='96' fill='white' />
        </clipPath>
        <clipPath id={clip1}>
          <rect width='96' height='96' fill='white' />
        </clipPath>
        <clipPath id={clip2}>
          <rect width='95.9998' height='96' fill='white' />
        </clipPath>
      </defs>
    </svg>
  )
}

export function KalshiIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} viewBox='0 0 78 20' fill='currentColor' xmlns='http://www.w3.org/2000/svg'>

@@ -4809,6 +4975,26 @@ export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
  )
}

export function TableIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      xmlns='http://www.w3.org/2000/svg'
      viewBox='0 0 24 24'
      fill='none'
      stroke='currentColor'
      strokeWidth={2}
      strokeLinecap='round'
      strokeLinejoin='round'
      {...props}
    >
      <rect width='18' height='18' x='3' y='3' rx='2' />
      <path d='M3 9h18' />
      <path d='M3 15h18' />
      <path d='M9 3v18' />
      <path d='M15 3v18' />
    </svg>
  )
}

export function ReductoIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg

@@ -5532,3 +5718,104 @@ export function OnePasswordIcon(props: SVGProps<SVGSVGElement>) {
    </svg>
  )
}

export function VercelIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      {...props}
      viewBox='0 0 256 222'
      xmlns='http://www.w3.org/2000/svg'
      preserveAspectRatio='xMidYMid'
    >
      <g transform='translate(19.2 16.63) scale(0.85)'>
        <polygon fill='#fafafa' points='128 0 256 221.705007 0 221.705007' />
      </g>
    </svg>
  )
}

export function CloudflareIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 512 512'>
      <path
        fill='#f38020'
        d='M331 326c11-26-4-38-19-38l-148-2c-4 0-4-6 1-7l150-2c17-1 37-15 43-33 0 0 10-21 9-24a97 97 0 0 0-187-11c-38-25-78 9-69 46-48 3-65 46-60 72 0 1 1 2 3 2h274c1 0 3-1 3-3z'
      />
      <path
        fill='#faae40'
        d='M381 224c-4 0-6-1-7 1l-5 21c-5 16 3 30 20 31l32 2c4 0 4 6-1 7l-33 1c-36 4-46 39-46 39 0 2 0 3 2 3h113l3-2a81 81 0 0 0-78-103'
      />
    </svg>
  )
}

export function UpstashIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 256 341' width='24' height='24'>
      <path
        fill='#00C98D'
        d='M0 298.417c56.554 56.553 148.247 56.553 204.801 0c56.554-56.554 56.554-148.247 0-204.801l-25.6 25.6c42.415 42.416 42.415 111.185 0 153.6c-42.416 42.416-111.185 42.416-153.601 0z'
      />
      <path
        fill='#00C98D'
        d='M51.2 247.216c28.277 28.277 74.123 28.277 102.4 0c28.277-28.276 28.277-74.123 0-102.4l-25.6 25.6c14.14 14.138 14.14 37.061 0 51.2c-14.138 14.139-37.061 14.139-51.2 0zM256 42.415c-56.554-56.553-148.247-56.553-204.8 0c-56.555 56.555-56.555 148.247 0 204.801l25.599-25.6c-42.415-42.415-42.415-111.185 0-153.6c42.416-42.416 111.185-42.416 153.6 0z'
      />
      <path
        fill='#00C98D'
        d='M204.8 93.616c-28.276-28.277-74.124-28.277-102.4 0c-28.278 28.277-28.278 74.123 0 102.4l25.6-25.6c-14.14-14.138-14.14-37.061 0-51.2c14.138-14.139 37.06-14.139 51.2 0z'
      />
      <path
        fill='#FFF'
        fillOpacity='.4'
        d='M256 42.415c-56.554-56.553-148.247-56.553-204.8 0c-56.555 56.555-56.555 148.247 0 204.801l25.599-25.6c-42.415-42.415-42.415-111.185 0-153.6c42.416-42.416 111.185-42.416 153.6 0z'
      />
      <path
        fill='#FFF'
        fillOpacity='.4'
        d='M204.8 93.616c-28.276-28.277-74.124-28.277-102.4 0c-28.278 28.277-28.278 74.123 0 102.4l25.6-25.6c-14.14-14.138-14.14-37.061 0-51.2c14.138-14.139 37.06-14.139 51.2 0z'
      />
    </svg>
  )
}

export function RevenueCatIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      {...props}
      width='512'
      height='512'
      viewBox='0 0 512 512'
      fill='none'
      xmlns='http://www.w3.org/2000/svg'
    >
      <path
        d='M95 109.774C110.152 106.108 133.612 104 154.795 104C212.046 104 246.32 123.928 246.32 174.646C246.32 205.746 233.737 226.264 214.005 237.437L261.765 318.946C258.05 321.632 250.035 323.176 238.864 323.176C226.282 323.176 217.987 321.672 211.982 318.946L172.225 248.3H167.645C157.789 248.305 147.945 247.601 138.18 246.192V319.255C134.172 321.672 127.022 323.176 116.73 323.176C106.73 323.176 99.2874 321.659 95 319.255V109.774ZM137.643 207.848C145.772 209.263 153.997 209.968 162.235 209.956C187.12 209.956 202.285 200.556 202.285 177.057C202.285 152.886 186.268 142.949 157.668 142.949C150.956 142.918 144.255 143.515 137.643 144.735V207.848Z'
        fill='#FFFFFF'
      />
      <path
        d='M428.529 329.244C428.529 365.526 410.145 375.494 396.306 382.195C360.972 399.32 304.368 379.4 244.206 373.338C189.732 366.214 135.706 361.522 127.309 373.738C124.152 376.832 123.481 386.798 127.309 390.862C138.604 402.85 168.061 394.493 188.919 390.714C195.391 389.694 201.933 392.099 206.079 397.021C210.226 401.944 211.349 408.637 209.024 414.58C206.699 420.522 201.28 424.811 194.809 425.831C185.379 427.264 175.85 427.989 166.306 428C145.988 428 120.442 424.495 105.943 409.072C98.7232 401.4 91.3266 387.78 97.0271 366.465C107.875 326.074 172.807 336.052 248.033 343.633C300.41 348.907 357.23 366.465 379.934 350.343C385.721 346.234 396.517 337.022 390.698 329.244C384.879 321.467 375.353 325.684 362.838 325.684C300.152 325.684 263.238 285.302 263.238 217.916C263.247 167.292 284.176 131.892 318.287 115.09C333.109 107.789 350.421 104 369.587 104C386.292 104 403.269 106.931 414.11 113.366C420.847 123.032 423.778 140.305 422.306 153.201C408.247 146.466 395.36 142.949 378.669 142.949C337.365 142.949 308.947 164.039 308.947 214.985C308.947 265.932 337.065 286.149 376.611 286.149C387.869 286.035 403.1 284.67 422.306 282.053C426.455 297.498 428.529 313.228 428.529 329.244Z'
        fill='#FFFFFF'
      />
    </svg>
  )
}

export function RedisIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      {...props}
      viewBox='0 0 512 512'
      xmlns='http://www.w3.org/2000/svg'
      fillRule='evenodd'
      clipRule='evenodd'
      strokeLinejoin='round'
      strokeMiterlimit='2'
    >
      <path
        d='M479.14 279.864c-34.584 43.578-71.94 93.385-146.645 93.385-66.73 0-91.59-58.858-93.337-106.672 14.62 30.915 43.203 55.949 87.804 54.792C412.737 318.6 471.53 241.127 471.53 170.57c0-84.388-62.947-145.262-172.24-145.262-78.165 0-175.004 29.743-238.646 76.782-.689 48.42 26.286 111.369 35.972 104.452 55.17-39.67 98.918-65.203 141.35-78.01C175.153 198.58 24.451 361.219 6 389.85c2.076 26.286 34.588 96.842 50.496 96.842 4.841 0 8.993-2.768 13.835-7.61 45.433-51.046 82.472-96.816 115.412-140.933 4.627 64.658 36.42 143.702 125.307 143.702 79.55 0 158.408-57.414 194.377-186.767 4.149-15.911-15.22-28.362-26.286-15.22zm-90.616-104.449c0 40.81-40.118 60.87-76.782 60.87-19.596 0-34.648-5.145-46.554-11.832 21.906-33.168 43.59-67.182 66.887-103.593 41.08 6.953 56.449 29.788 56.449 54.555z'
        fill='#FFFFFF'
        fillRule='nonzero'
      />
    </svg>
  )
}

@@ -8,6 +8,7 @@ import {
  AhrefsIcon,
  AirtableIcon,
  AirweaveIcon,
  AlgoliaIcon,
  ApifyIcon,
  ApolloIcon,
  ArxivIcon,
@@ -19,6 +20,7 @@ import {
  CirclebackIcon,
  ClayIcon,
  ClerkIcon,
  CloudflareIcon,
  ConfluenceIcon,
  CursorIcon,
  DatadogIcon,
@@ -71,6 +73,7 @@ import {
  MailgunIcon,
  MailServerIcon,
  Mem0Icon,
  MicrosoftDataverseIcon,
  MicrosoftExcelIcon,
  MicrosoftOneDriveIcon,
  MicrosoftPlannerIcon,
@@ -96,8 +99,10 @@ import {
  QdrantIcon,
  RDSIcon,
  RedditIcon,
  RedisIcon,
  ReductoIcon,
  ResendIcon,
  RevenueCatIcon,
  S3Icon,
  SalesforceIcon,
  SearchIcon,
@@ -125,6 +130,8 @@ import {
  TTSIcon,
  TwilioIcon,
  TypeformIcon,
  UpstashIcon,
  VercelIcon,
  VideoIcon,
  WealthboxIcon,
  WebflowIcon,
@@ -145,6 +152,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  ahrefs: AhrefsIcon,
  airtable: AirtableIcon,
  airweave: AirweaveIcon,
  algolia: AlgoliaIcon,
  apify: ApifyIcon,
  apollo: ApolloIcon,
  arxiv: ArxivIcon,
@@ -155,6 +163,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  circleback: CirclebackIcon,
  clay: ClayIcon,
  clerk: ClerkIcon,
  cloudflare: CloudflareIcon,
  confluence_v2: ConfluenceIcon,
  cursor_v2: CursorIcon,
  datadog: DatadogIcon,
@@ -208,6 +217,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  mailgun: MailgunIcon,
  mem0: Mem0Icon,
  memory: BrainIcon,
  microsoft_dataverse: MicrosoftDataverseIcon,
  microsoft_excel_v2: MicrosoftExcelIcon,
  microsoft_planner: MicrosoftPlannerIcon,
  microsoft_teams: MicrosoftTeamsIcon,
@@ -231,8 +241,10 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  qdrant: QdrantIcon,
  rds: RDSIcon,
  reddit: RedditIcon,
  redis: RedisIcon,
  reducto_v2: ReductoIcon,
  resend: ResendIcon,
  revenuecat: RevenueCatIcon,
  s3: S3Icon,
  salesforce: SalesforceIcon,
  search: SearchIcon,
@@ -262,6 +274,8 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  twilio_sms: TwilioIcon,
  twilio_voice: TwilioIcon,
  typeform: TypeformIcon,
  upstash: UpstashIcon,
  vercel: VercelIcon,
  video_generator_v2: VideoIcon,
  vision_v2: EyeIcon,
  wealthbox: WealthboxIcon,

@@ -130,37 +130,4 @@ Update multiple existing records in an Airtable table

| `records` | json | Array of updated Airtable records |
| `metadata` | json | Operation metadata including record count and updated record IDs |

### `airtable_list_bases`

List all bases the authenticated user has access to

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `bases` | json | Array of Airtable bases with id, name, and permissionLevel |
| `metadata` | json | Operation metadata including total bases count |

### `airtable_get_base_schema`

Get the schema of all tables, fields, and views in an Airtable base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `baseId` | string | Yes | Airtable base ID \(starts with "app", e.g., "appXXXXXXXXXXXXXX"\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `tables` | json | Array of table schemas with fields and views |
| `metadata` | json | Operation metadata including total tables count |

apps/docs/content/docs/en/tools/algolia.mdx (new file, 404 lines)
@@ -0,0 +1,404 @@
---
title: Algolia
description: Search and manage Algolia indices
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="algolia"
  color="#003DFF"
/>

{/* MANUAL-CONTENT-START:intro */}
[Algolia](https://www.algolia.com/) is a powerful hosted search platform that enables developers and teams to deliver fast, relevant search experiences in their apps and websites. Algolia provides full-text, faceted, and filtered search as well as analytics and advanced ranking capabilities.

With Algolia, you can:

- **Deliver lightning-fast search**: Provide instant search results as users type, with typo tolerance and synonyms
- **Manage and update records**: Easily add, update, or delete objects/records in your indices
- **Perform advanced filtering**: Use filters, facets, and custom ranking to refine and organize search results
- **Configure index settings**: Adjust relevance, ranking, attributes for search, and more to optimize user experience
- **Scale confidently**: Algolia handles massive traffic and data volumes with globally distributed infrastructure
- **Gain insights**: Track analytics, search patterns, and user engagement

In Sim, the Algolia integration allows your agents to search, manage, and configure Algolia indices directly within your workflows. Use Algolia to power dynamic data exploration, automate record updates, run batch operations, and more — all from a single tool in your workspace.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Integrate Algolia into your workflow. Search indices, manage records (add, update, delete, browse), configure index settings, and perform batch operations.

## Tools

### `algolia_search`

Search an Algolia index

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia API Key |
| `indexName` | string | Yes | Name of the Algolia index to search |
| `query` | string | Yes | Search query text |
| `hitsPerPage` | number | No | Number of hits per page \(default: 20\) |
| `page` | number | No | Page number to retrieve \(default: 0\) |
| `filters` | string | No | Filter string \(e.g., "category:electronics AND price < 100"\) |
| `attributesToRetrieve` | string | No | Comma-separated list of attributes to retrieve |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `hits` | array | Array of matching records |
| ↳ `objectID` | string | Unique identifier of the record |
| ↳ `_highlightResult` | object | Highlighted attributes matching the query. Each attribute has value, matchLevel \(none, partial, full\), and matchedWords |
| ↳ `_snippetResult` | object | Snippeted attributes matching the query. Each attribute has value and matchLevel |
| ↳ `_rankingInfo` | object | Ranking information for the hit. Only present when getRankingInfo is enabled |
| ↳ `nbTypos` | number | Number of typos in the query match |
| ↳ `firstMatchedWord` | number | Position of the first matched word |
| ↳ `geoDistance` | number | Distance in meters for geo-search results |
| ↳ `nbExactWords` | number | Number of exactly matched words |
| ↳ `userScore` | number | Custom ranking score |
| ↳ `words` | number | Number of matched words |
| `nbHits` | number | Total number of matching hits |
| `page` | number | Current page number \(zero-based\) |
| `nbPages` | number | Total number of pages available |
| `hitsPerPage` | number | Number of hits per page \(1-1000, default 20\) |
| `processingTimeMS` | number | Server-side processing time in milliseconds |
| `query` | string | The search query that was executed |
| `parsedQuery` | string | The query string after normalization and stop word removal |
| `facets` | object | Facet counts keyed by facet name, each containing value-count pairs |
| `facets_stats` | object | Statistics \(min, max, avg, sum\) for numeric facets |
| `exhaustive` | object | Exhaustiveness flags for facetsCount, facetValues, nbHits, rulesMatch, and typo |

### `algolia_add_record`

Add or replace a record in an Algolia index

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia Admin API Key |
| `indexName` | string | Yes | Name of the Algolia index |
| `objectID` | string | No | Object ID for the record \(auto-generated if not provided\) |
| `record` | json | Yes | JSON object representing the record to add |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `taskID` | number | Algolia task ID for tracking the indexing operation |
| `objectID` | string | The object ID of the added or replaced record |
| `createdAt` | string | Timestamp when the record was created \(only present when objectID is auto-generated\) |
| `updatedAt` | string | Timestamp when the record was updated \(only present when replacing an existing record\) |

### `algolia_get_record`

Get a record by objectID from an Algolia index

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia API Key |
| `indexName` | string | Yes | Name of the Algolia index |
| `objectID` | string | Yes | The objectID of the record to retrieve |
| `attributesToRetrieve` | string | No | Comma-separated list of attributes to retrieve |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `objectID` | string | The objectID of the retrieved record |
| `record` | object | The record data \(all attributes\) |

### `algolia_get_records`

Retrieve multiple records by objectID from one or more Algolia indices

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia API Key |
| `indexName` | string | Yes | Default index name for all requests |
| `requests` | json | Yes | Array of objects specifying records to retrieve. Each must have "objectID" and optionally "indexName" and "attributesToRetrieve". |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `results` | array | Array of retrieved records \(null entries for records not found\) |
| ↳ `objectID` | string | Unique identifier of the record |

### `algolia_partial_update_record`

Partially update a record in an Algolia index without replacing it entirely

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia Admin API Key |
| `indexName` | string | Yes | Name of the Algolia index |
| `objectID` | string | Yes | The objectID of the record to update |
| `attributes` | json | Yes | JSON object with attributes to update. Supports built-in operations like \{"stock": \{"_operation": "Decrement", "value": 1\}\} |
| `createIfNotExists` | boolean | No | Whether to create the record if it does not exist \(default: true\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `taskID` | number | Algolia task ID for tracking the update operation |
| `objectID` | string | The objectID of the updated record |
| `updatedAt` | string | Timestamp when the record was updated |

### `algolia_delete_record`

Delete a record by objectID from an Algolia index

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia Admin API Key |
| `indexName` | string | Yes | Name of the Algolia index |
| `objectID` | string | Yes | The objectID of the record to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `taskID` | number | Algolia task ID for tracking the deletion |
| `deletedAt` | string | Timestamp when the record was deleted |

### `algolia_browse_records`

Browse and iterate over all records in an Algolia index using cursor pagination

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia API Key \(must have browse ACL\) |
| `indexName` | string | Yes | Name of the Algolia index to browse |
| `query` | string | No | Search query to filter browsed records |
| `filters` | string | No | Filter string to narrow down results |
| `attributesToRetrieve` | string | No | Comma-separated list of attributes to retrieve |
| `hitsPerPage` | number | No | Number of hits per page \(default: 1000, max: 1000\) |
| `cursor` | string | No | Cursor from a previous browse response for pagination |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `hits` | array | Array of records from the index \(up to 1000 per request\) |
| ↳ `objectID` | string | Unique identifier of the record |
| `cursor` | string | Opaque cursor string for retrieving the next page of results. Absent when no more results exist. |
| `nbHits` | number | Total number of records matching the browse criteria |
| `page` | number | Current page number \(zero-based\) |
| `nbPages` | number | Total number of pages available |
| `hitsPerPage` | number | Number of hits per page \(1-1000, default 1000 for browse\) |
| `processingTimeMS` | number | Server-side processing time in milliseconds |
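
For illustration, the cursor loop behind this tool, sketched against Algolia's raw REST browse endpoint (outside Sim; error handling kept minimal):

```typescript
async function browseAll(appId: string, apiKey: string, indexName: string): Promise<any[]> {
  const hits: any[] = []
  let cursor: string | undefined

  do {
    // Algolia's browse endpoint; pass the cursor from the previous page, if any
    const response = await fetch(`https://${appId}-dsn.algolia.net/1/indexes/${indexName}/browse`, {
      method: 'POST',
      headers: {
        'X-Algolia-Application-Id': appId,
        'X-Algolia-API-Key': apiKey, // must have the browse ACL
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(cursor ? { cursor } : {}),
    })
    if (!response.ok) throw new Error(`Browse failed: ${response.status}`)
    const page = await response.json()
    hits.push(...page.hits)
    cursor = page.cursor // absent on the last page
  } while (cursor)

  return hits
}
```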

### `algolia_batch_operations`

Perform batch add, update, partial update, or delete operations on records in an Algolia index

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia Admin API Key |
| `indexName` | string | Yes | Name of the Algolia index |
| `requests` | json | Yes | Array of batch operations. Each item has "action" \(addObject, updateObject, partialUpdateObject, partialUpdateObjectNoCreate, deleteObject\) and "body" \(the record data, must include objectID for update/delete\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `taskID` | number | Algolia task ID for tracking the batch operation |
| `objectIDs` | array | Array of object IDs affected by the batch operation |
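
A small example of what the `requests` array might look like — the object IDs and record values are illustrative:

```typescript
const requests = [
  { action: 'addObject', body: { objectID: 'prod-1', name: 'Widget', price: 19 } },
  // Built-in operation syntax, as described under algolia_partial_update_record
  { action: 'partialUpdateObject', body: { objectID: 'prod-2', stock: { _operation: 'Decrement', value: 1 } } },
  { action: 'deleteObject', body: { objectID: 'prod-3' } },
]
```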

### `algolia_list_indices`

List all indices in an Algolia application

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia API Key |
| `page` | number | No | Page number for paginating indices \(default: not paginated\) |
| `hitsPerPage` | number | No | Number of indices per page \(default: 100\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `indices` | array | List of indices in the application |
| ↳ `name` | string | Name of the index |
| ↳ `entries` | number | Number of records in the index |
| ↳ `dataSize` | number | Size of the index data in bytes |
| ↳ `fileSize` | number | Size of the index files in bytes |
| ↳ `lastBuildTimeS` | number | Last build duration in seconds |
| ↳ `numberOfPendingTasks` | number | Number of pending indexing tasks |
| ↳ `pendingTask` | boolean | Whether the index has pending tasks |
| ↳ `createdAt` | string | Timestamp when the index was created |
| ↳ `updatedAt` | string | Timestamp when the index was last updated |
| ↳ `primary` | string | Name of the primary index \(if this is a replica\) |
| ↳ `replicas` | array | List of replica index names |
| ↳ `virtual` | boolean | Whether the index is a virtual replica |
| `nbPages` | number | Total number of pages of indices |

### `algolia_get_settings`

Retrieve the settings of an Algolia index

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia API Key |
| `indexName` | string | Yes | Name of the Algolia index |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `searchableAttributes` | array | List of searchable attributes |
| `attributesForFaceting` | array | Attributes used for faceting |
| `ranking` | array | Ranking criteria |
| `customRanking` | array | Custom ranking criteria |
| `replicas` | array | List of replica index names |
| `hitsPerPage` | number | Default number of hits per page |
| `maxValuesPerFacet` | number | Maximum number of facet values returned |
| `highlightPreTag` | string | HTML tag inserted before highlighted parts |
| `highlightPostTag` | string | HTML tag inserted after highlighted parts |
| `paginationLimitedTo` | number | Maximum number of hits accessible via pagination |

### `algolia_update_settings`

Update the settings of an Algolia index

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia Admin API Key \(must have editSettings ACL\) |
| `indexName` | string | Yes | Name of the Algolia index |
| `settings` | json | Yes | JSON object with settings to update \(e.g., \{"searchableAttributes": \["name", "description"\], "customRanking": \["desc\(popularity\)"\]\}\) |
| `forwardToReplicas` | boolean | No | Whether to apply changes to replica indices \(default: false\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `taskID` | number | Algolia task ID for tracking the settings update |
| `updatedAt` | string | Timestamp when the settings were updated |

### `algolia_delete_index`

Delete an entire Algolia index and all its records

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia Admin API Key \(must have deleteIndex ACL\) |
| `indexName` | string | Yes | Name of the Algolia index to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `taskID` | number | Algolia task ID for tracking the index deletion |
| `deletedAt` | string | Timestamp when the index was deleted |

### `algolia_copy_move_index`

Copy or move an Algolia index to a new destination

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia Admin API Key |
| `indexName` | string | Yes | Name of the source index |
| `operation` | string | Yes | Operation to perform: "copy" or "move" |
| `destination` | string | Yes | Name of the destination index |
| `scope` | json | No | Array of scopes to copy \(only for "copy" operation\): \["settings", "synonyms", "rules"\]. Omit to copy everything including records. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `taskID` | number | Algolia task ID for tracking the copy/move operation |
| `updatedAt` | string | Timestamp when the operation was performed |

### `algolia_clear_records`

Clear all records from an Algolia index while keeping settings, synonyms, and rules

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia Admin API Key \(must have deleteIndex ACL\) |
| `indexName` | string | Yes | Name of the Algolia index to clear |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `taskID` | number | Algolia task ID for tracking the clear operation |
| `updatedAt` | string | Timestamp when the records were cleared |

### `algolia_delete_by_filter`

Delete all records matching a filter from an Algolia index

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `applicationId` | string | Yes | Algolia Application ID |
| `apiKey` | string | Yes | Algolia Admin API Key \(must have deleteIndex ACL\) |
| `indexName` | string | Yes | Name of the Algolia index |
| `filters` | string | No | Filter expression to match records for deletion \(e.g., "category:outdated"\) |
| `facetFilters` | json | No | Array of facet filters \(e.g., \["brand:Acme"\]\) |
| `numericFilters` | json | No | Array of numeric filters \(e.g., \["price > 100"\]\) |
| `tagFilters` | json | No | Array of tag filters using the _tags attribute \(e.g., \["published"\]\) |
| `aroundLatLng` | string | No | Coordinates for geo-search filter \(e.g., "40.71,-74.01"\) |
| `aroundRadius` | number | No | Maximum radius in meters for geo-search, or "all" for unlimited |
| `insideBoundingBox` | json | No | Bounding box coordinates as \[\[lat1, lng1, lat2, lng2\]\] for geo-search filter |
| `insidePolygon` | json | No | Polygon coordinates as \[\[lat1, lng1, lat2, lng2, lat3, lng3, ...\]\] for geo-search filter |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `taskID` | number | Algolia task ID for tracking the delete-by-filter operation |
| `updatedAt` | string | Timestamp when the operation was performed |

apps/docs/content/docs/en/tools/cloudflare.mdx (new file, 569 lines)

@@ -0,0 +1,569 @@
---
title: Cloudflare
description: Manage DNS, domains, certificates, and cache
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="cloudflare"
  color="#F5F6FA"
/>

{/* MANUAL-CONTENT-START:intro */}
[Cloudflare](https://cloudflare.com/) is a global cloud platform that provides content delivery, domain management, cybersecurity, and performance services for websites and applications.

In Sim, the Cloudflare integration empowers your agents to automate the management of DNS records, SSL/TLS certificates, domains (zones), cache, zone settings, and more through easy-to-use API tools. Agents can securely list and edit domains, update DNS records, monitor analytics, and manage security and performance—all as part of your automated workflows.

With Cloudflare, you can:

- **Manage DNS and Domains**: List all your domains (zones), view zone details, and fully control DNS records from your automated agent workflows.
- **Handle SSL/TLS Certificates and Settings**: Issue, renew, or list certificates and adjust security and performance settings for your sites.
- **Purge Cache and Analyze Traffic**: Instantly purge edge cache and review real-time DNS analytics directly within your Sim agent processes.
- **Automate Security and Operations**: Use agents to programmatically manage zones, update settings, and streamline repetitive Cloudflare tasks.

This integration enables streamlined, secure management of your site's infrastructure from within Sim. Your agents can integrate Cloudflare operations directly into processes—keeping DNS records up-to-date, responding to security events, improving site performance, and automating large-scale site and account administration.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Integrate Cloudflare into the workflow. Manage zones (domains), DNS records, SSL/TLS certificates, zone settings, DNS analytics, and cache purging via the Cloudflare API.

## Tools
### `cloudflare_list_zones`

Lists all zones (domains) in the Cloudflare account.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `name` | string | No | Filter zones by domain name \(e.g., "example.com"\) |
| `status` | string | No | Filter by zone status: "initializing", "pending", "active", or "moved" |
| `page` | number | No | Page number for pagination \(default: 1\) |
| `per_page` | number | No | Number of zones per page \(default: 20, max: 50\) |
| `accountId` | string | No | Filter zones by account ID |
| `order` | string | No | Sort field \(name, status, account.id, account.name\) |
| `direction` | string | No | Sort direction \(asc, desc\) |
| `match` | string | No | Match logic for filters \(any, all\). Default: all |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `zones` | array | List of zones/domains |
| ↳ `id` | string | Zone ID |
| ↳ `name` | string | Domain name |
| ↳ `status` | string | Zone status \(initializing, pending, active, moved\) |
| ↳ `paused` | boolean | Whether the zone is paused |
| ↳ `type` | string | Zone type \(full, partial, or secondary\) |
| ↳ `name_servers` | array | Assigned Cloudflare name servers |
| ↳ `original_name_servers` | array | Original name servers before moving to Cloudflare |
| ↳ `created_on` | string | ISO 8601 date when the zone was created |
| ↳ `modified_on` | string | ISO 8601 date when the zone was last modified |
| ↳ `activated_on` | string | ISO 8601 date when the zone was activated |
| ↳ `development_mode` | number | Seconds remaining in development mode \(0 = off\) |
| ↳ `plan` | object | Zone plan information |
| ↳ `id` | string | Plan identifier |
| ↳ `name` | string | Plan name |
| ↳ `price` | number | Plan price |
| ↳ `is_subscribed` | boolean | Whether the zone is subscribed to the plan |
| ↳ `frequency` | string | Plan billing frequency |
| ↳ `currency` | string | Plan currency |
| ↳ `legacy_id` | string | Legacy plan identifier |
| ↳ `account` | object | Account the zone belongs to |
| ↳ `id` | string | Account identifier |
| ↳ `name` | string | Account name |
| ↳ `owner` | object | Zone owner information |
| ↳ `id` | string | Owner identifier |
| ↳ `name` | string | Owner name |
| ↳ `type` | string | Owner type |
| ↳ `meta` | object | Zone metadata |
| ↳ `cdn_only` | boolean | Whether the zone is CDN only |
| ↳ `custom_certificate_quota` | number | Custom certificate quota |
| ↳ `dns_only` | boolean | Whether the zone is DNS only |
| ↳ `foundation_dns` | boolean | Whether foundation DNS is enabled |
| ↳ `page_rule_quota` | number | Page rule quota |
| ↳ `phishing_detected` | boolean | Whether phishing was detected |
| ↳ `step` | number | Current setup step |
| ↳ `vanity_name_servers` | array | Custom vanity name servers |
| ↳ `permissions` | array | User permissions for the zone |
| `total_count` | number | Total number of zones matching the query |
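
For orientation, here is a minimal sketch of the plain Cloudflare v4 REST call this tool corresponds to. The endpoint, query parameters, and Bearer-token auth are standard Cloudflare API conventions; the credential variable is a placeholder, and the exact request the block assembles may differ:

```typescript
// Minimal sketch: list active zones, one page of 20.
const apiKey = process.env.CLOUDFLARE_API_TOKEN! // placeholder credential
const res = await fetch(
  'https://api.cloudflare.com/client/v4/zones?status=active&page=1&per_page=20',
  { headers: { Authorization: `Bearer ${apiKey}` } }
)
const body = await res.json()
// body.result corresponds to `zones`; body.result_info.total_count to `total_count`
```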
### `cloudflare_get_zone`

Gets details for a specific zone (domain) by its ID.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to retrieve details for |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Zone ID |
| `name` | string | Domain name |
| `status` | string | Zone status \(initializing, pending, active, moved\) |
| `paused` | boolean | Whether the zone is paused |
| `type` | string | Zone type \(full, partial, or secondary\) |
| `name_servers` | array | Assigned Cloudflare name servers |
| `original_name_servers` | array | Original name servers before moving to Cloudflare |
| `created_on` | string | ISO 8601 date when the zone was created |
| `modified_on` | string | ISO 8601 date when the zone was last modified |
| `activated_on` | string | ISO 8601 date when the zone was activated |
| `development_mode` | number | Seconds remaining in development mode \(0 = off\) |
| `plan` | object | Zone plan information |
| ↳ `id` | string | Plan identifier |
| ↳ `name` | string | Plan name |
| ↳ `price` | number | Plan price |
| ↳ `is_subscribed` | boolean | Whether the zone is subscribed to the plan |
| ↳ `frequency` | string | Plan billing frequency |
| ↳ `currency` | string | Plan currency |
| ↳ `legacy_id` | string | Legacy plan identifier |
| `account` | object | Account the zone belongs to |
| ↳ `id` | string | Account identifier |
| ↳ `name` | string | Account name |
| `owner` | object | Zone owner information |
| ↳ `id` | string | Owner identifier |
| ↳ `name` | string | Owner name |
| ↳ `type` | string | Owner type |
| `meta` | object | Zone metadata |
| ↳ `cdn_only` | boolean | Whether the zone is CDN only |
| ↳ `custom_certificate_quota` | number | Custom certificate quota |
| ↳ `dns_only` | boolean | Whether the zone is DNS only |
| ↳ `foundation_dns` | boolean | Whether foundation DNS is enabled |
| ↳ `page_rule_quota` | number | Page rule quota |
| ↳ `phishing_detected` | boolean | Whether phishing was detected |
| ↳ `step` | number | Current setup step |
| `vanity_name_servers` | array | Custom vanity name servers |
| `permissions` | array | User permissions for the zone |
### `cloudflare_create_zone`

Adds a new zone (domain) to the Cloudflare account.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `name` | string | Yes | The domain name to add \(e.g., "example.com"\) |
| `accountId` | string | Yes | The Cloudflare account ID |
| `type` | string | No | Zone type: "full" \(Cloudflare manages DNS\), "partial" \(CNAME setup\), or "secondary" \(secondary DNS\) |
| `jump_start` | boolean | No | Automatically attempt to fetch existing DNS records when creating the zone |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Created zone ID |
| `name` | string | Domain name |
| `status` | string | Zone status \(initializing, pending, active, moved\) |
| `paused` | boolean | Whether the zone is paused |
| `type` | string | Zone type \(full, partial, or secondary\) |
| `name_servers` | array | Assigned Cloudflare name servers |
| `original_name_servers` | array | Original name servers before moving to Cloudflare |
| `created_on` | string | ISO 8601 date when the zone was created |
| `modified_on` | string | ISO 8601 date when the zone was last modified |
| `activated_on` | string | ISO 8601 date when the zone was activated |
| `development_mode` | number | Seconds remaining in development mode \(0 = off\) |
| `plan` | object | Zone plan information |
| ↳ `id` | string | Plan identifier |
| ↳ `name` | string | Plan name |
| ↳ `price` | number | Plan price |
| ↳ `is_subscribed` | boolean | Whether the zone is subscribed to the plan |
| ↳ `frequency` | string | Plan billing frequency |
| ↳ `currency` | string | Plan currency |
| ↳ `legacy_id` | string | Legacy plan identifier |
| `account` | object | Account the zone belongs to |
| ↳ `id` | string | Account identifier |
| ↳ `name` | string | Account name |
| `owner` | object | Zone owner information |
| ↳ `id` | string | Owner identifier |
| ↳ `name` | string | Owner name |
| ↳ `type` | string | Owner type |
| `meta` | object | Zone metadata |
| ↳ `cdn_only` | boolean | Whether the zone is CDN only |
| ↳ `custom_certificate_quota` | number | Custom certificate quota |
| ↳ `dns_only` | boolean | Whether the zone is DNS only |
| ↳ `foundation_dns` | boolean | Whether foundation DNS is enabled |
| ↳ `page_rule_quota` | number | Page rule quota |
| ↳ `phishing_detected` | boolean | Whether phishing was detected |
| ↳ `step` | number | Current setup step |
| `vanity_name_servers` | array | Custom vanity name servers |
| `permissions` | array | User permissions for the zone |
### `cloudflare_delete_zone`

Deletes a zone (domain) from the Cloudflare account.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to delete |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Deleted zone ID |
### `cloudflare_list_dns_records`

Lists DNS records for a specific zone.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to list DNS records for |
| `type` | string | No | Filter by record type \(e.g., "A", "AAAA", "CNAME", "MX", "TXT"\) |
| `name` | string | No | Filter by record name \(exact match\) |
| `content` | string | No | Filter by record content \(exact match\) |
| `page` | number | No | Page number for pagination \(default: 1\) |
| `per_page` | number | No | Number of records per page \(default: 100, max: 5000000\) |
| `direction` | string | No | Sort direction \(asc or desc\) |
| `match` | string | No | Match logic for filters: any or all \(default: all\) |
| `order` | string | No | Sort field \(type, name, content, ttl, proxied\) |
| `proxied` | boolean | No | Filter by proxy status |
| `search` | string | No | Free-text search across record name, content, and value |
| `tag` | string | No | Filter by tags \(comma-separated\) |
| `tag_match` | string | No | Tag filter match logic: any or all |
| `commentFilter` | string | No | Filter records by comment content \(substring match\) |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `records` | array | List of DNS records |
| ↳ `id` | string | Unique identifier for the DNS record |
| ↳ `zone_id` | string | The ID of the zone the record belongs to |
| ↳ `zone_name` | string | The name of the zone |
| ↳ `type` | string | Record type \(A, AAAA, CNAME, MX, TXT, etc.\) |
| ↳ `name` | string | Record name \(e.g., example.com\) |
| ↳ `content` | string | Record content \(e.g., IP address\) |
| ↳ `proxiable` | boolean | Whether the record can be proxied |
| ↳ `proxied` | boolean | Whether Cloudflare proxy is enabled |
| ↳ `ttl` | number | TTL in seconds \(1 = automatic\) |
| ↳ `locked` | boolean | Whether the record is locked |
| ↳ `priority` | number | MX/SRV record priority |
| ↳ `comment` | string | Comment associated with the record |
| ↳ `tags` | array | Tags associated with the record |
| ↳ `comment_modified_on` | string | ISO 8601 timestamp when the comment was last modified |
| ↳ `tags_modified_on` | string | ISO 8601 timestamp when tags were last modified |
| ↳ `meta` | object | Record metadata |
| ↳ `source` | string | Source of the DNS record |
| ↳ `created_on` | string | ISO 8601 timestamp when the record was created |
| ↳ `modified_on` | string | ISO 8601 timestamp when the record was last modified |
| `total_count` | number | Total number of DNS records matching the query |
### `cloudflare_create_dns_record`

Creates a new DNS record for a zone.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to create the DNS record in |
| `type` | string | Yes | DNS record type \(e.g., "A", "AAAA", "CNAME", "MX", "TXT", "NS", "SRV"\) |
| `name` | string | Yes | DNS record name \(e.g., "example.com" or "subdomain.example.com"\) |
| `content` | string | Yes | DNS record content \(e.g., IP address for A records, target for CNAME\) |
| `ttl` | number | No | Time to live in seconds \(1 = automatic, default: 1\) |
| `proxied` | boolean | No | Whether to enable Cloudflare proxy \(default: false\) |
| `priority` | number | No | Priority for MX and SRV records |
| `comment` | string | No | Comment for the DNS record |
| `tags` | string | No | Comma-separated tags for the DNS record |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Unique identifier for the created DNS record |
| `zone_id` | string | The ID of the zone the record belongs to |
| `zone_name` | string | The name of the zone |
| `type` | string | DNS record type \(A, AAAA, CNAME, MX, TXT, etc.\) |
| `name` | string | DNS record hostname |
| `content` | string | DNS record value \(e.g., IP address, target hostname\) |
| `proxiable` | boolean | Whether the record can be proxied through Cloudflare |
| `proxied` | boolean | Whether Cloudflare proxy is enabled |
| `ttl` | number | Time to live in seconds \(1 = automatic\) |
| `locked` | boolean | Whether the record is locked |
| `priority` | number | Priority for MX and SRV records |
| `comment` | string | Comment associated with the record |
| `tags` | array | Tags associated with the record |
| `comment_modified_on` | string | ISO 8601 timestamp when the comment was last modified |
| `tags_modified_on` | string | ISO 8601 timestamp when tags were last modified |
| `meta` | object | Record metadata |
| ↳ `source` | string | Source of the DNS record |
| `created_on` | string | ISO 8601 timestamp when the record was created |
| `modified_on` | string | ISO 8601 timestamp when the record was last modified |
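
As a reference point, a minimal sketch of the equivalent direct API call, under standard Cloudflare v4 conventions (the zone ID, hostname, and IP below are illustrative, not values the tool prescribes):

```typescript
const apiKey = process.env.CLOUDFLARE_API_TOKEN! // placeholder credential
const zoneId = '023e105f4ecef8ad9ca31a8372d0c353' // illustrative zone ID
const res = await fetch(
  `https://api.cloudflare.com/client/v4/zones/${zoneId}/dns_records`,
  {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${apiKey}`,
      'Content-Type': 'application/json',
    },
    // ttl: 1 means "automatic"; proxied: true routes traffic through Cloudflare
    body: JSON.stringify({
      type: 'A',
      name: 'app.example.com',
      content: '203.0.113.10',
      ttl: 1,
      proxied: true,
    }),
  }
)
const { result } = await res.json() // result.id is the new record's ID
```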
### `cloudflare_update_dns_record`

Updates an existing DNS record for a zone.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID containing the DNS record |
| `recordId` | string | Yes | The DNS record ID to update |
| `type` | string | No | DNS record type \(e.g., "A", "AAAA", "CNAME", "MX", "TXT"\) |
| `name` | string | No | DNS record name |
| `content` | string | No | DNS record content \(e.g., IP address\) |
| `ttl` | number | No | Time to live in seconds \(1 = automatic\) |
| `proxied` | boolean | No | Whether to enable Cloudflare proxy |
| `priority` | number | No | Priority for MX and SRV records |
| `comment` | string | No | Comment for the DNS record |
| `tags` | string | No | Comma-separated tags for the DNS record |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Unique identifier for the updated DNS record |
| `zone_id` | string | The ID of the zone the record belongs to |
| `zone_name` | string | The name of the zone |
| `type` | string | DNS record type \(A, AAAA, CNAME, MX, TXT, etc.\) |
| `name` | string | DNS record hostname |
| `content` | string | DNS record value \(e.g., IP address, target hostname\) |
| `proxiable` | boolean | Whether the record can be proxied through Cloudflare |
| `proxied` | boolean | Whether Cloudflare proxy is enabled |
| `ttl` | number | Time to live in seconds \(1 = automatic\) |
| `locked` | boolean | Whether the record is locked |
| `priority` | number | Priority for MX and SRV records |
| `comment` | string | Comment associated with the record |
| `tags` | array | Tags associated with the record |
| `comment_modified_on` | string | ISO 8601 timestamp when the comment was last modified |
| `tags_modified_on` | string | ISO 8601 timestamp when tags were last modified |
| `meta` | object | Record metadata |
| ↳ `source` | string | Source of the DNS record |
| `created_on` | string | ISO 8601 timestamp when the record was created |
| `modified_on` | string | ISO 8601 timestamp when the record was last modified |
### `cloudflare_delete_dns_record`

Deletes a DNS record from a zone.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID containing the DNS record |
| `recordId` | string | Yes | The DNS record ID to delete |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Deleted record ID |
### `cloudflare_list_certificates`

Lists SSL/TLS certificate packs for a zone.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to list certificates for |
| `status` | string | No | Filter certificate packs by status \(e.g., "all", "active", "pending"\) |
| `page` | number | No | Page number of paginated results \(default: 1\) |
| `per_page` | number | No | Number of certificate packs per page \(default: 20, min: 5, max: 50\) |
| `deploy` | string | No | Filter by deployment environment: "staging" or "production" |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `certificates` | array | List of SSL/TLS certificate packs |
| ↳ `id` | string | Certificate pack ID |
| ↳ `type` | string | Certificate type \(e.g., "universal", "advanced"\) |
| ↳ `hosts` | array | Hostnames covered by this certificate pack |
| ↳ `primary_certificate` | string | ID of the primary certificate in the pack |
| ↳ `status` | string | Certificate pack status \(e.g., "active", "pending"\) |
| ↳ `certificates` | array | Individual certificates within the pack |
| ↳ `id` | string | Certificate ID |
| ↳ `hosts` | array | Hostnames covered by this certificate |
| ↳ `issuer` | string | Certificate issuer |
| ↳ `signature` | string | Signature algorithm \(e.g., "ECDSAWithSHA256"\) |
| ↳ `status` | string | Certificate status |
| ↳ `bundle_method` | string | Bundle method \(e.g., "ubiquitous"\) |
| ↳ `zone_id` | string | Zone ID the certificate belongs to |
| ↳ `uploaded_on` | string | Upload date \(ISO 8601\) |
| ↳ `modified_on` | string | Last modified date \(ISO 8601\) |
| ↳ `expires_on` | string | Expiration date \(ISO 8601\) |
| ↳ `priority` | number | Certificate priority order |
| ↳ `geo_restrictions` | object | Geographic restrictions for the certificate |
| ↳ `label` | string | Geographic restriction label |
| ↳ `cloudflare_branding` | boolean | Whether Cloudflare branding is enabled on the certificate |
| ↳ `validation_method` | string | Validation method \(e.g., "txt", "http", "cname"\) |
| ↳ `validity_days` | number | Validity period in days |
| ↳ `certificate_authority` | string | Certificate authority \(e.g., "lets_encrypt", "google"\) |
| ↳ `validation_errors` | array | Validation issues for the certificate pack |
| ↳ `message` | string | Validation error message |
| ↳ `validation_records` | array | Validation records for the certificate pack |
| ↳ `cname` | string | CNAME record name |
| ↳ `cname_target` | string | CNAME record target |
| ↳ `emails` | array | Email addresses for validation |
| ↳ `http_body` | string | HTTP validation body content |
| ↳ `http_url` | string | HTTP validation URL |
| ↳ `status` | string | Validation record status |
| ↳ `txt_name` | string | TXT record name |
| ↳ `txt_value` | string | TXT record value |
| ↳ `dcv_delegation_records` | array | Domain control validation delegation records |
| ↳ `cname` | string | CNAME record name |
| ↳ `cname_target` | string | CNAME record target |
| ↳ `emails` | array | Email addresses for validation |
| ↳ `http_body` | string | HTTP validation body content |
| ↳ `http_url` | string | HTTP validation URL |
| ↳ `status` | string | Delegation record status |
| ↳ `txt_name` | string | TXT record name |
| ↳ `txt_value` | string | TXT record value |
| `total_count` | number | Total number of certificate packs |
### `cloudflare_get_zone_settings`

Gets all settings for a zone including SSL mode, minification, caching level, and security settings.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to get settings for |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `settings` | array | List of zone settings |
| ↳ `id` | string | Setting identifier \(e.g., ssl, minify, cache_level, security_level, always_use_https\) |
| ↳ `value` | string | Setting value as a string. Simple values returned as-is \(e.g., "full", "on"\). Complex values are JSON-stringified \(e.g., \{"css":"on","html":"on","js":"on"\} for minify\) |
| ↳ `editable` | boolean | Whether the setting can be modified for the current zone plan |
| ↳ `modified_on` | string | ISO 8601 timestamp when the setting was last modified |
| ↳ `time_remaining` | number | Seconds remaining until the setting can be modified again \(only present for rate-limited settings\) |
### `cloudflare_update_zone_setting`

Updates a specific zone setting such as SSL mode, security level, cache level, minification, or other configuration.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to update settings for |
| `settingId` | string | Yes | Setting to update \(e.g., "ssl", "security_level", "cache_level", "minify", "always_use_https", "browser_cache_ttl", "http3", "min_tls_version", "ciphers"\) |
| `value` | string | Yes | New value for the setting as a string or JSON string for complex values \(e.g., "full" for SSL, "medium" for security_level, "aggressive" for cache_level, \'\{"css":"on","html":"on","js":"on"\}\' for minify, \'\["ECDHE-RSA-AES128-GCM-SHA256"\]\' for ciphers\) |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Setting identifier \(e.g., ssl, minify, cache_level\) |
| `value` | string | Updated setting value as a string. Simple values returned as-is \(e.g., "full", "on"\). Complex values are JSON-stringified. |
| `editable` | boolean | Whether the setting can be modified for the current zone plan |
| `modified_on` | string | ISO 8601 timestamp when the setting was last modified |
| `time_remaining` | number | Seconds remaining until the setting can be modified again \(only present for rate-limited settings\) |
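
The simple-versus-JSON value distinction mirrors the underlying `PATCH /zones/:id/settings/:setting` endpoint. A minimal sketch with illustrative IDs and a placeholder credential:

```typescript
const apiKey = process.env.CLOUDFLARE_API_TOKEN! // placeholder credential
const zoneId = '023e105f4ecef8ad9ca31a8372d0c353' // illustrative zone ID

// Simple value: switch SSL mode to "full".
await fetch(`https://api.cloudflare.com/client/v4/zones/${zoneId}/settings/ssl`, {
  method: 'PATCH',
  headers: { Authorization: `Bearer ${apiKey}`, 'Content-Type': 'application/json' },
  body: JSON.stringify({ value: 'full' }),
})

// Complex value: the tool accepts a JSON string; the API ultimately receives an object.
const minify = JSON.parse('{"css":"on","html":"on","js":"on"}')
await fetch(`https://api.cloudflare.com/client/v4/zones/${zoneId}/settings/minify`, {
  method: 'PATCH',
  headers: { Authorization: `Bearer ${apiKey}`, 'Content-Type': 'application/json' },
  body: JSON.stringify({ value: minify }),
})
```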
### `cloudflare_dns_analytics`

Gets DNS analytics report for a zone including query counts and trends.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to get DNS analytics for |
| `since` | string | No | Start date for analytics \(ISO 8601, e.g., "2024-01-01T00:00:00Z"\) or relative \(e.g., "-6h"\) |
| `until` | string | No | End date for analytics \(ISO 8601, e.g., "2024-01-31T23:59:59Z"\) or relative \(e.g., "now"\) |
| `metrics` | string | Yes | Comma-separated metrics to retrieve \(e.g., "queryCount,uncachedCount,staleCount,responseTimeAvg,responseTimeMedian,responseTime90th,responseTime99th"\) |
| `dimensions` | string | No | Comma-separated dimensions to group by \(e.g., "queryName,queryType,responseCode,responseCached,coloName,origin,dayOfWeek,tcp,ipVersion,querySizeBucket,responseSizeBucket"\) |
| `filters` | string | No | Filters to apply to the data \(e.g., "queryType==A"\) |
| `sort` | string | No | Sort order for the result set. Fields must be included in metrics or dimensions \(e.g., "+queryCount" or "-responseTimeAvg"\) |
| `limit` | number | No | Maximum number of results to return |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `totals` | object | Aggregate DNS analytics totals for the entire queried period |
| ↳ `queryCount` | number | Total number of DNS queries |
| ↳ `uncachedCount` | number | Number of uncached DNS queries |
| ↳ `staleCount` | number | Number of stale DNS queries |
| ↳ `responseTimeAvg` | number | Average response time in milliseconds |
| ↳ `responseTimeMedian` | number | Median response time in milliseconds |
| ↳ `responseTime90th` | number | 90th percentile response time in milliseconds |
| ↳ `responseTime99th` | number | 99th percentile response time in milliseconds |
| `min` | object | Minimum values across the analytics period |
| ↳ `queryCount` | number | Minimum number of DNS queries |
| ↳ `uncachedCount` | number | Minimum number of uncached DNS queries |
| ↳ `staleCount` | number | Minimum number of stale DNS queries |
| ↳ `responseTimeAvg` | number | Minimum average response time in milliseconds |
| ↳ `responseTimeMedian` | number | Minimum median response time in milliseconds |
| ↳ `responseTime90th` | number | Minimum 90th percentile response time in milliseconds |
| ↳ `responseTime99th` | number | Minimum 99th percentile response time in milliseconds |
| `max` | object | Maximum values across the analytics period |
| ↳ `queryCount` | number | Maximum number of DNS queries |
| ↳ `uncachedCount` | number | Maximum number of uncached DNS queries |
| ↳ `staleCount` | number | Maximum number of stale DNS queries |
| ↳ `responseTimeAvg` | number | Maximum average response time in milliseconds |
| ↳ `responseTimeMedian` | number | Maximum median response time in milliseconds |
| ↳ `responseTime90th` | number | Maximum 90th percentile response time in milliseconds |
| ↳ `responseTime99th` | number | Maximum 99th percentile response time in milliseconds |
| `data` | array | Raw analytics data rows returned by the Cloudflare DNS analytics report |
| ↳ `dimensions` | array | Dimension values for this data row, parallel to the requested dimensions list |
| ↳ `metrics` | array | Metric values for this data row, parallel to the requested metrics list |
| `data_lag` | number | Processing lag in seconds before analytics data becomes available |
| `rows` | number | Total number of rows in the result set |
| `query` | object | Echo of the query parameters sent to the API |
| ↳ `since` | string | Start date of the analytics query |
| ↳ `until` | string | End date of the analytics query |
| ↳ `metrics` | array | Metrics requested in the query |
| ↳ `dimensions` | array | Dimensions requested in the query |
| ↳ `filters` | string | Filters applied to the query |
| ↳ `sort` | array | Sort order applied to the query |
| ↳ `limit` | number | Maximum number of results requested |
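
A sketch of an analytics query using the parameter conventions above. This assumes Cloudflare's documented `/dns_analytics/report` path; IDs, token, and values are illustrative:

```typescript
const apiKey = process.env.CLOUDFLARE_API_TOKEN! // placeholder credential
const zoneId = '023e105f4ecef8ad9ca31a8372d0c353' // illustrative zone ID
const params = new URLSearchParams({
  metrics: 'queryCount,responseTimeAvg', // what to measure
  dimensions: 'queryType',               // how to group rows
  since: '-6h',                          // relative start time
  until: 'now',
  sort: '-queryCount',                   // descending by query volume
  limit: '10',
})
const res = await fetch(
  `https://api.cloudflare.com/client/v4/zones/${zoneId}/dns_analytics/report?${params}`,
  { headers: { Authorization: `Bearer ${apiKey}` } }
)
const report = (await res.json()).result // data rows run parallel to metrics/dimensions
```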
### `cloudflare_purge_cache`

Purges cached content for a zone. Can purge everything or specific files/tags/hosts/prefixes.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to purge cache for |
| `purge_everything` | boolean | No | Set to true to purge all cached content. Mutually exclusive with files, tags, hosts, and prefixes |
| `files` | string | No | Comma-separated list of URLs to purge from cache |
| `tags` | string | No | Comma-separated list of cache tags to purge \(Enterprise only\) |
| `hosts` | string | No | Comma-separated list of hostnames to purge \(Enterprise only\) |
| `prefixes` | string | No | Comma-separated list of URL prefixes to purge \(Enterprise only\) |
| `apiKey` | string | Yes | Cloudflare API Token |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Purge request identifier returned by Cloudflare |
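
For reference, a minimal sketch of the purge endpoint this tool wraps (illustrative zone ID and URLs, placeholder credential):

```typescript
const apiKey = process.env.CLOUDFLARE_API_TOKEN! // placeholder credential
const zoneId = '023e105f4ecef8ad9ca31a8372d0c353' // illustrative zone ID
// Purge two specific URLs; send { purge_everything: true } instead to flush the whole zone.
await fetch(`https://api.cloudflare.com/client/v4/zones/${zoneId}/purge_cache`, {
  method: 'POST',
  headers: { Authorization: `Bearer ${apiKey}`, 'Content-Type': 'application/json' },
  body: JSON.stringify({
    files: ['https://example.com/styles.css', 'https://example.com/app.js'],
  }),
})
```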
@@ -234,7 +234,6 @@ List actions from incident.io. Optionally filter by incident ID.
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | incident.io API Key |
| `incident_id` | string | No | Filter actions by incident ID \(e.g., "01FCNDV6P870EA6S7TK1DSYDG0"\) |
| `page_size` | number | No | Number of actions to return per page \(e.g., 10, 25, 50\) |

#### Output

@@ -309,7 +308,6 @@ List follow-ups from incident.io. Optionally filter by incident ID.
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | incident.io API Key |
| `incident_id` | string | No | Filter follow-ups by incident ID \(e.g., "01FCNDV6P870EA6S7TK1DSYDG0"\) |
| `page_size` | number | No | Number of follow-ups to return per page \(e.g., 10, 25, 50\) |

#### Output

@@ -396,6 +394,7 @@ List all users in your Incident.io workspace. Returns user details including id,
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Incident.io API Key |
| `page_size` | number | No | Number of results to return per page \(e.g., 10, 25, 50\). Default: 25 |
| `after` | string | No | Pagination cursor to fetch the next page of results |

#### Output

@@ -406,6 +405,10 @@ List all users in your Incident.io workspace. Returns user details including id,
| ↳ `name` | string | Full name of the user |
| ↳ `email` | string | Email address of the user |
| ↳ `role` | string | Role of the user in the workspace |
| `pagination_meta` | object | Pagination metadata |
| ↳ `after` | string | Cursor for next page |
| ↳ `page_size` | number | Number of items per page |
| ↳ `total_record_count` | number | Total number of records |

### `incidentio_users_show`

@@ -644,7 +647,6 @@ List all escalation policies in incident.io
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | incident.io API Key |
| `page_size` | number | No | Number of results per page \(e.g., 10, 25, 50\). Default: 25 |

#### Output
@@ -29,7 +29,7 @@ In Sim, the Knowledge Base block enables your agents to perform intelligent sema

## Usage Instructions

Integrate Knowledge into the workflow. Perform full CRUD operations on documents, chunks, and tags.
Integrate Knowledge into the workflow. Can search, upload chunks, and create documents.
@@ -126,161 +126,4 @@ Create a new document in a knowledge base
| `message` | string | Success or error message describing the operation result |
| `documentId` | string | ID of the created document |

### `knowledge_list_tags`

List all tag definitions for a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base to list tags for |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `knowledgeBaseId` | string | ID of the knowledge base |
| `tags` | array | Array of tag definitions for the knowledge base |
| ↳ `id` | string | Tag definition ID |
| ↳ `tagSlot` | string | Internal tag slot \(e.g. tag1, number1\) |
| ↳ `displayName` | string | Human-readable tag name |
| ↳ `fieldType` | string | Tag field type \(text, number, date, boolean\) |
| ↳ `createdAt` | string | Creation timestamp |
| ↳ `updatedAt` | string | Last update timestamp |
| `totalTags` | number | Total number of tag definitions |

### `knowledge_list_documents`

List documents in a knowledge base with optional filtering, search, and pagination

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base to list documents from |
| `search` | string | No | Search query to filter documents by filename |
| `enabledFilter` | string | No | Filter by enabled status: "all", "enabled", or "disabled" |
| `limit` | number | No | Maximum number of documents to return \(default: 50\) |
| `offset` | number | No | Number of documents to skip for pagination \(default: 0\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `knowledgeBaseId` | string | ID of the knowledge base |
| `documents` | array | Array of documents in the knowledge base |
| ↳ `id` | string | Document ID |
| ↳ `filename` | string | Document filename |
| ↳ `fileSize` | number | File size in bytes |
| ↳ `mimeType` | string | MIME type of the document |
| ↳ `enabled` | boolean | Whether the document is enabled |
| ↳ `processingStatus` | string | Processing status \(pending, processing, completed, failed\) |
| ↳ `chunkCount` | number | Number of chunks in the document |
| ↳ `tokenCount` | number | Total token count across chunks |
| ↳ `uploadedAt` | string | Upload timestamp |
| ↳ `updatedAt` | string | Last update timestamp |
| `totalDocuments` | number | Total number of documents matching the filter |
| `limit` | number | Page size used |
| `offset` | number | Offset used for pagination |

### `knowledge_delete_document`

Delete a document from a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base containing the document |
| `documentId` | string | Yes | ID of the document to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `documentId` | string | ID of the deleted document |
| `message` | string | Confirmation message |

### `knowledge_list_chunks`

List chunks for a document in a knowledge base with optional filtering and pagination

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base |
| `documentId` | string | Yes | ID of the document to list chunks from |
| `search` | string | No | Search query to filter chunks by content |
| `enabled` | string | No | Filter by enabled status: "true", "false", or "all" \(default: "all"\) |
| `limit` | number | No | Maximum number of chunks to return \(1-100, default: 50\) |
| `offset` | number | No | Number of chunks to skip for pagination \(default: 0\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `knowledgeBaseId` | string | ID of the knowledge base |
| `documentId` | string | ID of the document |
| `chunks` | array | Array of chunks in the document |
| ↳ `id` | string | Chunk ID |
| ↳ `chunkIndex` | number | Index of the chunk within the document |
| ↳ `content` | string | Chunk text content |
| ↳ `contentLength` | number | Content length in characters |
| ↳ `tokenCount` | number | Token count for the chunk |
| ↳ `enabled` | boolean | Whether the chunk is enabled |
| ↳ `createdAt` | string | Creation timestamp |
| ↳ `updatedAt` | string | Last update timestamp |
| `totalChunks` | number | Total number of chunks matching the filter |
| `limit` | number | Page size used |
| `offset` | number | Offset used for pagination |

### `knowledge_update_chunk`

Update the content or enabled status of a chunk in a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base |
| `documentId` | string | Yes | ID of the document containing the chunk |
| `chunkId` | string | Yes | ID of the chunk to update |
| `content` | string | No | New content for the chunk |
| `enabled` | boolean | No | Whether the chunk should be enabled or disabled |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `documentId` | string | ID of the parent document |
| `id` | string | Chunk ID |
| `chunkIndex` | number | Index of the chunk within the document |
| `content` | string | Updated chunk content |
| `contentLength` | number | Content length in characters |
| `tokenCount` | number | Token count for the chunk |
| `enabled` | boolean | Whether the chunk is enabled |
| `updatedAt` | string | Last update timestamp |

### `knowledge_delete_chunk`

Delete a chunk from a document in a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base |
| `documentId` | string | Yes | ID of the document containing the chunk |
| `chunkId` | string | Yes | ID of the chunk to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `chunkId` | string | ID of the deleted chunk |
| `documentId` | string | ID of the parent document |
| `message` | string | Confirmation message |
@@ -5,6 +5,7 @@
    "ahrefs",
    "airtable",
    "airweave",
    "algolia",
    "apify",
    "apollo",
    "arxiv",
@@ -15,6 +16,7 @@
    "circleback",
    "clay",
    "clerk",
    "cloudflare",
    "confluence",
    "cursor",
    "datadog",
@@ -68,6 +70,7 @@
    "mailgun",
    "mem0",
    "memory",
    "microsoft_dataverse",
    "microsoft_excel",
    "microsoft_planner",
    "microsoft_teams",
@@ -91,8 +94,10 @@
    "qdrant",
    "rds",
    "reddit",
    "redis",
    "reducto",
    "resend",
    "revenuecat",
    "s3",
    "salesforce",
    "search",
@@ -112,6 +117,7 @@
    "stripe",
    "stt",
    "supabase",
    "table",
    "tavily",
    "telegram",
    "textract",
@@ -122,6 +128,8 @@
    "twilio_sms",
    "twilio_voice",
    "typeform",
    "upstash",
    "vercel",
    "video_generator",
    "vision",
    "wealthbox",
426
apps/docs/content/docs/en/tools/microsoft_dataverse.mdx
Normal file

@@ -0,0 +1,426 @@
---
title: Microsoft Dataverse
description: Manage records in Microsoft Dataverse tables
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="microsoft_dataverse"
  color="#E0E0E0"
/>

{/* MANUAL-CONTENT-START:intro */}
[Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/maker/data-platform/data-platform-intro) is a powerful cloud data platform for securely storing, managing, and interacting with structured business data. The Microsoft Dataverse integration enables you to programmatically create, read, update, delete, and link records in Dataverse tables as part of your workflow and automation needs.

With Microsoft Dataverse integration, you can:

- **List and query records:** Access lists of records or query with advanced filters to find the data you need from any Dataverse table.
- **Create and update records:** Add new records or update existing ones in any table for use across Power Platform, Dynamics 365, and custom apps.
- **Delete and manage records:** Remove records as part of data lifecycle management directly from your automation flows.
- **Associate and disassociate records:** Link related items together or remove associations using entity relationships and navigation properties—essential for reflecting complex business processes.
- **Work with any Dataverse environment:** Connect to your organization’s environments, including production, sandbox, or Dynamics 365 tenants, for maximum flexibility.
- **Integrate with Power Platform and Dynamics 365:** Automate tasks ranging from sales and marketing data updates to custom app workflows—all powered by Dataverse's security and governance.

The Dataverse integration empowers solution builders and business users to automate business processes, maintain accurate and up-to-date information, create system integrations, trigger actions, and drive insights—all with robust security and governance.

Connect Microsoft Dataverse to your automations to unlock sophisticated data management, orchestration, and business logic across your apps, teams, and cloud services.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Integrate Microsoft Dataverse into your workflow. Create, read, update, delete, upsert, associate, query, search, and execute actions and functions against Dataverse tables using the Web API. Supports bulk operations, FetchXML, file uploads, and relevance search. Works with Dynamics 365, Power Platform, and custom Dataverse environments.

## Tools
### `microsoft_dataverse_associate`

Associate two records in Microsoft Dataverse via a navigation property. Creates a relationship between a source record and a target record. Supports both collection-valued (POST) and single-valued (PUT) navigation properties.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Source entity set name \(e.g., accounts\) |
| `recordId` | string | Yes | Source record GUID |
| `navigationProperty` | string | Yes | Navigation property name \(e.g., contact_customer_accounts for collection-valued, or parentcustomerid_account for single-valued\) |
| `targetEntitySetName` | string | Yes | Target entity set name \(e.g., contacts\) |
| `targetRecordId` | string | Yes | Target record GUID to associate |
| `navigationType` | string | No | Type of navigation property: "collection" \(default, uses POST\) or "single" \(uses PUT for lookup fields\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the association was created successfully |
| `entitySetName` | string | Source entity set name used in the association |
| `recordId` | string | Source record GUID that was associated |
| `navigationProperty` | string | Navigation property used for the association |
| `targetEntitySetName` | string | Target entity set name used in the association |
| `targetRecordId` | string | Target record GUID that was associated |
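
For context, the collection-valued case maps to the Web API's `POST .../$ref` pattern. A minimal sketch assuming the v9.2 Web API, with placeholder GUIDs, environment URL, and token:

```typescript
const accessToken = process.env.DATAVERSE_TOKEN! // placeholder credential
const env = 'https://myorg.crm.dynamics.com'      // illustrative environment URL
// Link a contact to an account through the contact_customer_accounts relationship.
await fetch(
  `${env}/api/data/v9.2/accounts(00000000-0000-0000-0000-000000000001)/contact_customer_accounts/$ref`,
  {
    method: 'POST',
    headers: { Authorization: `Bearer ${accessToken}`, 'Content-Type': 'application/json' },
    body: JSON.stringify({
      '@odata.id': `${env}/api/data/v9.2/contacts(00000000-0000-0000-0000-000000000002)`,
    }),
  }
)
```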
### `microsoft_dataverse_create_multiple`

Create multiple records of the same table type in a single request. Each record in the Targets array must include an @odata.type annotation. Recommended batch size: 100-1000 records for standard tables.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `entityLogicalName` | string | Yes | Table logical name for @odata.type annotation \(e.g., account, contact\). Used to set Microsoft.Dynamics.CRM.\{entityLogicalName\} on each record. |
| `records` | object | Yes | Array of record objects to create. Each record should contain column logical names as keys. The @odata.type annotation is added automatically. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ids` | array | Array of GUIDs for the created records |
| `count` | number | Number of records created |
| `success` | boolean | Whether all records were created successfully |
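
A sketch of the `Targets` payload the CreateMultiple action expects (the tool derives the `@odata.type` annotation from `entityLogicalName` for you; names, URL, and token below are placeholders):

```typescript
const accessToken = process.env.DATAVERSE_TOKEN! // placeholder credential
const env = 'https://myorg.crm.dynamics.com'      // illustrative environment URL
// Bound bulk-create action: POST to {entity set}/Microsoft.Dynamics.CRM.CreateMultiple.
await fetch(`${env}/api/data/v9.2/accounts/Microsoft.Dynamics.CRM.CreateMultiple`, {
  method: 'POST',
  headers: { Authorization: `Bearer ${accessToken}`, 'Content-Type': 'application/json' },
  body: JSON.stringify({
    Targets: [
      { '@odata.type': 'Microsoft.Dynamics.CRM.account', name: 'Contoso' },
      { '@odata.type': 'Microsoft.Dynamics.CRM.account', name: 'Fabrikam' },
    ],
  }),
})
```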
### `microsoft_dataverse_create_record`

Create a new record in a Microsoft Dataverse table. Requires the entity set name (plural table name) and record data as a JSON object.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `data` | object | Yes | Record data as a JSON object with column names as keys |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | The ID of the created record |
| `record` | object | Dataverse record object. Contains dynamic columns based on the queried table, plus OData metadata fields. |
| `success` | boolean | Whether the record was created successfully |
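
A minimal sketch of the underlying Web API call, assuming the standard v9.2 endpoint (environment URL, token, and column values are illustrative):

```typescript
const accessToken = process.env.DATAVERSE_TOKEN! // placeholder credential
const env = 'https://myorg.crm.dynamics.com'      // illustrative environment URL
const res = await fetch(`${env}/api/data/v9.2/accounts`, {
  method: 'POST',
  headers: {
    Authorization: `Bearer ${accessToken}`,
    'Content-Type': 'application/json',
    Prefer: 'return=representation', // ask Dataverse to echo the created record back
  },
  body: JSON.stringify({ name: 'Contoso', telephone1: '555-0100' }),
})
const record = await res.json() // record.accountid is the created record's GUID
```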
### `microsoft_dataverse_delete_record`

Delete a record from a Microsoft Dataverse table by its ID.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | The ID of the deleted record |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_disassociate`

Remove an association between two records in Microsoft Dataverse. For collection-valued navigation properties, provide the target record ID. For single-valued navigation properties, only the navigation property name is needed.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Source entity set name \(e.g., accounts\) |
| `recordId` | string | Yes | Source record GUID |
| `navigationProperty` | string | Yes | Navigation property name \(e.g., contact_customer_accounts for collection-valued, or parentcustomerid_account for single-valued\) |
| `targetRecordId` | string | No | Target record GUID \(required for collection-valued navigation properties, omit for single-valued\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the disassociation was completed successfully |
| `entitySetName` | string | Source entity set name used in the disassociation |
| `recordId` | string | Source record GUID that was disassociated |
| `navigationProperty` | string | Navigation property used for the disassociation |
| `targetRecordId` | string | Target record GUID that was disassociated |
### `microsoft_dataverse_download_file`

Download a file from a file or image column on a Dataverse record. Returns the file content as a base64-encoded string along with file metadata from response headers.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | Record GUID to download the file from |
| `fileColumn` | string | Yes | File or image column logical name \(e.g., entityimage, cr_document\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `fileContent` | string | Base64-encoded file content |
| `fileName` | string | Name of the downloaded file |
| `fileSize` | number | File size in bytes |
| `mimeType` | string | MIME type of the file |
| `success` | boolean | Whether the file was downloaded successfully |
### `microsoft_dataverse_execute_action`

Execute a bound or unbound Dataverse action. Actions perform operations with side effects (e.g., Merge, GrantAccess, SendEmail, QualifyLead). For bound actions, provide the entity set name and record ID.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `actionName` | string | Yes | Action name \(e.g., Merge, GrantAccess, SendEmail\). Do not include the Microsoft.Dynamics.CRM. namespace prefix for unbound actions. |
| `entitySetName` | string | No | Entity set name for bound actions \(e.g., accounts\). Leave empty for unbound actions. |
| `recordId` | string | No | Record GUID for bound actions. Leave empty for unbound or collection-bound actions. |
| `parameters` | object | No | Action parameters as a JSON object. For entity references, include @odata.type annotation \(e.g., \{"Target": \{"@odata.type": "Microsoft.Dynamics.CRM.account", "accountid": "..."\}\}\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `result` | object | Action response data. Structure varies by action. Null for actions that return 204 No Content. |
| `success` | boolean | Whether the action executed successfully |
### `microsoft_dataverse_execute_function`

Execute a bound or unbound Dataverse function. Functions are read-only operations (e.g., RetrievePrincipalAccess, RetrieveTotalRecordCount, InitializeFrom). For bound functions, provide the entity set name and record ID.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `functionName` | string | Yes | Function name \(e.g., RetrievePrincipalAccess, RetrieveTotalRecordCount\). Do not include the Microsoft.Dynamics.CRM. namespace prefix for unbound functions. |
| `entitySetName` | string | No | Entity set name for bound functions \(e.g., systemusers\). Leave empty for unbound functions. |
| `recordId` | string | No | Record GUID for bound functions. Leave empty for unbound functions. |
| `parameters` | string | No | Function parameters as a comma-separated list of name=value pairs for the URL \(e.g., "LocalizedStandardName=\'Pacific Standard Time\'"\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `result` | object | Function response data. Structure varies by function. |
| `success` | boolean | Whether the function executed successfully |
### `microsoft_dataverse_fetchxml_query`

Execute a FetchXML query against a Microsoft Dataverse table. FetchXML supports aggregation, grouping, linked-entity joins, and complex filtering beyond OData capabilities.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `fetchXml` | string | Yes | FetchXML query string. Must include <fetch> root element and <entity> child element matching the table logical name. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `records` | array | Array of Dataverse records. Each record has dynamic columns based on the table schema. |
| `count` | number | Number of records returned in the current page |
| `fetchXmlPagingCookie` | string | Paging cookie for retrieving the next page of results |
| `moreRecords` | boolean | Whether more records are available beyond the current page |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_get_record`

Retrieve a single record from a Microsoft Dataverse table by its ID. Supports $select and $expand OData query options.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to retrieve |
| `select` | string | No | Comma-separated list of columns to return \(OData $select\) |
| `expand` | string | No | Navigation properties to expand \(OData $expand\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `record` | object | Dataverse record object. Contains dynamic columns based on the queried table, plus OData metadata fields. |
| `recordId` | string | The record primary key ID \(auto-detected from response\) |
| `success` | boolean | Whether the record was retrieved successfully |
### `microsoft_dataverse_list_records`

Query and list records from a Microsoft Dataverse table. Supports OData query options for filtering, selecting columns, ordering, and pagination.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `select` | string | No | Comma-separated list of columns to return \(OData $select\) |
| `filter` | string | No | OData $filter expression \(e.g., statecode eq 0\) |
| `orderBy` | string | No | OData $orderby expression \(e.g., name asc, createdon desc\) |
| `top` | number | No | Maximum number of records to return \(OData $top\) |
| `expand` | string | No | Navigation properties to expand \(OData $expand\) |
| `count` | string | No | Set to "true" to include total record count in response \(OData $count\) |
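For instance, fetching the ten most recently created active accounts might combine the options like this (a hedged sketch; column names assume the standard account table):

```typescript
const listInputs = {
  environmentUrl: 'https://myorg.crm.dynamics.com',
  entitySetName: 'accounts',
  select: 'name,accountid,createdon', // $select
  filter: 'statecode eq 0',           // $filter: active records only
  orderBy: 'createdon desc',          // $orderby
  top: 10,                            // $top
}
```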
#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `records` | array | Array of Dataverse records. Each record has dynamic columns based on the table schema. |
| `count` | number | Number of records returned in the current page |
| `totalCount` | number | Total number of matching records server-side \(requires $count=true\) |
| `nextLink` | string | URL for the next page of results |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_search`

Perform a full-text relevance search across Microsoft Dataverse tables. Requires Dataverse Search to be enabled on the environment. Supports simple and Lucene query syntax.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `searchTerm` | string | Yes | Search text \(1-100 chars\). Supports simple syntax: + \(AND\), \| \(OR\), - \(NOT\), * \(wildcard\), "exact phrase" |
| `entities` | string | No | JSON array of search entity configs. Each object: \{"Name":"account","SelectColumns":\["name"\],"SearchColumns":\["name"\],"Filter":"statecode eq 0"\} |
| `filter` | string | No | Global OData filter applied across all entities \(e.g., "createdon gt 2024-01-01"\) |
| `facets` | string | No | JSON array of facet specifications \(e.g., \["entityname,count:100","ownerid,count:100"\]\) |
| `top` | number | No | Maximum number of results \(default: 50, max: 100\) |
| `skip` | number | No | Number of results to skip for pagination |
| `orderBy` | string | No | JSON array of sort expressions \(e.g., \["createdon desc"\]\) |
| `searchMode` | string | No | Search mode: "any" \(default, match any term\) or "all" \(match all terms\) |
| `searchType` | string | No | Query type: "simple" \(default\) or "lucene" \(enables regex, fuzzy, proximity, boosting\) |
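Putting the JSON-string parameters together, a faceted two-table search might look like this sketch (table and column names are illustrative):

```typescript
const searchInputs = {
  environmentUrl: 'https://myorg.crm.dynamics.com',
  searchTerm: 'contoso*', // wildcard suffix match
  entities: JSON.stringify([
    { Name: 'account', SelectColumns: ['name'], SearchColumns: ['name'], Filter: 'statecode eq 0' },
    { Name: 'contact', SelectColumns: ['fullname'], SearchColumns: ['fullname'] },
  ]),
  facets: JSON.stringify(['entityname,count:100']),
  top: 25,
}
```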
#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `results` | array | Array of search result objects |
| ↳ `Id` | string | Record GUID |
| ↳ `EntityName` | string | Table logical name \(e.g., account, contact\) |
| ↳ `ObjectTypeCode` | number | Entity type code |
| ↳ `Attributes` | object | Record attributes matching the search. Keys are column logical names. |
| ↳ `Highlights` | object | Highlighted search matches. Keys are column names, values are arrays of strings with \{crmhit\}/\{/crmhit\} markers. |
| ↳ `Score` | number | Relevance score for this result |
| `totalCount` | number | Total number of matching records across all tables |
| `count` | number | Number of results returned in this page |
| `facets` | object | Facet results when facets were requested. Keys are facet names, values are arrays of facet value objects with count and value properties. |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_update_multiple`

Update multiple records of the same table type in a single request. Each record must include its primary key. Only include columns that need to be changed. Recommended batch size: 100-1000 records.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `entityLogicalName` | string | Yes | Table logical name for @odata.type annotation \(e.g., account, contact\). Used to set Microsoft.Dynamics.CRM.\{entityLogicalName\} on each record. |
| `records` | object | Yes | Array of record objects to update. Each record must include its primary key \(e.g., accountid\) and only the columns being changed. The @odata.type annotation is added automatically. |
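A minimal batch might look like this (placeholder GUIDs; each record carries its key plus only the changed columns):

```typescript
const updateMultipleInputs = {
  environmentUrl: 'https://myorg.crm.dynamics.com',
  entitySetName: 'accounts',
  entityLogicalName: 'account', // the tool adds @odata.type = Microsoft.Dynamics.CRM.account
  records: [
    { accountid: '00000000-0000-0000-0000-000000000001', telephone1: '555-0100' },
    { accountid: '00000000-0000-0000-0000-000000000002', websiteurl: 'https://example.com' },
  ],
}
```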
#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether all records were updated successfully |
### `microsoft_dataverse_update_record`

Update an existing record in a Microsoft Dataverse table. Only send the columns you want to change.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to update |
| `data` | object | Yes | Record data to update as a JSON object with column names as keys |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | The ID of the updated record |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_upload_file`

Upload a file to a file or image column on a Dataverse record. Supports single-request upload for files up to 128 MB. The file content must be provided as a base64-encoded string.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | Record GUID to upload the file to |
| `fileColumn` | string | Yes | File or image column logical name \(e.g., entityimage, cr_document\) |
| `fileName` | string | Yes | Name of the file being uploaded \(e.g., document.pdf\) |
| `file` | file | No | File to upload \(UserFile object\) |
| `fileContent` | string | No | Base64-encoded file content \(legacy\) |
#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | Record GUID the file was uploaded to |
| `fileColumn` | string | File column the file was uploaded to |
| `fileName` | string | Name of the uploaded file |
| `success` | boolean | Whether the file was uploaded successfully |
### `microsoft_dataverse_upsert_record`

Create or update a record in a Microsoft Dataverse table. If a record with the given ID exists, it is updated; otherwise, a new record is created.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to upsert |
| `data` | object | Yes | Record data as a JSON object with column names as keys |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | The ID of the upserted record |
| `created` | boolean | True if the record was created, false if updated |
| `record` | object | Dataverse record object. Contains dynamic columns based on the queried table, plus OData metadata fields. |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_whoami`

Retrieve the current authenticated user information from Microsoft Dataverse. Useful for testing connectivity and getting the user ID, business unit ID, and organization ID.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `userId` | string | The authenticated user ID |
| `businessUnitId` | string | The business unit ID |
| `organizationId` | string | The organization ID |
| `success` | boolean | Operation success status |
@@ -49,6 +49,7 @@ Retrieve all deals from Pipedrive with optional filters
| `pipeline_id` | string | No | If supplied, only deals in the specified pipeline are returned \(e.g., "1"\) |
| `updated_since` | string | No | If set, only deals updated after this time are returned. Format: 2025-01-01T10:20:00Z |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |

#### Output

@@ -74,6 +75,8 @@ Retrieve all deals from Pipedrive with optional filters
| `metadata` | object | Pagination metadata for the response |
| ↳ `total_items` | number | Total number of items |
| ↳ `has_more` | boolean | Whether more items are available |
| ↳ `next_cursor` | string | Cursor for fetching the next page \(v2 endpoints\) |
| ↳ `next_start` | number | Offset for fetching the next page \(v1 endpoints\) |
| `success` | boolean | Operation success status |
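Since v2 endpoints page with `next_cursor` while v1 endpoints use `next_start`, a cursor-driven loop is the usual way to drain results. A minimal sketch, assuming a hypothetical `runGetDeals` helper that invokes this tool and that the deal list is returned under a `deals` field:

```typescript
async function fetchAllDeals(
  runGetDeals: (params: Record<string, unknown>) => Promise<any>
): Promise<any[]> {
  const all: any[] = []
  let cursor: string | undefined
  do {
    const page = await runGetDeals({ limit: '100', cursor })
    all.push(...(page.deals ?? [])) // field name assumed for illustration
    cursor = page.metadata?.has_more ? page.metadata.next_cursor : undefined
  } while (cursor)
  return all
}
```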
### `pipedrive_get_deal`

@@ -148,10 +151,9 @@ Retrieve files from Pipedrive with optional filters

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `deal_id` | string | No | Filter files by deal ID \(e.g., "123"\) |
| `person_id` | string | No | Filter files by person ID \(e.g., "456"\) |
| `org_id` | string | No | Filter files by organization ID \(e.g., "789"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `sort` | string | No | Sort files by field \(supported: "id", "update_time"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 100\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
| `downloadFiles` | boolean | No | Download file contents into file outputs |

#### Output
@@ -171,6 +173,8 @@ Retrieve files from Pipedrive with optional filters
| ↳ `url` | string | File download URL |
| `downloadedFiles` | file[] | Downloaded files from Pipedrive |
| `total_items` | number | Total number of files returned |
| `has_more` | boolean | Whether more files are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |

### `pipedrive_get_mail_messages`
@@ -183,6 +187,7 @@ Retrieve mail threads from Pipedrive mailbox
| --------- | ---- | -------- | ----------- |
| `folder` | string | No | Filter by folder: inbox, drafts, sent, archive \(default: inbox\) |
| `limit` | string | No | Number of results to return \(e.g., "25", default: 50\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |

#### Output

@@ -190,6 +195,8 @@ Retrieve mail threads from Pipedrive mailbox
| --------- | ---- | ----------- |
| `messages` | array | Array of mail thread objects from Pipedrive mailbox |
| `total_items` | number | Total number of mail threads returned |
| `has_more` | boolean | Whether more messages are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |

### `pipedrive_get_mail_thread`
@@ -221,7 +228,7 @@ Retrieve all pipelines from Pipedrive
| `sort_by` | string | No | Field to sort by: id, update_time, add_time \(default: id\) |
| `sort_direction` | string | No | Sorting direction: asc, desc \(default: asc\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |

#### Output

@@ -237,6 +244,8 @@ Retrieve all pipelines from Pipedrive
| ↳ `add_time` | string | When the pipeline was created |
| ↳ `update_time` | string | When the pipeline was last updated |
| `total_items` | number | Total number of pipelines returned |
| `has_more` | boolean | Whether more pipelines are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |

### `pipedrive_get_pipeline_deals`
@@ -249,8 +258,8 @@ Retrieve all deals in a specific pipeline
| --------- | ---- | -------- | ----------- |
| `pipeline_id` | string | Yes | The ID of the pipeline \(e.g., "1"\) |
| `stage_id` | string | No | Filter by specific stage within the pipeline \(e.g., "2"\) |
| `status` | string | No | Filter by deal status: open, won, lost |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |

#### Output

@@ -271,6 +280,7 @@ Retrieve all projects or a specific project from Pipedrive
| `project_id` | string | No | Optional: ID of a specific project to retrieve \(e.g., "123"\) |
| `status` | string | No | Filter by project status: open, completed, deleted \(only for listing all\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500, only for listing all\) |
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |

#### Output

@@ -279,6 +289,8 @@ Retrieve all projects or a specific project from Pipedrive
| `projects` | array | Array of project objects \(when listing all\) |
| `project` | object | Single project object \(when project_id is provided\) |
| `total_items` | number | Total number of projects returned |
| `has_more` | boolean | Whether more projects are available |
| `next_cursor` | string | Cursor for fetching the next page |
| `success` | boolean | Operation success status |

### `pipedrive_create_project`
@@ -309,12 +321,11 @@ Retrieve activities (tasks) from Pipedrive with optional filters

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `deal_id` | string | No | Filter activities by deal ID \(e.g., "123"\) |
| `person_id` | string | No | Filter activities by person ID \(e.g., "456"\) |
| `org_id` | string | No | Filter activities by organization ID \(e.g., "789"\) |
| `user_id` | string | No | Filter activities by user ID \(e.g., "123"\) |
| `type` | string | No | Filter by activity type \(call, meeting, task, deadline, email, lunch\) |
| `done` | string | No | Filter by completion status: 0 for not done, 1 for done |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |

#### Output

@@ -335,6 +346,8 @@ Retrieve activities (tasks) from Pipedrive with optional filters
| ↳ `add_time` | string | When the activity was created |
| ↳ `update_time` | string | When the activity was last updated |
| `total_items` | number | Total number of activities returned |
| `has_more` | boolean | Whether more activities are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |

### `pipedrive_create_activity`
@@ -399,6 +412,7 @@ Retrieve all leads or a specific lead from Pipedrive
| `person_id` | string | No | Filter by person ID \(e.g., "456"\) |
| `organization_id` | string | No | Filter by organization ID \(e.g., "789"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |

#### Output

@@ -433,6 +447,8 @@ Retrieve all leads or a specific lead from Pipedrive
| ↳ `add_time` | string | When the lead was created \(ISO 8601\) |
| ↳ `update_time` | string | When the lead was last updated \(ISO 8601\) |
| `total_items` | number | Total number of leads returned |
| `has_more` | boolean | Whether more leads are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |

### `pipedrive_create_lead`
apps/docs/content/docs/en/tools/redis.mdx
Normal file
@@ -0,0 +1,452 @@
---
title: Redis
description: Key-value operations with Redis
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="redis"
  color="#FF4438"
/>

{/* MANUAL-CONTENT-START:intro */}
[Redis](https://redis.io/) is an open-source, in-memory data structure store, used as a distributed key-value database, cache, and message broker. Redis supports a variety of data structures including strings, hashes, lists, sets, and more, making it highly flexible for different application scenarios.

With Redis, you can:

- **Store and retrieve key-value data instantly**: Use Redis as a fast database, cache, or session store for high performance.
- **Work with multiple data structures**: Manage not just strings, but also lists, hashes, sets, sorted sets, streams, and bitmaps.
- **Perform atomic operations**: Safely manipulate data using atomic commands and transactions.
- **Support pub/sub messaging**: Use Redis’s publisher/subscriber features for real-time event handling and messaging.
- **Set automatic expiration policies**: Assign TTLs to keys for caching and time-sensitive data.
- **Scale horizontally**: Use Redis Cluster for sharding, high availability, and scalable workloads.

In Sim, the Redis integration lets your agents connect to any Redis-compatible instance to perform key-value, hash, list, and utility operations. You can build workflows that involve storing, retrieving, or manipulating data in Redis, or manage your app’s cache, sessions, or real-time messaging, directly within your Sim workspace.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Connect to any Redis instance to perform key-value, hash, list, and utility operations via a direct connection.

## Tools
### `redis_get`

Get the value of a key from Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The key to retrieve |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was retrieved |
| `value` | string | The value of the key, or null if the key does not exist |
### `redis_set`

Set the value of a key in Redis with an optional expiration time in seconds.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The key to set |
| `value` | string | Yes | The value to store |
| `ex` | number | No | Expiration time in seconds \(optional\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was set |
| `result` | string | The result of the SET operation \(typically "OK"\) |
### `redis_delete`

Delete a key from Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The key to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was deleted |
| `deletedCount` | number | Number of keys deleted \(0 if key did not exist, 1 if deleted\) |
### `redis_keys`

List all keys matching a pattern in Redis. Avoid using on large databases in production; use the Redis Command tool with SCAN for large key spaces.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `pattern` | string | No | Pattern to match keys \(default: * for all keys\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `pattern` | string | The pattern used to match keys |
| `keys` | array | List of keys matching the pattern |
| `count` | number | Number of keys found |
### `redis_command`

Execute a raw Redis command as a JSON array \(e.g. \["SET", "key", "value"\]\).

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `command` | string | Yes | Redis command as a JSON array \(e.g. \["SET", "key", "value"\]\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `command` | string | The command that was executed |
| `result` | json | The result of the command |
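For example, walking a large key space with SCAN (rather than KEYS) can be done through this tool. A minimal sketch, assuming a `session:*` naming scheme:

```typescript
// Build the command string for one SCAN page; the reply is [nextCursor, keys].
const scanCommand = (cursor: string) =>
  JSON.stringify(['SCAN', cursor, 'MATCH', 'session:*', 'COUNT', '1000'])

// Start with scanCommand('0') and repeat with each returned cursor
// until the server hands back "0" again.
```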
### `redis_hset`

Set a field in a hash stored at a key in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The hash key |
| `field` | string | Yes | The field name within the hash |
| `value` | string | Yes | The value to set for the field |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The hash key |
| `field` | string | The field that was set |
| `result` | number | Number of fields added \(1 if new, 0 if updated\) |
### `redis_hget`

Get the value of a field in a hash stored at a key in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The hash key |
| `field` | string | Yes | The field name to retrieve |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The hash key |
| `field` | string | The field that was retrieved |
| `value` | string | The field value, or null if the field or key does not exist |
### `redis_hgetall`

Get all fields and values of a hash stored at a key in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The hash key |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The hash key |
| `fields` | object | All field-value pairs in the hash as a key-value object. Empty object if the key does not exist. |
| `fieldCount` | number | Number of fields in the hash |
### `redis_hdel`

Delete a field from a hash stored at a key in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The hash key |
| `field` | string | Yes | The field name to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The hash key |
| `field` | string | The field that was deleted |
| `deleted` | number | Number of fields removed \(1 if deleted, 0 if field did not exist\) |
### `redis_incr`

Increment the integer value of a key by one in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The key to increment |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was incremented |
| `value` | number | The new value after increment |
### `redis_incrby`

Increment the integer value of a key by a given amount in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The key to increment |
| `increment` | number | Yes | Amount to increment by \(negative to decrement\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was incremented |
| `value` | number | The new value after increment |
### `redis_expire`

Set an expiration time (in seconds) on a key in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The key to set expiration on |
| `seconds` | number | Yes | Timeout in seconds |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that expiration was set on |
| `result` | number | 1 if the timeout was set, 0 if the key does not exist |
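INCR and EXPIRE together give a simple fixed-window rate limiter. A minimal sketch using the node-redis client for illustration (connection handling simplified; not a Sim tool call):

```typescript
import { createClient } from 'redis'

async function allowRequest(url: string, userId: string, limit = 100): Promise<boolean> {
  const client = createClient({ url })
  await client.connect()
  try {
    const key = `rate:${userId}`
    const count = await client.incr(key)          // atomic counter bump
    if (count === 1) await client.expire(key, 60) // open a 60-second window on first hit
    return count <= limit
  } finally {
    await client.quit()
  }
}
```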
### `redis_ttl`

Get the remaining time to live (in seconds) of a key in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The key to check TTL for |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was checked |
| `ttl` | number | Remaining TTL in seconds. Positive integer if TTL set, -1 if no expiration, -2 if key does not exist. |
### `redis_persist`

Remove the expiration from a key in Redis, making it persist indefinitely.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The key to persist |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was persisted |
| `result` | number | 1 if the expiration was removed, 0 if the key does not exist or has no expiration |
### `redis_lpush`

Prepend a value to a list stored at a key in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The list key |
| `value` | string | Yes | The value to prepend |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The list key |
| `length` | number | Length of the list after the push |
### `redis_rpush`

Append a value to the end of a list stored at a key in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The list key |
| `value` | string | Yes | The value to append |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The list key |
| `length` | number | Length of the list after the push |
### `redis_lpop`

Remove and return the first element of a list stored at a key in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The list key |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The list key |
| `value` | string | The removed element, or null if the list is empty |
### `redis_rpop`

Remove and return the last element of a list stored at a key in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The list key |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The list key |
| `value` | string | The removed element, or null if the list is empty |
### `redis_llen`

Get the length of a list stored at a key in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The list key |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The list key |
| `length` | number | The length of the list, or 0 if the key does not exist |
### `redis_lrange`

Get a range of elements from a list stored at a key in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The list key |
| `start` | number | Yes | Start index \(0-based\) |
| `stop` | number | Yes | Stop index \(-1 for all elements\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The list key |
| `values` | array | List elements in the specified range |
| `count` | number | Number of elements returned |
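Together, the list tools cover a basic FIFO queue: LPUSH enqueues at the head and RPOP dequeues from the tail. A hedged sketch of the tool inputs (the URL and key are placeholders):

```typescript
// Enqueue with redis_lpush:
const enqueue = { url: 'redis://localhost:6379', key: 'jobs', value: JSON.stringify({ id: 1 }) }

// Dequeue with redis_rpop (value comes back null once the list is empty):
const dequeue = { url: 'redis://localhost:6379', key: 'jobs' }
```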
### `redis_exists`

Check if a key exists in Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The key to check |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was checked |
| `exists` | boolean | Whether the key exists \(true\) or not \(false\) |
### `redis_setnx`

Set the value of a key in Redis only if the key does not already exist.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
| `key` | string | Yes | The key to set |
| `value` | string | Yes | The value to store |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was set |
| `wasSet` | boolean | Whether the key was set \(true\) or already existed \(false\) |
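SETNX is the classic building block for a simple lock. A hedged sketch of the sequence (key and timeout are placeholders):

```typescript
// 1. Try to take the lock with redis_setnx:
const acquire = { url: 'redis://localhost:6379', key: 'lock:report', value: 'worker-1' }
// 2. If wasSet === true, bound the hold time with redis_expire:
const bound = { url: 'redis://localhost:6379', key: 'lock:report', seconds: 30 }
// 3. Do the work, then release with redis_delete on the same key.
```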
apps/docs/content/docs/en/tools/revenuecat.mdx
Normal file
@@ -0,0 +1,456 @@
---
title: RevenueCat
description: Manage in-app subscriptions and entitlements
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="revenuecat"
  color="#F25A5A"
/>

{/* MANUAL-CONTENT-START:intro */}
[RevenueCat](https://www.revenuecat.com/) is a subscription management platform that enables you to easily set up, manage, and analyze in-app subscriptions for your apps. With RevenueCat, you can handle the complexities of in-app purchases across platforms like iOS, Android, and web—all through a single unified API.

With RevenueCat, you can:

- **Manage subscribers**: Track user subscriptions, entitlements, and purchases across all platforms in real time
- **Simplify implementation**: Integrate RevenueCat’s SDKs to abstract away App Store and Play Store purchase logic
- **Automate entitlement logic**: Define and manage what features users should receive when they purchase or renew
- **Analyze revenue**: Access dashboards and analytics to view churn, LTV, revenue, active subscriptions, and more
- **Grant or revoke entitlements**: Manually adjust user access (for example, for customer support or promotions)
- **Operate globally**: Support purchases, refunds, and promotions worldwide with ease

In Sim, the RevenueCat integration allows your agents to fetch and manage subscriber data, review and update entitlements, and automate subscription-related workflows. Use RevenueCat to centralize subscription operations for your apps directly within your Sim workspace.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Integrate RevenueCat into the workflow. Manage subscribers, entitlements, offerings, and Google Play subscriptions. Retrieve customer subscription status, grant or revoke promotional entitlements, record purchases, update subscriber attributes, and manage Google Play subscription billing.

## Tools
### `revenuecat_get_customer`

Retrieve subscriber information by app user ID

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
| `appUserId` | string | Yes | The app user ID of the subscriber |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `subscriber` | object | The subscriber object with subscriptions and entitlements |
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
| ↳ `original_app_user_id` | string | Original app user ID |
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
| ↳ `management_url` | string | URL for managing the subscriber subscriptions |
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
| ↳ `store_transaction_id` | string | Store transaction identifier |
| ↳ `original_transaction_id` | string | Original transaction identifier |
| ↳ `purchase_date` | string | ISO 8601 purchase date |
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
| ↳ `grant_date` | string | ISO 8601 grant date |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `product_identifier` | string | Product identifier |
| ↳ `is_active` | boolean | Whether the entitlement is active |
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
| ↳ `store` | string | Store the entitlement was granted from |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |
| `metadata` | object | Subscriber summary metadata |
| ↳ `app_user_id` | string | The app user ID |
| ↳ `first_seen` | string | ISO 8601 date when the subscriber was first seen |
| ↳ `active_entitlements` | number | Number of active entitlements |
| ↳ `active_subscriptions` | number | Number of active subscriptions |
### `revenuecat_delete_customer`

Permanently delete a subscriber and all associated data

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
| `appUserId` | string | Yes | The app user ID of the subscriber to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deleted` | boolean | Whether the subscriber was deleted |
| `app_user_id` | string | The deleted app user ID |
### `revenuecat_create_purchase`

Record a purchase (receipt) for a subscriber via the REST API

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | RevenueCat API key \(public or secret\) |
| `appUserId` | string | Yes | The app user ID of the subscriber |
| `fetchToken` | string | Yes | The receipt token or purchase token from the store \(App Store receipt, Google Play purchase token, or Stripe subscription ID\) |
| `productId` | string | Yes | The product identifier for the purchase |
| `price` | number | No | The price of the product in the currency specified |
| `currency` | string | No | ISO 4217 currency code \(e.g., USD, EUR\) |
| `isRestore` | boolean | No | Whether this is a restore of a previous purchase |
| `platform` | string | No | Platform of the purchase \(ios, android, amazon, macos, stripe\). Required for Stripe and Paddle purchases. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `subscriber` | object | The updated subscriber object after recording the purchase |
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
| ↳ `original_app_user_id` | string | Original app user ID |
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
| ↳ `management_url` | string | URL for managing the subscriber subscriptions |
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
| ↳ `store_transaction_id` | string | Store transaction identifier |
| ↳ `original_transaction_id` | string | Original transaction identifier |
| ↳ `purchase_date` | string | ISO 8601 purchase date |
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
| ↳ `grant_date` | string | ISO 8601 grant date |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `product_identifier` | string | Product identifier |
| ↳ `is_active` | boolean | Whether the entitlement is active |
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
| ↳ `store` | string | Store the entitlement was granted from |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |
### `revenuecat_grant_entitlement`

Grant a promotional entitlement to a subscriber

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
| `appUserId` | string | Yes | The app user ID of the subscriber |
| `entitlementIdentifier` | string | Yes | The entitlement identifier to grant |
| `duration` | string | Yes | Duration of the entitlement \(daily, three_day, weekly, monthly, two_month, three_month, six_month, yearly, lifetime\) |
| `startTimeMs` | number | No | Optional start time in milliseconds since Unix epoch. Set to a past time to achieve custom durations shorter than daily. |
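Backdating `startTimeMs` is how durations shorter than daily are achieved. For example, a daily grant that effectively expires one hour from now (simple clock arithmetic, shown as a sketch):

```typescript
const hourMs = 60 * 60 * 1000
// A "daily" entitlement lasts 24h from its start time, so starting it
// 23 hours in the past leaves exactly one hour of access.
const startTimeMs = Date.now() - 23 * hourMs
```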
#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `subscriber` | object | The updated subscriber object after granting the entitlement |
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
| ↳ `original_app_user_id` | string | Original app user ID |
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
| ↳ `management_url` | string | URL for managing the subscriber subscriptions |
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
| ↳ `store_transaction_id` | string | Store transaction identifier |
| ↳ `original_transaction_id` | string | Original transaction identifier |
| ↳ `purchase_date` | string | ISO 8601 purchase date |
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
| ↳ `grant_date` | string | ISO 8601 grant date |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `product_identifier` | string | Product identifier |
| ↳ `is_active` | boolean | Whether the entitlement is active |
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
| ↳ `store` | string | Store the entitlement was granted from |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |
### `revenuecat_revoke_entitlement`

Revoke all promotional entitlements for a specific entitlement identifier

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
| `appUserId` | string | Yes | The app user ID of the subscriber |
| `entitlementIdentifier` | string | Yes | The entitlement identifier to revoke |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `subscriber` | object | The updated subscriber object after revoking the entitlement |
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
| ↳ `original_app_user_id` | string | Original app user ID |
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
| ↳ `management_url` | string | URL for managing the subscriber subscriptions |
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
| ↳ `store_transaction_id` | string | Store transaction identifier |
| ↳ `original_transaction_id` | string | Original transaction identifier |
| ↳ `purchase_date` | string | ISO 8601 purchase date |
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
| ↳ `grant_date` | string | ISO 8601 grant date |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `product_identifier` | string | Product identifier |
| ↳ `is_active` | boolean | Whether the entitlement is active |
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
| ↳ `store` | string | Store the entitlement was granted from |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |
### `revenuecat_list_offerings`

List all offerings configured for the project

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | RevenueCat API key |
| `appUserId` | string | Yes | An app user ID to retrieve offerings for |
| `platform` | string | No | Platform to filter offerings \(ios, android, stripe, etc.\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `current_offering_id` | string | The identifier of the current offering |
| `offerings` | array | List of offerings |
| ↳ `identifier` | string | Offering identifier |
| ↳ `description` | string | Offering description |
| ↳ `packages` | array | List of packages in the offering |
| ↳ `identifier` | string | Package identifier |
| ↳ `platform_product_identifier` | string | Platform-specific product identifier |
| `metadata` | object | Offerings metadata |
| ↳ `count` | number | Number of offerings returned |
| ↳ `current_offering_id` | string | Current offering identifier |
### `revenuecat_update_subscriber_attributes`

Update custom subscriber attributes (e.g., $email, $displayName, or custom key-value pairs)

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
| `appUserId` | string | Yes | The app user ID of the subscriber |
| `attributes` | json | Yes | JSON object of attributes to set. Each key maps to an object with a "value" field. Example: \{"$email": \{"value": "user@example.com"\}, "$displayName": \{"value": "John"\}\} |
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `updated` | boolean | Whether the subscriber attributes were successfully updated |
|
||||
| `app_user_id` | string | The app user ID of the updated subscriber |
|
||||
|
||||
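To make the expected payload concrete, here is the `attributes` shape from the description above as a TypeScript literal (the custom `plan_tier` key is a hypothetical example):

```typescript
// Each key maps to an object with a "value" field. Keys starting with $
// are RevenueCat-reserved attributes; other keys are custom attributes.
const attributes = {
  $email: { value: 'user@example.com' },
  $displayName: { value: 'John' },
  plan_tier: { value: 'pro' }, // hypothetical custom attribute
}
```
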
### `revenuecat_defer_google_subscription`

Defer a Google Play subscription by extending its billing date by a number of days (Google Play only)

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
| `appUserId` | string | Yes | The app user ID of the subscriber |
| `productId` | string | Yes | The Google Play product identifier of the subscription to defer \(use the part before the colon for products set up after Feb 2023\) |
| `extendByDays` | number | Yes | Number of days to extend the subscription by \(1-365\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `subscriber` | object | The updated subscriber object after deferring the Google subscription |
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
| ↳ `original_app_user_id` | string | Original app user ID |
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
| ↳ `management_url` | string | URL for managing the subscriber subscriptions |
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
| ↳ `store_transaction_id` | string | Store transaction identifier |
| ↳ `original_transaction_id` | string | Original transaction identifier |
| ↳ `purchase_date` | string | ISO 8601 purchase date |
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
| ↳ `grant_date` | string | ISO 8601 grant date |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `product_identifier` | string | Product identifier |
| ↳ `is_active` | boolean | Whether the entitlement is active |
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
| ↳ `store` | string | Store the entitlement was granted from |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |

### `revenuecat_refund_google_subscription`

Refund and optionally revoke a Google Play subscription (Google Play only)

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
| `appUserId` | string | Yes | The app user ID of the subscriber |
| `productId` | string | Yes | The Google Play product identifier of the subscription to refund |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `subscriber` | object | The updated subscriber object after refunding the Google subscription |
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
| ↳ `original_app_user_id` | string | Original app user ID |
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
| ↳ `management_url` | string | URL for managing the subscriber subscriptions |
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
| ↳ `store_transaction_id` | string | Store transaction identifier |
| ↳ `original_transaction_id` | string | Original transaction identifier |
| ↳ `purchase_date` | string | ISO 8601 purchase date |
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
| ↳ `grant_date` | string | ISO 8601 grant date |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `product_identifier` | string | Product identifier |
| ↳ `is_active` | boolean | Whether the entitlement is active |
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
| ↳ `store` | string | Store the entitlement was granted from |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |

### `revenuecat_revoke_google_subscription`

Immediately revoke access to a Google Play subscription and issue a refund (Google Play only)

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
| `appUserId` | string | Yes | The app user ID of the subscriber |
| `productId` | string | Yes | The Google Play product identifier of the subscription to revoke |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `subscriber` | object | The updated subscriber object after revoking the Google subscription |
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
| ↳ `original_app_user_id` | string | Original app user ID |
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
| ↳ `management_url` | string | URL for managing the subscriber subscriptions |
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
| ↳ `store_transaction_id` | string | Store transaction identifier |
| ↳ `original_transaction_id` | string | Original transaction identifier |
| ↳ `purchase_date` | string | ISO 8601 purchase date |
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
| ↳ `grant_date` | string | ISO 8601 grant date |
| ↳ `expires_date` | string | ISO 8601 expiration date |
| ↳ `product_identifier` | string | Product identifier |
| ↳ `is_active` | boolean | Whether the entitlement is active |
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
| ↳ `store` | string | Store the entitlement was granted from |
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |

@@ -57,6 +57,7 @@ Query data from a Supabase table
| `filter` | string | No | PostgREST filter \(e.g., "id=eq.123"\) |
| `orderBy` | string | No | Column to order by \(add DESC for descending\) |
| `limit` | number | No | Maximum number of rows to return |
| `offset` | number | No | Number of rows to skip \(for pagination\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |

#### Output

@@ -211,6 +212,7 @@ Perform full-text search on a Supabase table
| `searchType` | string | No | Search type: plain, phrase, or websearch \(default: websearch\) |
| `language` | string | No | Language for text search configuration \(default: english\) |
| `limit` | number | No | Maximum number of rows to return |
| `offset` | number | No | Number of rows to skip \(for pagination\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |

#### Output

351  apps/docs/content/docs/en/tools/table.mdx  Normal file
@@ -0,0 +1,351 @@
---
title: Table
description: User-defined data tables for storing and querying structured data
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="table"
  color="#10B981"
/>

Tables allow you to create and manage custom data tables directly within Sim. Store, query, and manipulate structured data within your workflows without needing external database integrations.

**Why Use Tables?**
- **No external setup**: Create tables instantly without configuring external databases
- **Workflow-native**: Data persists across workflow executions and is accessible from any workflow in your workspace
- **Flexible schema**: Define columns with types (string, number, boolean, date, json) and constraints (required, unique)
- **Powerful querying**: Filter, sort, and paginate data using MongoDB-style operators
- **Agent-friendly**: Tables can be used as tools by AI agents for dynamic data storage and retrieval

**Key Features:**
- Create tables with custom schemas
- Insert, update, upsert, and delete rows
- Query with filters and sorting
- Batch operations for bulk inserts
- Bulk updates and deletes by filter
- Up to 10,000 rows per table, 100 tables per workspace

## Creating Tables

Tables are created from the **Tables** section in the sidebar. Each table requires:
- **Name**: Alphanumeric with underscores (e.g., `customer_leads`)
- **Description**: Optional description of the table's purpose
- **Schema**: Define columns with name, type, and optional constraints

### Column Types

| Type | Description | Example Values |
|------|-------------|----------------|
| `string` | Text data | `"John Doe"`, `"active"` |
| `number` | Numeric data | `42`, `99.99` |
| `boolean` | True/false values | `true`, `false` |
| `date` | Date/time values | `"2024-01-15T10:30:00Z"` |
| `json` | Complex nested data | `{"address": {"city": "NYC"}}` |

### Column Constraints

- **Required**: Column must have a value (cannot be null)
- **Unique**: Values must be unique across all rows (enables upsert matching)

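As a sketch of how a schema fits together (the object shape is illustrative, not the exact UI or API format):

```typescript
// Illustrative schema for a `customer_leads` table: names, types, and
// constraints mirror the options described above.
const customerLeadsSchema = {
  name: 'customer_leads',
  description: 'Inbound leads captured from workflows',
  columns: [
    { name: 'email', type: 'string', required: true, unique: true }, // unique enables upsert matching
    { name: 'score', type: 'number', required: false, unique: false },
    { name: 'subscribed', type: 'boolean', required: false, unique: false },
    { name: 'profile', type: 'json', required: false, unique: false },
  ],
}
```
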
## Usage Instructions

Create and manage custom data tables. Store, query, and manipulate structured data within workflows.

## Tools

### `table_query_rows`

Query rows from a table with filtering, sorting, and pagination

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | No | Filter conditions using MongoDB-style operators |
| `sort` | object | No | Sort order as \{column: "asc"\|"desc"\} |
| `limit` | number | No | Maximum rows to return \(default: 100, max: 1000\) |
| `offset` | number | No | Number of rows to skip \(default: 0\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether query succeeded |
| `rows` | array | Query result rows |
| `rowCount` | number | Number of rows returned |
| `totalCount` | number | Total rows matching filter |
| `limit` | number | Limit used in query |
| `offset` | number | Offset used in query |

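Because `limit` caps at 1000, reading a large table means paging with `offset`. A minimal sketch, assuming a hypothetical `queryRows` wrapper around this tool:

```typescript
// `queryRows` is a hypothetical wrapper around table_query_rows;
// its name and signature are illustrative only.
declare function queryRows(
  tableId: string,
  opts: { filter?: object; limit?: number; offset?: number }
): Promise<{ rows: Record<string, unknown>[]; totalCount: number }>

async function fetchAllRows(tableId: string, filter?: object) {
  const limit = 1000 // maximum rows per query
  const all: Record<string, unknown>[] = []
  for (let offset = 0; ; offset += limit) {
    const page = await queryRows(tableId, { filter, limit, offset })
    all.push(...page.rows)
    // Stop once a short page arrives or everything matching is collected.
    if (page.rows.length < limit || all.length >= page.totalCount) break
  }
  return all
}
```
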
### `table_insert_row`

Insert a new row into a table

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `data` | object | Yes | Row data as JSON object matching the table schema |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was inserted |
| `row` | object | Inserted row data including generated ID |
| `message` | string | Status message |

### `table_upsert_row`

Insert or update a row based on unique column constraints. If a row with matching unique field exists, update it; otherwise insert a new row.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `data` | object | Yes | Row data to insert or update |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was upserted |
| `row` | object | Upserted row data |
| `operation` | string | Operation performed: "insert" or "update" |
| `message` | string | Status message |

### `table_batch_insert_rows`

Insert multiple rows at once (up to 1000 rows per batch)

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rows` | array | Yes | Array of row data objects to insert |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether batch insert succeeded |
| `rows` | array | Array of inserted rows with IDs |
| `insertedCount` | number | Number of rows inserted |
| `message` | string | Status message |

### `table_update_row`

Update a specific row by its ID

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to update |
| `data` | object | Yes | Data to update \(partial update supported\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was updated |
| `row` | object | Updated row data |
| `message` | string | Status message |

### `table_update_rows_by_filter`

Update multiple rows matching a filter condition

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | Yes | Filter to match rows for update |
| `data` | object | Yes | Data to apply to matching rows |
| `limit` | number | No | Maximum rows to update \(default: 1000\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether update succeeded |
| `updatedCount` | number | Number of rows updated |
| `updatedRowIds` | array | IDs of updated rows |
| `message` | string | Status message |

### `table_delete_row`

Delete a specific row by its ID

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was deleted |
| `deletedCount` | number | Number of rows deleted \(1 or 0\) |
| `message` | string | Status message |

### `table_delete_rows_by_filter`

Delete multiple rows matching a filter condition

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | Yes | Filter to match rows for deletion |
| `limit` | number | No | Maximum rows to delete \(default: 1000\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether delete succeeded |
| `deletedCount` | number | Number of rows deleted |
| `deletedRowIds` | array | IDs of deleted rows |
| `message` | string | Status message |

### `table_get_row`

Get a single row by its ID

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to retrieve |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was found |
| `row` | object | Row data |
| `message` | string | Status message |

### `table_get_schema`

Get the schema definition for a table

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether schema was retrieved |
| `name` | string | Table name |
| `columns` | array | Array of column definitions |
| `message` | string | Status message |

## Filter Operators

Filters use MongoDB-style operators for flexible querying:

| Operator | Description | Example |
|----------|-------------|---------|
| `$eq` | Equals | `{"status": {"$eq": "active"}}` or `{"status": "active"}` |
| `$ne` | Not equals | `{"status": {"$ne": "deleted"}}` |
| `$gt` | Greater than | `{"age": {"$gt": 18}}` |
| `$gte` | Greater than or equal | `{"score": {"$gte": 80}}` |
| `$lt` | Less than | `{"price": {"$lt": 100}}` |
| `$lte` | Less than or equal | `{"quantity": {"$lte": 10}}` |
| `$in` | In array | `{"status": {"$in": ["active", "pending"]}}` |
| `$nin` | Not in array | `{"type": {"$nin": ["spam", "blocked"]}}` |
| `$contains` | String contains | `{"email": {"$contains": "@gmail.com"}}` |

### Combining Filters

Multiple field conditions are combined with AND logic:

```json
{
  "status": "active",
  "age": {"$gte": 18}
}
```

Use `$or` for OR logic:

```json
{
  "$or": [
    {"status": "active"},
    {"status": "pending"}
  ]
}
```

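The operator semantics are easiest to see in code. A minimal sketch of how such filters evaluate against a row (illustrative only; the real evaluation happens server-side):

```typescript
type Row = Record<string, unknown>

// A bare value (no $ operator) is shorthand for $eq.
function matchesCondition(value: unknown, cond: unknown): boolean {
  if (cond === null || typeof cond !== 'object' || Array.isArray(cond)) {
    return value === cond
  }
  return Object.entries(cond as Record<string, unknown>).every(([op, arg]) => {
    switch (op) {
      case '$eq': return value === arg
      case '$ne': return value !== arg
      case '$gt': return (value as number) > (arg as number)
      case '$gte': return (value as number) >= (arg as number)
      case '$lt': return (value as number) < (arg as number)
      case '$lte': return (value as number) <= (arg as number)
      case '$in': return (arg as unknown[]).includes(value)
      case '$nin': return !(arg as unknown[]).includes(value)
      case '$contains': return typeof value === 'string' && value.includes(String(arg))
      default: return false
    }
  })
}

// Top-level fields combine with AND; $or takes an array of sub-filters.
function matchesFilter(row: Row, filter: Record<string, unknown>): boolean {
  return Object.entries(filter).every(([key, cond]) =>
    key === '$or'
      ? (cond as Record<string, unknown>[]).some((sub) => matchesFilter(row, sub))
      : matchesCondition(row[key], cond)
  )
}

// matchesFilter({ status: 'active', age: 21 }, { status: 'active', age: { $gte: 18 } }) === true
```
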
## Sort Specification

Specify sort order with column names and direction:

```json
{
  "createdAt": "desc"
}
```

Multi-column sorting:

```json
{
  "priority": "desc",
  "name": "asc"
}
```

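In code terms, multi-column sorting behaves like a lexicographic comparator where earlier keys take precedence (a sketch, reusing `Row` from the filter example above):

```typescript
function compareRows(a: Row, b: Row, sort: Record<string, 'asc' | 'desc'>): number {
  for (const [col, dir] of Object.entries(sort)) {
    const av = a[col] as string | number
    const bv = b[col] as string | number
    if (av === bv) continue // tie: fall through to the next sort key
    const cmp = av < bv ? -1 : 1
    return dir === 'desc' ? -cmp : cmp
  }
  return 0
}

// rows.sort((a, b) => compareRows(a, b, { priority: 'desc', name: 'asc' }))
```
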
## Built-in Columns

Every row automatically includes:

| Column | Type | Description |
|--------|------|-------------|
| `id` | string | Unique row identifier |
| `createdAt` | date | When the row was created |
| `updatedAt` | date | When the row was last modified |

These can be used in filters and sorting.

## Limits

| Resource | Limit |
|----------|-------|
| Tables per workspace | 100 |
| Rows per table | 10,000 |
| Columns per table | 50 |
| Max row size | 100KB |
| String value length | 10,000 characters |
| Query limit | 1,000 rows |
| Batch insert size | 1,000 rows |
| Bulk update/delete | 1,000 rows |

## Notes

- Category: `blocks`
- Type: `table`
- Tables are scoped to workspaces and accessible from any workflow within that workspace
- Data persists across workflow executions
- Use unique constraints to enable upsert functionality
- The visual filter/sort builder provides an easy way to construct queries without writing JSON

@@ -43,6 +43,8 @@ Retrieve form responses from Typeform
| `formId` | string | Yes | Typeform form ID \(e.g., "abc123XYZ"\) |
| `apiKey` | string | Yes | Typeform Personal Access Token |
| `pageSize` | number | No | Number of responses to retrieve \(e.g., 10, 25, 50\) |
| `before` | string | No | Cursor token for fetching the next page of older responses |
| `after` | string | No | Cursor token for fetching the next page of newer responses |
| `since` | string | No | Retrieve responses submitted after this date \(e.g., "2024-01-01T00:00:00Z"\) |
| `until` | string | No | Retrieve responses submitted before this date \(e.g., "2024-12-31T23:59:59Z"\) |
| `completed` | string | No | Filter by completion status \(e.g., "true", "false", "all"\) |

357  apps/docs/content/docs/en/tools/upstash.mdx  Normal file
@@ -0,0 +1,357 @@
---
title: Upstash
description: Serverless Redis with Upstash
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="upstash"
  color="#181C1E"
/>

{/* MANUAL-CONTENT-START:intro */}
[Upstash](https://upstash.com/) is a serverless data platform designed for modern applications that need fast, simple, and scalable data storage with minimal setup. Upstash specializes in providing Redis and Kafka as fully managed, pay-per-request cloud services, making it a popular choice for developers building serverless, edge, and event-driven architectures.

With Upstash Redis, you can:

- **Store and retrieve data instantly**: Read and write key-value pairs, hashes, lists, sets, and more—all over a high-performance REST API.
- **Scale serverlessly**: No infrastructure to manage. Upstash automatically scales with your app and charges only for what you use.
- **Access globally**: Deploy near your users with multi-region support and global distribution.
- **Integrate easily**: Use Upstash’s REST API in serverless functions, edge workers, Next.js, Vercel, Cloudflare Workers, and more.
- **Automate with scripts**: Run Lua scripts for advanced transactions and automation.
- **Ensure security**: Protect your data with built-in authentication and TLS encryption.

In Sim, the Upstash integration empowers your agents and workflows to read, write, and manage data in Upstash Redis using simple, unified commands—perfect for building scalable automations, caching results, managing queues, and more, all without dealing with server management.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Connect to Upstash Redis to perform key-value, hash, list, and utility operations via the REST API.

## Tools

### `upstash_redis_get`

Get the value of a key from Upstash Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The key to retrieve |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was retrieved |
| `value` | json | The value of the key \(string\), or null if not found |

### `upstash_redis_set`

Set the value of a key in Upstash Redis with an optional expiration time in seconds.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The key to set |
| `value` | string | Yes | The value to store |
| `ex` | number | No | Expiration time in seconds \(optional\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was set |
| `result` | string | The result of the SET operation \(typically "OK"\) |

### `upstash_redis_delete`

Delete a key from Upstash Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The key to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was deleted |
| `deletedCount` | number | Number of keys deleted \(0 if key did not exist, 1 if deleted\) |

### `upstash_redis_keys`

List keys matching a pattern in Upstash Redis. Defaults to listing all keys (*).

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `pattern` | string | No | Pattern to match keys \(e.g., "user:*"\). Defaults to "*" for all keys. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `pattern` | string | The pattern used to match keys |
| `keys` | array | List of keys matching the pattern |
| `count` | number | Number of keys found |

### `upstash_redis_command`

Execute an arbitrary Redis command against Upstash Redis. Pass the full command as a JSON array (e.g., ["HSET", "myhash", "field1", "value1"]) or a simple command string (e.g., "PING").

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `command` | string | Yes | Redis command as a JSON array \(e.g., \["HSET", "myhash", "field1", "value1"\]\) or a simple command string \(e.g., "PING"\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `command` | string | The command that was executed |
| `result` | json | The result of the Redis command |

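Under the hood this maps onto Upstash's REST protocol, which accepts a command as a JSON array in the request body. A minimal sketch (URL and token are placeholders):

```typescript
async function runRedisCommand(
  restUrl: string,
  restToken: string,
  command: (string | number)[]
) {
  const res = await fetch(restUrl, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${restToken}`,
      'Content-Type': 'application/json',
    },
    // e.g. ["HSET", "myhash", "field1", "value1"]
    body: JSON.stringify(command),
  })
  const { result } = (await res.json()) as { result: unknown }
  return result
}

// await runRedisCommand(url, token, ['PING']) // => "PONG"
```
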
### `upstash_redis_hset`

Set a field in a hash stored at a key in Upstash Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The hash key |
| `field` | string | Yes | The field name within the hash |
| `value` | string | Yes | The value to store in the hash field |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The hash key |
| `field` | string | The field that was set |
| `result` | number | Number of new fields added \(0 if field was updated, 1 if new\) |

### `upstash_redis_hget`

Get the value of a field in a hash stored at a key in Upstash Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The hash key |
| `field` | string | Yes | The field name to retrieve |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The hash key |
| `field` | string | The field that was retrieved |
| `value` | json | The value of the hash field \(string\), or null if not found |

### `upstash_redis_hgetall`

Get all fields and values of a hash stored at a key in Upstash Redis.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The hash key |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The hash key |
| `fields` | object | All field-value pairs in the hash, keyed by field name |
| `fieldCount` | number | Number of fields in the hash |

### `upstash_redis_incr`

Atomically increment the integer value of a key by one in Upstash Redis. If the key does not exist, it is set to 0 before incrementing.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The key to increment |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was incremented |
| `value` | number | The new value after incrementing |

### `upstash_redis_expire`

Set a timeout on a key in Upstash Redis. After the timeout, the key is deleted.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The key to set expiration on |
| `seconds` | number | Yes | Timeout in seconds |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that expiration was set on |
| `result` | number | 1 if the timeout was set, 0 if the key does not exist |

### `upstash_redis_ttl`

Get the remaining time to live of a key in Upstash Redis. Returns -1 if the key has no expiration, -2 if the key does not exist.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The key to check TTL for |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key checked |
| `ttl` | number | Remaining TTL in seconds. Positive integer if the key has a TTL set, -1 if the key exists with no expiration, -2 if the key does not exist. |

### `upstash_redis_lpush`

Prepend a value to the beginning of a list in Upstash Redis. Creates the list if it does not exist.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The list key |
| `value` | string | Yes | The value to prepend to the list |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The list key |
| `length` | number | The length of the list after the push |

### `upstash_redis_lrange`

Get a range of elements from a list in Upstash Redis. Use 0 and -1 for start and stop to get all elements.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The list key |
| `start` | number | Yes | Start index \(0-based, negative values count from end\) |
| `stop` | number | Yes | Stop index \(inclusive, -1 for last element\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The list key |
| `values` | array | List of elements in the specified range |
| `count` | number | Number of elements returned |

### `upstash_redis_exists`

Check if a key exists in Upstash Redis. Returns true if the key exists, false otherwise.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The key to check |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was checked |
| `exists` | boolean | Whether the key exists \(true\) or not \(false\) |

### `upstash_redis_setnx`

Set the value of a key only if it does not already exist. Returns true if the key was set, false if it already existed.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The key to set |
| `value` | string | Yes | The value to store if the key does not exist |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was attempted to set |
| `wasSet` | boolean | Whether the key was set \(true\) or already existed \(false\) |

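One pattern these set-if-absent semantics enable is a best-effort distributed lock. A sketch using the generic command tool's `SET ... NX EX` form, reusing the `runRedisCommand` helper from above:

```typescript
// Returns true if this caller acquired the lock. NX makes the set conditional
// on the key being absent; EX attaches a TTL so crashed holders release it.
async function tryAcquireLock(url: string, token: string, key: string, ttlSeconds: number) {
  const result = await runRedisCommand(url, token, [
    'SET', key, 'locked', 'NX', 'EX', ttlSeconds,
  ])
  return result === 'OK' // null means another caller holds the lock
}
```
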
### `upstash_redis_incrby`

Increment the integer value of a key by a given amount. Use a negative value to decrement. If the key does not exist, it is set to 0 before the operation.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `restUrl` | string | Yes | Upstash Redis REST URL |
| `restToken` | string | Yes | Upstash Redis REST Token |
| `key` | string | Yes | The key to increment |
| `increment` | number | Yes | Amount to increment by \(use negative value to decrement\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `key` | string | The key that was incremented |
| `value` | number | The new value after incrementing |

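`incr`/`incrby` plus `expire` compose into a classic fixed-window rate limiter; a sketch, again assuming the `runRedisCommand` helper above:

```typescript
async function allowRequest(url: string, token: string, userId: string, maxPerMinute: number) {
  // One counter per user per minute window.
  const windowKey = `rate:${userId}:${Math.floor(Date.now() / 60_000)}`
  const count = (await runRedisCommand(url, token, ['INCR', windowKey])) as number
  if (count === 1) {
    // First hit in this window: expire the counter once the window ends.
    await runRedisCommand(url, token, ['EXPIRE', windowKey, 60])
  }
  return count <= maxPerMinute
}
```
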
1391  apps/docs/content/docs/en/tools/vercel.mdx  Normal file
File diff suppressed because it is too large
@@ -67,10 +67,9 @@ Retrieve a list of tickets from Zendesk with optional filtering
| `type` | string | No | Filter by type: "problem", "incident", "question", or "task" |
| `assigneeId` | string | No | Filter by assignee user ID as a numeric string \(e.g., "12345"\) |
| `organizationId` | string | No | Filter by organization ID as a numeric string \(e.g., "67890"\) |
| `sortBy` | string | No | Sort field: "created_at", "updated_at", "priority", or "status" |
| `sortOrder` | string | No | Sort order: "asc" or "desc" |
| `sort` | string | No | Sort field for ticket listing \(only applies without filters\): "updated_at", "id", or "status". Prefix with "-" for descending \(e.g., "-updated_at"\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |

#### Output

@@ -129,10 +128,10 @@ Retrieve a list of tickets from Zendesk with optional filtering
| ↳ `from_messaging_channel` | boolean | Whether the ticket originated from a messaging channel |
| ↳ `ticket_form_id` | number | Ticket form ID |
| ↳ `generated_timestamp` | number | Unix timestamp of the ticket generation |
-| `paging` | object | Pagination information |
+| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

@@ -515,7 +514,7 @@ Retrieve a list of users from Zendesk with optional filtering
| `role` | string | No | Filter by role: "end-user", "agent", or "admin" |
| `permissionSet` | string | No | Filter by permission set ID as a numeric string \(e.g., "12345"\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |

#### Output

@@ -563,10 +562,10 @@ Retrieve a list of users from Zendesk with optional filtering
| ↳ `shared` | boolean | Whether the user is shared from a different Zendesk |
| ↳ `shared_agent` | boolean | Whether the agent is shared from a different Zendesk |
| ↳ `remote_photo_url` | string | URL to a remote photo |
-| `paging` | object | Pagination information |
+| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

@@ -706,7 +705,7 @@ Search for users in Zendesk using a query string
| `query` | string | No | Search query string \(e.g., user name or email\) |
| `externalId` | string | No | External ID to search by \(your system identifier\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
-| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
+| `page` | string | No | Page number for pagination \(1-based\) |

#### Output

@@ -754,10 +753,10 @@ Search for users in Zendesk using a query string
| ↳ `shared` | boolean | Whether the user is shared from a different Zendesk |
| ↳ `shared_agent` | boolean | Whether the agent is shared from a different Zendesk |
| ↳ `remote_photo_url` | string | URL to a remote photo |
-| `paging` | object | Pagination information |
+| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

@@ -999,7 +998,7 @@ Retrieve a list of organizations from Zendesk
| `apiToken` | string | Yes | Zendesk API token |
| `subdomain` | string | Yes | Your Zendesk subdomain \(e.g., "mycompany" for mycompany.zendesk.com\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |

#### Output

@@ -1020,10 +1019,10 @@ Retrieve a list of organizations from Zendesk
| ↳ `created_at` | string | When the organization was created \(ISO 8601 format\) |
| ↳ `updated_at` | string | When the organization was last updated \(ISO 8601 format\) |
| ↳ `external_id` | string | External ID for linking to external records |
-| `paging` | object | Pagination information |
+| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

@@ -1075,7 +1074,7 @@ Autocomplete organizations in Zendesk by name prefix (for name matching/autocomp
| `subdomain` | string | Yes | Your Zendesk subdomain |
| `name` | string | Yes | Organization name prefix to search for \(e.g., "Acme"\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
-| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
+| `page` | string | No | Page number for pagination \(1-based\) |

#### Output

@@ -1096,10 +1095,10 @@ Autocomplete organizations in Zendesk by name prefix (for name matching/autocomp
| ↳ `created_at` | string | When the organization was created \(ISO 8601 format\) |
| ↳ `updated_at` | string | When the organization was last updated \(ISO 8601 format\) |
| ↳ `external_id` | string | External ID for linking to external records |
-| `paging` | object | Pagination information |
+| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

@@ -1249,19 +1248,18 @@ Unified search across tickets, users, and organizations in Zendesk
| `apiToken` | string | Yes | Zendesk API token |
| `subdomain` | string | Yes | Your Zendesk subdomain |
| `query` | string | Yes | Search query string using Zendesk search syntax \(e.g., "type:ticket status:open"\) |
| `sortBy` | string | No | Sort field: "relevance", "created_at", "updated_at", "priority", "status", or "ticket_type" |
| `sortOrder` | string | No | Sort order: "asc" or "desc" |
| `filterType` | string | Yes | Resource type to search for: "ticket", "user", "organization", or "group" |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
-| `paging` | object | Pagination information |
+| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

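The `paging` object these hunks standardize on supports cursor pagination: pass `after_cursor` back as `pageAfter` until `has_more` is false. A sketch against a hypothetical wrapper (the `listTickets` name and shape are illustrative only):

```typescript
declare function listTickets(opts: { pageAfter?: string }): Promise<{
  tickets: unknown[]
  paging: { after_cursor?: string; has_more: boolean }
}>

async function fetchAllTickets() {
  const all: unknown[] = []
  let cursor: string | undefined
  do {
    const page = await listTickets({ pageAfter: cursor })
    all.push(...page.tickets)
    if (!page.paging.has_more) break // no further pages
    cursor = page.paging.after_cursor
  } while (cursor)
  return all
}
```
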
@@ -21,7 +21,7 @@
  "fumadocs-mdx": "14.1.0",
  "fumadocs-ui": "16.2.3",
  "lucide-react": "^0.511.0",
- "next": "16.1.0-canary.21",
+ "next": "16.1.6",
  "next-themes": "^0.4.6",
  "postgres": "^3.4.5",
  "react": "19.2.1",

@@ -1,5 +1,3 @@
-'use server'
-
import { env } from '@/lib/core/config/env'
import { isProd } from '@/lib/core/config/feature-flags'

@@ -85,7 +85,7 @@ export const LandingNode = React.memo(function LandingNode({ data }: { data: Lan
        transform: isAnimated ? 'translateY(0) scale(1)' : 'translateY(8px) scale(0.98)',
        transition:
          'opacity 0.6s cubic-bezier(0.22, 1, 0.36, 1), transform 0.6s cubic-bezier(0.22, 1, 0.36, 1)',
-       willChange: 'transform, opacity',
+       willChange: isAnimated ? 'auto' : 'transform, opacity',
      }}
    >
      <LandingBlock icon={data.icon} color={data.color} name={data.name} tags={data.tags} />

@@ -67,7 +67,6 @@ export const LandingEdge = React.memo(function LandingEdge(props: EdgeProps) {
        strokeLinejoin: 'round',
        pointerEvents: 'none',
        animation: `landing-edge-dash-${id} 1s linear infinite`,
-       willChange: 'stroke-dashoffset',
        ...style,
      }}
    />

@@ -754,3 +754,100 @@ input[type="search"]::-ms-clear {
  text-decoration: none !important;
  color: inherit !important;
}

/**
 * Respect user's prefers-reduced-motion setting (WCAG 2.3.3)
 * Disables animations and transitions for users who prefer reduced motion.
 */
@media (prefers-reduced-motion: reduce) {
  *,
  *::before,
  *::after {
    animation-duration: 0.01ms !important;
    animation-iteration-count: 1 !important;
    transition-duration: 0.01ms !important;
    scroll-behavior: auto !important;
  }
}

/* WandPromptBar status indicator */
@keyframes smoke-pulse {
  0%,
  100% {
    transform: scale(0.8);
    opacity: 0.4;
  }
  50% {
    transform: scale(1.1);
    opacity: 0.8;
  }
}

.status-indicator {
  position: relative;
  width: 12px;
  height: 12px;
  border-radius: 50%;
  overflow: hidden;
  background-color: hsl(var(--muted-foreground) / 0.5);
  transition: background-color 0.3s ease;
}

.status-indicator.streaming {
  background-color: transparent;
}

.status-indicator.streaming::before {
  content: "";
  position: absolute;
  inset: 0;
  border-radius: 50%;
  background: radial-gradient(
    circle,
    hsl(var(--primary) / 0.9) 0%,
    hsl(var(--primary) / 0.4) 60%,
    transparent 80%
  );
  animation: smoke-pulse 1.8s ease-in-out infinite;
  opacity: 0.9;
}

.dark .status-indicator.streaming::before {
  background: #6b7280;
  opacity: 0.9;
  animation: smoke-pulse 1.8s ease-in-out infinite;
}

/* MessageContainer loading dot */
@keyframes growShrink {
  0%,
  100% {
    transform: scale(0.9);
  }
  50% {
    transform: scale(1.1);
  }
}

.loading-dot {
  animation: growShrink 1.5s infinite ease-in-out;
}

/* Subflow node z-index and drag-over styles */
.workflow-container .react-flow__node-subflowNode {
  z-index: -1 !important;
}

.workflow-container .react-flow__node-subflowNode:has([data-subflow-selected="true"]) {
  z-index: 10 !important;
}

.loop-node-drag-over,
.parallel-node-drag-over {
  box-shadow: 0 0 0 1.75px var(--brand-secondary) !important;
  border-radius: 8px !important;
}

.react-flow__node[data-parent-node-id] .react-flow__handle {
  z-index: 30;
}

@@ -3,7 +3,7 @@
 *
 * @vitest-environment node
 */
-import { createMockLogger, createMockRequest } from '@sim/testing'
+import { auditMock, createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

describe('OAuth Disconnect API Route', () => {

@@ -67,6 +67,8 @@ describe('OAuth Disconnect API Route', () => {
    vi.doMock('@/lib/webhooks/utils.server', () => ({
      syncAllWebhooksForCredentialSet: mockSyncAllWebhooksForCredentialSet,
    }))

+   vi.doMock('@/lib/audit/log', () => auditMock)
  })

  afterEach(() => {

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq, like, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'

@@ -118,6 +119,20 @@ export async function POST(request: NextRequest) {
      }
    }

+   recordAudit({
+     workspaceId: null,
+     actorId: session.user.id,
+     action: AuditAction.OAUTH_DISCONNECTED,
+     resourceType: AuditResourceType.OAUTH,
+     resourceId: providerId ?? provider,
+     actorName: session.user.name ?? undefined,
+     actorEmail: session.user.email ?? undefined,
+     resourceName: provider,
+     description: `Disconnected OAuth provider: ${provider}`,
+     metadata: { provider, providerId },
+     request,
+   })

    return NextResponse.json({ success: true }, { status: 200 })
  } catch (error) {
    logger.error(`[${requestId}] Error disconnecting OAuth provider`, error)

@@ -3,7 +3,7 @@
 *
 * @vitest-environment node
 */
-import { createMockLogger, createMockRequest } from '@sim/testing'
+import { createMockLogger, createMockRequest, mockHybridAuth } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

describe('OAuth Token API Routes', () => {
@@ -12,7 +12,7 @@ describe('OAuth Token API Routes', () => {
  const mockRefreshTokenIfNeeded = vi.fn()
  const mockGetOAuthToken = vi.fn()
  const mockAuthorizeCredentialUse = vi.fn()
-  const mockCheckSessionOrInternalAuth = vi.fn()
+  let mockCheckSessionOrInternalAuth: ReturnType<typeof vi.fn>

  const mockLogger = createMockLogger()
@@ -41,9 +41,7 @@ describe('OAuth Token API Routes', () => {
      authorizeCredentialUse: mockAuthorizeCredentialUse,
    }))

-    vi.doMock('@/lib/auth/hybrid', () => ({
-      checkSessionOrInternalAuth: mockCheckSessionOrInternalAuth,
-    }))
+    ;({ mockCheckSessionOrInternalAuth } = mockHybridAuth())
  })

  afterEach(() => {
@@ -73,23 +71,18 @@ describe('OAuth Token API Routes', () => {
      refreshed: false,
    })

-    // Create mock request
    const req = createMockRequest('POST', {
      credentialId: 'credential-id',
    })

-    // Import handler after setting up mocks
    const { POST } = await import('@/app/api/auth/oauth/token/route')

-    // Call handler
    const response = await POST(req)
    const data = await response.json()

-    // Verify request was handled correctly
    expect(response.status).toBe(200)
    expect(data).toHaveProperty('accessToken', 'fresh-token')

-    // Verify mocks were called correctly
    expect(mockAuthorizeCredentialUse).toHaveBeenCalled()
    expect(mockGetCredential).toHaveBeenCalled()
    expect(mockRefreshTokenIfNeeded).toHaveBeenCalled()
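The repeated inline `vi.doMock('@/lib/auth/hybrid', ...)` blocks across these tests are being replaced by a shared `mockHybridAuth()` helper. Judging only by the names destructured at its call sites, it registers the module mock once and hands back the `vi.fn()` instances; a minimal sketch, assuming that is all it does:

```typescript
// Hypothetical reconstruction of @sim/testing's mockHybridAuth, inferred from
// the destructured names used in this diff. The real helper may differ.
import { vi } from 'vitest'

export function mockHybridAuth() {
  const mockCheckHybridAuth = vi.fn()
  const mockCheckInternalAuth = vi.fn()
  const mockCheckSessionOrInternalAuth = vi.fn()

  // Registers the module mock here, so individual tests only configure
  // resolved values instead of re-declaring the whole factory each time.
  vi.doMock('@/lib/auth/hybrid', () => ({
    checkHybridAuth: mockCheckHybridAuth,
    checkInternalAuth: mockCheckInternalAuth,
    checkSessionOrInternalAuth: mockCheckSessionOrInternalAuth,
  }))

  return { mockCheckHybridAuth, mockCheckInternalAuth, mockCheckSessionOrInternalAuth }
}
```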
@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { getCreditBalance } from '@/lib/billing/credits/balance'
import { purchaseCredits } from '@/lib/billing/credits/purchase'
@@ -57,6 +58,17 @@ export async function POST(request: NextRequest) {
      return NextResponse.json({ error: result.error }, { status: 400 })
    }

+    recordAudit({
+      actorId: session.user.id,
+      actorName: session.user.name,
+      actorEmail: session.user.email,
+      action: AuditAction.CREDIT_PURCHASED,
+      resourceType: AuditResourceType.BILLING,
+      description: `Purchased $${validation.data.amount} in credits`,
+      metadata: { amount: validation.data.amount, requestId: validation.data.requestId },
+      request,
+    })
+
    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error('Failed to purchase credits', { error, userId: session.user.id })
@@ -3,7 +3,7 @@
 *
 * @vitest-environment node
 */
-import { loggerMock } from '@sim/testing'
+import { loggerMock, requestUtilsMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

/**
@@ -94,9 +94,7 @@ vi.mock('@/lib/core/utils/sse', () => ({
  },
}))

-vi.mock('@/lib/core/utils/request', () => ({
-  generateRequestId: vi.fn().mockReturnValue('test-request-id'),
-}))
+vi.mock('@/lib/core/utils/request', () => requestUtilsMock)

vi.mock('@/lib/core/security/encryption', () => ({
  decryptSecret: vi.fn().mockResolvedValue({ decrypted: 'test-password' }),
@@ -3,10 +3,12 @@
 *
 * @vitest-environment node
 */
-import { loggerMock } from '@sim/testing'
+import { auditMock, loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

+vi.mock('@/lib/audit/log', () => auditMock)
+
vi.mock('@/lib/core/config/feature-flags', () => ({
  isDev: true,
  isHosted: false,
@@ -216,8 +218,11 @@ describe('Chat Edit API Route', () => {
      workflowId: 'workflow-123',
    }

-    mockCheckChatAccess.mockResolvedValue({ hasAccess: true, chat: mockChat })
-    mockLimit.mockResolvedValueOnce([]) // No identifier conflict
+    mockCheckChatAccess.mockResolvedValue({
+      hasAccess: true,
+      chat: mockChat,
+      workspaceId: 'workspace-123',
+    })

    const req = new NextRequest('http://localhost:3000/api/chat/manage/chat-123', {
      method: 'PATCH',
@@ -311,8 +316,11 @@ describe('Chat Edit API Route', () => {
      workflowId: 'workflow-123',
    }

-    mockCheckChatAccess.mockResolvedValue({ hasAccess: true, chat: mockChat })
-    mockLimit.mockResolvedValueOnce([])
+    mockCheckChatAccess.mockResolvedValue({
+      hasAccess: true,
+      chat: mockChat,
+      workspaceId: 'workspace-123',
+    })

    const req = new NextRequest('http://localhost:3000/api/chat/manage/chat-123', {
      method: 'PATCH',
@@ -371,8 +379,11 @@ describe('Chat Edit API Route', () => {
      }),
    }))

-    mockCheckChatAccess.mockResolvedValue({ hasAccess: true })
-    mockWhere.mockResolvedValue(undefined)
+    mockCheckChatAccess.mockResolvedValue({
+      hasAccess: true,
+      chat: { title: 'Test Chat', workflowId: 'workflow-123' },
+      workspaceId: 'workspace-123',
+    })

    const req = new NextRequest('http://localhost:3000/api/chat/manage/chat-123', {
      method: 'DELETE',
@@ -393,8 +404,11 @@ describe('Chat Edit API Route', () => {
      }),
    }))

-    mockCheckChatAccess.mockResolvedValue({ hasAccess: true })
-    mockWhere.mockResolvedValue(undefined)
+    mockCheckChatAccess.mockResolvedValue({
+      hasAccess: true,
+      chat: { title: 'Test Chat', workflowId: 'workflow-123' },
+      workspaceId: 'workspace-123',
+    })

    const req = new NextRequest('http://localhost:3000/api/chat/manage/chat-123', {
      method: 'DELETE',
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { z } from 'zod'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { isDev } from '@/lib/core/config/feature-flags'
import { encryptSecret } from '@/lib/core/security/encryption'
@@ -103,7 +104,11 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
  try {
    const validatedData = chatUpdateSchema.parse(body)

-    const { hasAccess, chat: existingChatRecord } = await checkChatAccess(chatId, session.user.id)
+    const {
+      hasAccess,
+      chat: existingChatRecord,
+      workspaceId: chatWorkspaceId,
+    } = await checkChatAccess(chatId, session.user.id)

    if (!hasAccess || !existingChatRecord) {
      return createErrorResponse('Chat not found or access denied', 404)
@@ -217,6 +222,19 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<

    logger.info(`Chat "${chatId}" updated successfully`)

+    recordAudit({
+      workspaceId: chatWorkspaceId || null,
+      actorId: session.user.id,
+      actorName: session.user.name,
+      actorEmail: session.user.email,
+      action: AuditAction.CHAT_UPDATED,
+      resourceType: AuditResourceType.CHAT,
+      resourceId: chatId,
+      resourceName: title || existingChatRecord.title,
+      description: `Updated chat deployment "${title || existingChatRecord.title}"`,
+      request,
+    })
+
    return createSuccessResponse({
      id: chatId,
      chatUrl,
@@ -252,7 +270,11 @@ export async function DELETE(
      return createErrorResponse('Unauthorized', 401)
    }

-    const { hasAccess } = await checkChatAccess(chatId, session.user.id)
+    const {
+      hasAccess,
+      chat: chatRecord,
+      workspaceId: chatWorkspaceId,
+    } = await checkChatAccess(chatId, session.user.id)

    if (!hasAccess) {
      return createErrorResponse('Chat not found or access denied', 404)
@@ -262,6 +284,19 @@ export async function DELETE(

    logger.info(`Chat "${chatId}" deleted successfully`)

+    recordAudit({
+      workspaceId: chatWorkspaceId || null,
+      actorId: session.user.id,
+      actorName: session.user.name,
+      actorEmail: session.user.email,
+      action: AuditAction.CHAT_DELETED,
+      resourceType: AuditResourceType.CHAT,
+      resourceId: chatId,
+      resourceName: chatRecord?.title || chatId,
+      description: `Deleted chat deployment "${chatRecord?.title || chatId}"`,
+      request: _request,
+    })
+
    return createSuccessResponse({
      message: 'Chat deployment deleted successfully',
    })
@@ -1,9 +1,10 @@
-import { NextRequest } from 'next/server'
/**
 * Tests for chat API route
 *
 * @vitest-environment node
 */
+import { auditMock } from '@sim/testing'
+import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

describe('Chat API Route', () => {
@@ -30,6 +31,8 @@ describe('Chat API Route', () => {
    mockInsert.mockReturnValue({ values: mockValues })
    mockValues.mockReturnValue({ returning: mockReturning })

+    vi.doMock('@/lib/audit/log', () => auditMock)
+
    vi.doMock('@sim/db', () => ({
      db: {
        select: mockSelect,
@@ -5,6 +5,7 @@ import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { isDev } from '@/lib/core/config/feature-flags'
import { encryptSecret } from '@/lib/core/security/encryption'
@@ -42,7 +43,7 @@ const chatSchema = z.object({
    .default([]),
})

-export async function GET(request: NextRequest) {
+export async function GET(_request: NextRequest) {
  try {
    const session = await getSession()
@@ -174,7 +175,7 @@ export async function POST(request: NextRequest) {
      userId: session.user.id,
      identifier,
      title,
-      description: description || '',
+      description: description || null,
      customizations: mergedCustomizations,
      isActive: true,
      authType,
@@ -224,6 +225,20 @@ export async function POST(request: NextRequest) {
      // Silently fail
    }

+    recordAudit({
+      workspaceId: workflowRecord.workspaceId || null,
+      actorId: session.user.id,
+      actorName: session.user.name,
+      actorEmail: session.user.email,
+      action: AuditAction.CHAT_DEPLOYED,
+      resourceType: AuditResourceType.CHAT,
+      resourceId: id,
+      resourceName: title,
+      description: `Deployed chat "${title}"`,
+      metadata: { workflowId, identifier, authType },
+      request,
+    })
+
    return createSuccessResponse({
      id,
      chatUrl,
@@ -1,4 +1,4 @@
-import { databaseMock, loggerMock } from '@sim/testing'
+import { databaseMock, loggerMock, requestUtilsMock } from '@sim/testing'
import type { NextResponse } from 'next/server'
/**
 * Tests for chat API utils
@@ -37,9 +37,7 @@ vi.mock('@/lib/core/security/encryption', () => ({
  decryptSecret: mockDecryptSecret,
}))

-vi.mock('@/lib/core/utils/request', () => ({
-  generateRequestId: vi.fn(),
-}))
+vi.mock('@/lib/core/utils/request', () => requestUtilsMock)

vi.mock('@/lib/core/config/feature-flags', () => ({
  isDev: true,
@@ -52,7 +52,7 @@ export async function checkWorkflowAccessForChatCreation(
export async function checkChatAccess(
  chatId: string,
  userId: string
-): Promise<{ hasAccess: boolean; chat?: any }> {
+): Promise<{ hasAccess: boolean; chat?: any; workspaceId?: string }> {
  const chatData = await db
    .select({
      chat: chat,
@@ -78,7 +78,9 @@ export async function checkChatAccess(
    action: 'admin',
  })

-  return authorization.allowed ? { hasAccess: true, chat: chatRecord } : { hasAccess: false }
+  return authorization.allowed
+    ? { hasAccess: true, chat: chatRecord, workspaceId: workflowWorkspaceId }
+    : { hasAccess: false }
}

export async function validateChatAuth(
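`checkChatAccess` now also surfaces the owning workflow's `workspaceId` (mirrored by `checkFormAccess` at the end of this diff), so route handlers can attribute audit entries to a workspace. A hedged caller-side sketch, with hypothetical names:

```typescript
// Illustrative only: the function name and return shape here are made up;
// the destructuring pattern matches the route handlers in this diff.
async function resolveAuditScope(chatId: string, userId: string) {
  const { hasAccess, chat, workspaceId } = await checkChatAccess(chatId, userId)
  if (!hasAccess || !chat) return null
  // workspaceId is optional (a workflow may lack a workspace), so audit
  // callers coerce it to null rather than omitting the field.
  return { workspaceId: workspaceId ?? null, chat }
}
```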
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getEmailSubject, renderPollingGroupInvitationEmail } from '@/components/emails'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -148,6 +149,19 @@ export async function POST(
      userId: session.user.id,
    })

+    recordAudit({
+      actorId: session.user.id,
+      actorName: session.user.name,
+      actorEmail: session.user.email,
+      action: AuditAction.CREDENTIAL_SET_INVITATION_RESENT,
+      resourceType: AuditResourceType.CREDENTIAL_SET,
+      resourceId: id,
+      resourceName: result.set.name,
+      description: `Resent credential set invitation to ${invitation.email}`,
+      metadata: { invitationId, email: invitation.email },
+      request: req,
+    })
+
    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error('Error resending invitation', error)
@@ -5,6 +5,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getEmailSubject, renderPollingGroupInvitationEmail } from '@/components/emails'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -175,6 +176,19 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      emailSent: !!email,
    })

+    recordAudit({
+      workspaceId: null,
+      actorId: session.user.id,
+      action: AuditAction.CREDENTIAL_SET_INVITATION_CREATED,
+      resourceType: AuditResourceType.CREDENTIAL_SET,
+      resourceId: id,
+      actorName: session.user.name ?? undefined,
+      actorEmail: session.user.email ?? undefined,
+      resourceName: result.set.name,
+      description: `Created invitation for credential set "${result.set.name}"${email ? ` to ${email}` : ''}`,
+      request: req,
+    })
+
    return NextResponse.json({
      invitation: {
        ...invitation,
@@ -235,6 +249,19 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i
      )
    )

+    recordAudit({
+      workspaceId: null,
+      actorId: session.user.id,
+      action: AuditAction.CREDENTIAL_SET_INVITATION_REVOKED,
+      resourceType: AuditResourceType.CREDENTIAL_SET,
+      resourceId: id,
+      actorName: session.user.name ?? undefined,
+      actorEmail: session.user.email ?? undefined,
+      resourceName: result.set.name,
+      description: `Revoked invitation "${invitationId}" for credential set "${result.set.name}"`,
+      request: req,
+    })
+
    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error('Error cancelling invitation', error)
@@ -3,6 +3,7 @@ import { account, credentialSet, credentialSetMember, member, user } from '@sim/
import { createLogger } from '@sim/logger'
import { and, eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
@@ -13,6 +14,7 @@ async function getCredentialSetWithAccess(credentialSetId: string, userId: strin
  const [set] = await db
    .select({
      id: credentialSet.id,
+      name: credentialSet.name,
      organizationId: credentialSet.organizationId,
      providerId: credentialSet.providerId,
    })
@@ -177,6 +179,19 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i
      userId: session.user.id,
    })

+    recordAudit({
+      workspaceId: null,
+      actorId: session.user.id,
+      action: AuditAction.CREDENTIAL_SET_MEMBER_REMOVED,
+      resourceType: AuditResourceType.CREDENTIAL_SET,
+      resourceId: id,
+      actorName: session.user.name ?? undefined,
+      actorEmail: session.user.email ?? undefined,
+      resourceName: result.set.name,
+      description: `Removed member from credential set "${result.set.name}"`,
+      request: req,
+    })
+
    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error('Error removing member from credential set', error)
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'

@@ -131,6 +132,19 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:

    const [updated] = await db.select().from(credentialSet).where(eq(credentialSet.id, id)).limit(1)

+    recordAudit({
+      workspaceId: null,
+      actorId: session.user.id,
+      action: AuditAction.CREDENTIAL_SET_UPDATED,
+      resourceType: AuditResourceType.CREDENTIAL_SET,
+      resourceId: id,
+      actorName: session.user.name ?? undefined,
+      actorEmail: session.user.email ?? undefined,
+      resourceName: updated?.name ?? result.set.name,
+      description: `Updated credential set "${updated?.name ?? result.set.name}"`,
+      request: req,
+    })
+
    return NextResponse.json({ credentialSet: updated })
  } catch (error) {
    if (error instanceof z.ZodError) {
@@ -175,6 +189,19 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i

    logger.info('Deleted credential set', { credentialSetId: id, userId: session.user.id })

+    recordAudit({
+      workspaceId: null,
+      actorId: session.user.id,
+      action: AuditAction.CREDENTIAL_SET_DELETED,
+      resourceType: AuditResourceType.CREDENTIAL_SET,
+      resourceId: id,
+      actorName: session.user.name ?? undefined,
+      actorEmail: session.user.email ?? undefined,
+      resourceName: result.set.name,
+      description: `Deleted credential set "${result.set.name}"`,
+      request: req,
+    })
+
    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error('Error deleting credential set', error)
@@ -8,6 +8,7 @@ import {
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'

@@ -78,6 +79,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ tok
      status: credentialSetInvitation.status,
      expiresAt: credentialSetInvitation.expiresAt,
      invitedBy: credentialSetInvitation.invitedBy,
+      credentialSetName: credentialSet.name,
      providerId: credentialSet.providerId,
    })
    .from(credentialSetInvitation)
@@ -125,7 +127,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ tok
    const now = new Date()
    const requestId = crypto.randomUUID().slice(0, 8)

-    // Use transaction to ensure membership + invitation update + webhook sync are atomic
    await db.transaction(async (tx) => {
      await tx.insert(credentialSetMember).values({
        id: crypto.randomUUID(),
@@ -147,8 +148,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ tok
        })
        .where(eq(credentialSetInvitation.id, invitation.id))

-      // Clean up all other pending invitations for the same credential set and email
-      // This prevents duplicate invites from showing up after accepting one
      if (invitation.email) {
        await tx
          .update(credentialSetInvitation)
@@ -166,7 +165,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ tok
        )
      }

-      // Sync webhooks within the transaction
      const syncResult = await syncAllWebhooksForCredentialSet(
        invitation.credentialSetId,
        requestId,
@@ -184,6 +182,19 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ tok
      userId: session.user.id,
    })

+    recordAudit({
+      actorId: session.user.id,
+      actorName: session.user.name,
+      actorEmail: session.user.email,
+      action: AuditAction.CREDENTIAL_SET_INVITATION_ACCEPTED,
+      resourceType: AuditResourceType.CREDENTIAL_SET,
+      resourceId: invitation.credentialSetId,
+      resourceName: invitation.credentialSetName,
+      description: `Accepted credential set invitation`,
+      metadata: { invitationId: invitation.id },
+      request: req,
+    })
+
    return NextResponse.json({
      success: true,
      credentialSetId: invitation.credentialSetId,
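The acceptance flow above keeps the membership insert, the invitation status update, the cleanup of sibling invites, and the webhook sync inside a single Drizzle transaction, so a webhook failure rolls everything back. A minimal sketch of that shape, with hypothetical table and helper names:

```typescript
// Sketch of the atomic-acceptance pattern; `db`, `membership`, `invites`,
// and `syncWebhooks` are stand-ins, not the real schema or helpers.
import { eq } from 'drizzle-orm'

declare const db: any
declare const membership: any
declare const invites: any
declare function syncWebhooks(setId: string, tx: unknown): Promise<{ ok: boolean }>

async function acceptInvitation(userId: string, inviteId: string, credentialSetId: string) {
  await db.transaction(async (tx: any) => {
    await tx.insert(membership).values({ id: crypto.randomUUID(), userId })
    await tx.update(invites).set({ status: 'accepted' }).where(eq(invites.id, inviteId))

    // Throwing inside the callback rolls back the membership insert and the
    // status update, so a failed sync cannot strand a half-accepted invite.
    const result = await syncWebhooks(credentialSetId, tx)
    if (!result.ok) throw new Error('webhook sync failed')
  })
}
```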
@@ -3,6 +3,7 @@ import { credentialSet, credentialSetMember, organization } from '@sim/db/schema
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'

@@ -106,6 +107,17 @@ export async function DELETE(req: NextRequest) {
      userId: session.user.id,
    })

+    recordAudit({
+      actorId: session.user.id,
+      actorName: session.user.name,
+      actorEmail: session.user.email,
+      action: AuditAction.CREDENTIAL_SET_MEMBER_LEFT,
+      resourceType: AuditResourceType.CREDENTIAL_SET,
+      resourceId: credentialSetId,
+      description: `Left credential set`,
+      request: req,
+    })
+
    return NextResponse.json({ success: true })
  } catch (error) {
    const message = error instanceof Error ? error.message : 'Failed to leave credential set'
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, count, desc, eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { z } from 'zod'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'

@@ -165,6 +166,19 @@ export async function POST(req: Request) {
      userId: session.user.id,
    })

+    recordAudit({
+      workspaceId: null,
+      actorId: session.user.id,
+      action: AuditAction.CREDENTIAL_SET_CREATED,
+      resourceType: AuditResourceType.CREDENTIAL_SET,
+      resourceId: newCredentialSet.id,
+      actorName: session.user.name ?? undefined,
+      actorEmail: session.user.email ?? undefined,
+      resourceName: name,
+      description: `Created credential set "${name}"`,
+      request: req,
+    })
+
    return NextResponse.json({ credentialSet: newCredentialSet }, { status: 201 })
  } catch (error) {
    if (error instanceof z.ZodError) {
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
import { generateRequestId } from '@/lib/core/utils/request'
@@ -53,6 +54,17 @@ export async function POST(req: NextRequest) {
      },
    })

+    recordAudit({
+      actorId: session.user.id,
+      actorName: session.user.name,
+      actorEmail: session.user.email,
+      action: AuditAction.ENVIRONMENT_UPDATED,
+      resourceType: AuditResourceType.ENVIRONMENT,
+      description: 'Updated global environment variables',
+      metadata: { variableCount: Object.keys(variables).length },
+      request: req,
+    })
+
    return NextResponse.json({ success: true })
  } catch (validationError) {
    if (validationError instanceof z.ZodError) {
@@ -2,6 +2,7 @@ import {
  createMockRequest,
  mockAuth,
  mockCryptoUuid,
+  mockHybridAuth,
  mockUuid,
  setupCommonApiMocks,
} from '@sim/testing'
@@ -28,13 +29,12 @@ function setupFileApiMocks(
    authMocks.setUnauthenticated()
  }

-  vi.doMock('@/lib/auth/hybrid', () => ({
-    checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
-      success: authenticated,
-      userId: authenticated ? 'test-user-id' : undefined,
-      error: authenticated ? undefined : 'Unauthorized',
-    }),
-  }))
+  const { mockCheckSessionOrInternalAuth } = mockHybridAuth()
+  mockCheckSessionOrInternalAuth.mockResolvedValue({
+    success: authenticated,
+    userId: authenticated ? 'test-user-id' : undefined,
+    error: authenticated ? undefined : 'Unauthorized',
+  })

  vi.doMock('@/app/api/files/authorization', () => ({
    verifyFileAccess: vi.fn().mockResolvedValue(true),
@@ -8,6 +8,7 @@ import {
  createMockRequest,
  mockAuth,
  mockCryptoUuid,
+  mockHybridAuth,
  mockUuid,
  setupCommonApiMocks,
} from '@sim/testing'
@@ -34,13 +35,12 @@ function setupFileApiMocks(
    authMocks.setUnauthenticated()
  }

-  vi.doMock('@/lib/auth/hybrid', () => ({
-    checkInternalAuth: vi.fn().mockResolvedValue({
-      success: authenticated,
-      userId: authenticated ? 'test-user-id' : undefined,
-      error: authenticated ? undefined : 'Unauthorized',
-    }),
-  }))
+  const { mockCheckInternalAuth } = mockHybridAuth()
+  mockCheckInternalAuth.mockResolvedValue({
+    success: authenticated,
+    userId: authenticated ? 'test-user-id' : undefined,
+    error: authenticated ? undefined : 'Unauthorized',
+  })

  vi.doMock('@/app/api/files/authorization', () => ({
    verifyFileAccess: vi.fn().mockResolvedValue(true),
@@ -1,4 +1,10 @@
-import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing'
+import {
+  mockAuth,
+  mockCryptoUuid,
+  mockHybridAuth,
+  mockUuid,
+  setupCommonApiMocks,
+} from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

@@ -28,13 +34,12 @@ function setupFileApiMocks(
    authMocks.setUnauthenticated()
  }

-  vi.doMock('@/lib/auth/hybrid', () => ({
-    checkHybridAuth: vi.fn().mockResolvedValue({
-      success: authenticated,
-      userId: authenticated ? 'test-user-id' : undefined,
-      error: authenticated ? undefined : 'Unauthorized',
-    }),
-  }))
+  const { mockCheckHybridAuth } = mockHybridAuth()
+  mockCheckHybridAuth.mockResolvedValue({
+    success: authenticated,
+    userId: authenticated ? 'test-user-id' : undefined,
+    error: authenticated ? undefined : 'Unauthorized',
+  })

  vi.doMock('@/app/api/files/authorization', () => ({
    verifyFileAccess: vi.fn().mockResolvedValue(true),
@@ -7,6 +7,7 @@ import {
  defaultMockUser,
  mockAuth,
  mockCryptoUuid,
+  mockHybridAuth,
  mockUuid,
  setupCommonApiMocks,
} from '@sim/testing'
@@ -54,12 +55,11 @@ describe('File Serve API Route', () => {
      withUploadUtils: true,
    })

-    vi.doMock('@/lib/auth/hybrid', () => ({
-      checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
-        success: true,
-        userId: 'test-user-id',
-      }),
-    }))
+    const { mockCheckSessionOrInternalAuth: serveAuthMock } = mockHybridAuth()
+    serveAuthMock.mockResolvedValue({
+      success: true,
+      userId: 'test-user-id',
+    })

    vi.doMock('@/app/api/files/authorization', () => ({
      verifyFileAccess: vi.fn().mockResolvedValue(true),
@@ -164,12 +164,11 @@ describe('File Serve API Route', () => {
      findLocalFile: vi.fn().mockReturnValue('/test/uploads/nested/path/file.txt'),
    }))

-    vi.doMock('@/lib/auth/hybrid', () => ({
-      checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
-        success: true,
-        userId: 'test-user-id',
-      }),
-    }))
+    const { mockCheckSessionOrInternalAuth: serveAuthMock } = mockHybridAuth()
+    serveAuthMock.mockResolvedValue({
+      success: true,
+      userId: 'test-user-id',
+    })

    vi.doMock('@/app/api/files/authorization', () => ({
      verifyFileAccess: vi.fn().mockResolvedValue(true),
@@ -225,12 +224,11 @@ describe('File Serve API Route', () => {
      USE_BLOB_STORAGE: false,
    }))

-    vi.doMock('@/lib/auth/hybrid', () => ({
-      checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
-        success: true,
-        userId: 'test-user-id',
-      }),
-    }))
+    const { mockCheckSessionOrInternalAuth: serveAuthMock } = mockHybridAuth()
+    serveAuthMock.mockResolvedValue({
+      success: true,
+      userId: 'test-user-id',
+    })

    vi.doMock('@/app/api/files/authorization', () => ({
      verifyFileAccess: vi.fn().mockResolvedValue(true),
@@ -290,12 +288,11 @@ describe('File Serve API Route', () => {
      readFile: vi.fn().mockRejectedValue(new Error('ENOENT: no such file or directory')),
    }))

-    vi.doMock('@/lib/auth/hybrid', () => ({
-      checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
-        success: true,
-        userId: 'test-user-id',
-      }),
-    }))
+    const { mockCheckSessionOrInternalAuth: serveAuthMock } = mockHybridAuth()
+    serveAuthMock.mockResolvedValue({
+      success: true,
+      userId: 'test-user-id',
+    })

    vi.doMock('@/app/api/files/authorization', () => ({
      verifyFileAccess: vi.fn().mockResolvedValue(false), // File not found = no access
@@ -349,12 +346,11 @@ describe('File Serve API Route', () => {

  for (const test of contentTypeTests) {
    it(`should serve ${test.ext} file with correct content type`, async () => {
-      vi.doMock('@/lib/auth/hybrid', () => ({
-        checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
-          success: true,
-          userId: 'test-user-id',
-        }),
-      }))
+      const { mockCheckSessionOrInternalAuth: ctAuthMock } = mockHybridAuth()
+      ctAuthMock.mockResolvedValue({
+        success: true,
+        userId: 'test-user-id',
+      })

      vi.doMock('@/app/api/files/authorization', () => ({
        verifyFileAccess: vi.fn().mockResolvedValue(true),
@@ -3,7 +3,13 @@
 *
 * @vitest-environment node
 */
-import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing'
+import {
+  mockAuth,
+  mockCryptoUuid,
+  mockHybridAuth,
+  mockUuid,
+  setupCommonApiMocks,
+} from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

@@ -27,13 +33,12 @@ function setupFileApiMocks(
    authMocks.setUnauthenticated()
  }

-  vi.doMock('@/lib/auth/hybrid', () => ({
-    checkHybridAuth: vi.fn().mockResolvedValue({
-      success: authenticated,
-      userId: authenticated ? 'test-user-id' : undefined,
-      error: authenticated ? undefined : 'Unauthorized',
-    }),
-  }))
+  const { mockCheckHybridAuth } = mockHybridAuth()
+  mockCheckHybridAuth.mockResolvedValue({
+    success: authenticated,
+    userId: authenticated ? 'test-user-id' : undefined,
+    error: authenticated ? undefined : 'Unauthorized',
+  })

  vi.doMock('@/app/api/files/authorization', () => ({
    verifyFileAccess: vi.fn().mockResolvedValue(true),
@@ -1,9 +1,10 @@
import { db } from '@sim/db'
import { workflow, workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
-import { and, eq } from 'drizzle-orm'
+import { and, eq, isNull, min } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { duplicateWorkflow } from '@/lib/workflows/persistence/duplicate'
@@ -36,7 +37,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:

    logger.info(`[${requestId}] Duplicating folder ${sourceFolderId} for user ${session.user.id}`)

-    // Verify the source folder exists
    const sourceFolder = await db
      .select()
      .from(workflowFolder)
@@ -47,7 +47,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      throw new Error('Source folder not found')
    }

-    // Check if user has permission to access the source folder
    const userPermission = await getUserEntityPermissions(
      session.user.id,
      'workspace',
@@ -60,26 +59,51 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:

    const targetWorkspaceId = workspaceId || sourceFolder.workspaceId

-    // Step 1: Duplicate folder structure
    const { newFolderId, folderMapping } = await db.transaction(async (tx) => {
      const newFolderId = crypto.randomUUID()
      const now = new Date()
+      const targetParentId = parentId ?? sourceFolder.parentId
+
+      const folderParentCondition = targetParentId
+        ? eq(workflowFolder.parentId, targetParentId)
+        : isNull(workflowFolder.parentId)
+      const workflowParentCondition = targetParentId
+        ? eq(workflow.folderId, targetParentId)
+        : isNull(workflow.folderId)
+
+      const [[folderResult], [workflowResult]] = await Promise.all([
+        tx
+          .select({ minSortOrder: min(workflowFolder.sortOrder) })
+          .from(workflowFolder)
+          .where(and(eq(workflowFolder.workspaceId, targetWorkspaceId), folderParentCondition)),
+        tx
+          .select({ minSortOrder: min(workflow.sortOrder) })
+          .from(workflow)
+          .where(and(eq(workflow.workspaceId, targetWorkspaceId), workflowParentCondition)),
+      ])
+
+      const minSortOrder = [folderResult?.minSortOrder, workflowResult?.minSortOrder].reduce<
+        number | null
+      >((currentMin, candidate) => {
+        if (candidate == null) return currentMin
+        if (currentMin == null) return candidate
+        return Math.min(currentMin, candidate)
+      }, null)
+      const sortOrder = minSortOrder != null ? minSortOrder - 1 : 0

-      // Create the new root folder
      await tx.insert(workflowFolder).values({
        id: newFolderId,
        userId: session.user.id,
        workspaceId: targetWorkspaceId,
        name,
        color: color || sourceFolder.color,
-        parentId: parentId || sourceFolder.parentId,
-        sortOrder: sourceFolder.sortOrder,
+        parentId: targetParentId,
+        sortOrder,
        isExpanded: false,
        createdAt: now,
        updatedAt: now,
      })

-      // Recursively duplicate child folders
      const folderMapping = new Map<string, string>([[sourceFolderId, newFolderId]])
      await duplicateFolderStructure(
        tx,
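Both this route and the folder-creation route later in this compare now compute the smallest existing `sortOrder` across sibling folders *and* workflows, then slot the new item one position above it. The reduce-over-nullable-minimums step is easy to misread, so here is the same fold isolated as a standalone sketch with plain numbers in place of Drizzle results:

```typescript
// Standalone illustration of the nullable-min fold used above; SQL MIN()
// returns null for empty sets, which is why plain Math.min will not do.
function minSortOrder(candidates: Array<number | null | undefined>): number | null {
  return candidates.reduce<number | null>((currentMin, candidate) => {
    if (candidate == null) return currentMin // skip empty result sets
    if (currentMin == null) return candidate
    return Math.min(currentMin, candidate)
  }, null)
}

// New items are inserted above the current minimum, or at 0 in an empty parent.
const m = minSortOrder([5, null])
const sortOrder = m != null ? m - 1 : 0
console.log(sortOrder) // 4
```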
@@ -95,7 +119,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      return { newFolderId, folderMapping }
    })

-    // Step 2: Duplicate workflows
    const workflowStats = await duplicateWorkflowsInFolderTree(
      sourceFolder.workspaceId,
      targetWorkspaceId,
@@ -115,6 +138,19 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      }
    )

+    recordAudit({
+      workspaceId: targetWorkspaceId,
+      actorId: session.user.id,
+      action: AuditAction.FOLDER_DUPLICATED,
+      resourceType: AuditResourceType.FOLDER,
+      resourceId: newFolderId,
+      actorName: session.user.name ?? undefined,
+      actorEmail: session.user.email ?? undefined,
+      resourceName: name,
+      description: `Duplicated folder "${sourceFolder.name}" as "${name}"`,
+      request: req,
+    })
+
    return NextResponse.json(
      {
        id: newFolderId,
@@ -159,7 +195,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    }
  }

-// Helper to recursively duplicate folder structure
async function duplicateFolderStructure(
  tx: any,
  sourceFolderId: string,
@@ -170,7 +205,6 @@ async function duplicateFolderStructure(
  timestamp: Date,
  folderMapping: Map<string, string>
): Promise<void> {
-  // Get all child folders
  const childFolders = await tx
    .select()
    .from(workflowFolder)
@@ -181,7 +215,6 @@ async function duplicateFolderStructure(
      )
    )

-  // Create each child folder and recurse
  for (const childFolder of childFolders) {
    const newChildFolderId = crypto.randomUUID()
    folderMapping.set(childFolder.id, newChildFolderId)
@@ -199,7 +232,6 @@ async function duplicateFolderStructure(
      updatedAt: timestamp,
    })

-    // Recurse for this child's children
    await duplicateFolderStructure(
      tx,
      childFolder.id,
@@ -213,7 +245,6 @@ async function duplicateFolderStructure(
  }
}

-// Helper to duplicate all workflows in a folder tree
async function duplicateWorkflowsInFolderTree(
  sourceWorkspaceId: string,
  targetWorkspaceId: string,
@@ -223,9 +254,7 @@ async function duplicateWorkflowsInFolderTree(
): Promise<{ total: number; succeeded: number; failed: number }> {
  const stats = { total: 0, succeeded: 0, failed: 0 }

-  // Process each folder in the mapping
  for (const [oldFolderId, newFolderId] of folderMapping.entries()) {
-    // Get workflows in this folder
    const workflowsInFolder = await db
      .select()
      .from(workflow)
@@ -233,7 +262,6 @@ async function duplicateWorkflowsInFolderTree(

    stats.total += workflowsInFolder.length

-    // Duplicate each workflow
    for (const sourceWorkflow of workflowsInFolder) {
      try {
        await duplicateWorkflow({
@@ -4,6 +4,7 @@
 * @vitest-environment node
 */
import {
+  auditMock,
  createMockRequest,
  type MockUser,
  mockAuth,
@@ -12,6 +13,8 @@ import {
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

+vi.mock('@/lib/audit/log', () => auditMock)
+
/** Type for captured folder values in tests */
interface CapturedFolderValues {
  name?: string
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'

@@ -167,6 +168,19 @@ export async function DELETE(
      deletionStats,
    })

+    recordAudit({
+      workspaceId: existingFolder.workspaceId,
+      actorId: session.user.id,
+      actorName: session.user.name,
+      actorEmail: session.user.email,
+      action: AuditAction.FOLDER_DELETED,
+      resourceType: AuditResourceType.FOLDER,
+      resourceId: id,
+      resourceName: existingFolder.name,
+      description: `Deleted folder "${existingFolder.name}"`,
+      request,
+    })
+
    return NextResponse.json({
      success: true,
      deletedItems: deletionStats,
@@ -3,9 +3,22 @@
 *
 * @vitest-environment node
 */
-import { createMockRequest, mockAuth, mockConsoleLogger, setupCommonApiMocks } from '@sim/testing'
+import {
+  auditMock,
+  createMockRequest,
+  mockAuth,
+  mockConsoleLogger,
+  setupCommonApiMocks,
+} from '@sim/testing'
+import { drizzleOrmMock } from '@sim/testing/mocks'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

+vi.mock('@/lib/audit/log', () => auditMock)
+vi.mock('drizzle-orm', () => ({
+  ...drizzleOrmMock,
+  min: vi.fn((field) => ({ type: 'min', field })),
+}))
+
interface CapturedFolderValues {
  name?: string
  color?: string
@@ -16,29 +29,35 @@ interface CapturedFolderValues {
}

function createMockTransaction(mockData: {
-  selectData?: Array<{ id: string; [key: string]: unknown }>
+  selectResults?: Array<Array<{ [key: string]: unknown }>>
  insertResult?: Array<{ id: string; [key: string]: unknown }>
+  onInsertValues?: (values: CapturedFolderValues) => void
}) {
-  const { selectData = [], insertResult = [] } = mockData
-  return vi.fn().mockImplementation(async (callback: (tx: unknown) => Promise<unknown>) => {
+  const { selectResults = [[], []], insertResult = [], onInsertValues } = mockData
+  return async (callback: (tx: unknown) => Promise<unknown>) => {
+    const where = vi.fn()
+    for (const result of selectResults) {
+      where.mockReturnValueOnce(result)
+    }
+    where.mockReturnValue([])
+
    const tx = {
      select: vi.fn().mockReturnValue({
        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            orderBy: vi.fn().mockReturnValue({
-              limit: vi.fn().mockReturnValue(selectData),
-            }),
-          }),
+          where,
        }),
      }),
      insert: vi.fn().mockReturnValue({
-        values: vi.fn().mockReturnValue({
-          returning: vi.fn().mockReturnValue(insertResult),
+        values: vi.fn().mockImplementation((values: CapturedFolderValues) => {
+          onInsertValues?.(values)
+          return {
+            returning: vi.fn().mockReturnValue(insertResult),
+          }
        }),
      }),
    }
    return await callback(tx)
-  })
+  }
}
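Usage note on the rewritten helper: `selectResults` queues one result per `select(...).where(...)` call, in call order, and falls back to `[]` once exhausted; in the folder routes the first select is the folder `MIN(sort_order)` and the second is the workflow one. For example (values here are illustrative, not from a real test):

```typescript
// Illustrative values: first select → folder minSortOrder, second → workflow
// minSortOrder; the insert then returns the given row and exposes its values.
mockTransaction.mockImplementationOnce(
  createMockTransaction({
    selectResults: [[{ minSortOrder: 3 }], [{ minSortOrder: 7 }]],
    insertResult: [{ id: 'folder-9', sortOrder: 2 }],
    onInsertValues: (values) => {
      expect(values.sortOrder).toBe(2) // min(3, 7) = 3, then 3 - 1 = 2
    },
  })
)
```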
describe('Folders API Route', () => {
@@ -249,25 +268,12 @@ describe('Folders API Route', () => {
  it('should create a new folder successfully', async () => {
    mockAuthenticatedUser()

-    mockTransaction.mockImplementationOnce(async (callback: any) => {
-      const tx = {
-        select: vi.fn().mockReturnValue({
-          from: vi.fn().mockReturnValue({
-            where: vi.fn().mockReturnValue({
-              orderBy: vi.fn().mockReturnValue({
-                limit: vi.fn().mockReturnValue([]), // No existing folders
-              }),
-            }),
-          }),
-        }),
-        insert: vi.fn().mockReturnValue({
-          values: vi.fn().mockReturnValue({
-            returning: vi.fn().mockReturnValue([mockFolders[0]]),
-          }),
-        }),
-      }
-      return await callback(tx)
-    })
+    mockTransaction.mockImplementationOnce(
+      createMockTransaction({
+        selectResults: [[], []],
+        insertResult: [mockFolders[0]],
+      })
+    )

    const req = createMockRequest('POST', {
      name: 'New Test Folder',
@@ -277,12 +283,11 @@ describe('Folders API Route', () => {

    const { POST } = await import('@/app/api/folders/route')
    const response = await POST(req)
+    const responseBody = await response.json()

    expect(response.status).toBe(200)

-    const data = await response.json()
-    expect(data).toHaveProperty('folder')
-    expect(data.folder).toMatchObject({
+    expect(responseBody).toHaveProperty('folder')
+    expect(responseBody.folder).toMatchObject({
      id: 'folder-1',
      name: 'Test Folder 1',
      workspaceId: 'workspace-123',
@@ -291,26 +296,17 @@ describe('Folders API Route', () => {

  it('should create folder with correct sort order', async () => {
    mockAuthenticatedUser()
+    let capturedValues: CapturedFolderValues | null = null

-    mockTransaction.mockImplementationOnce(async (callback: any) => {
-      const tx = {
-        select: vi.fn().mockReturnValue({
-          from: vi.fn().mockReturnValue({
-            where: vi.fn().mockReturnValue({
-              orderBy: vi.fn().mockReturnValue({
-                limit: vi.fn().mockReturnValue([{ sortOrder: 5 }]), // Existing folder with sort order 5
-              }),
-            }),
-          }),
-        }),
-        insert: vi.fn().mockReturnValue({
-          values: vi.fn().mockReturnValue({
-            returning: vi.fn().mockReturnValue([{ ...mockFolders[0], sortOrder: 6 }]),
-          }),
-        }),
-      }
-      return await callback(tx)
-    })
+    mockTransaction.mockImplementationOnce(
+      createMockTransaction({
+        selectResults: [[{ minSortOrder: 5 }], [{ minSortOrder: 2 }]],
+        insertResult: [{ ...mockFolders[0], sortOrder: 1 }],
+        onInsertValues: (values) => {
+          capturedValues = values
+        },
+      })
+    )

    const req = createMockRequest('POST', {
      name: 'New Test Folder',
@@ -324,8 +320,10 @@ describe('Folders API Route', () => {

    const data = await response.json()
    expect(data.folder).toMatchObject({
-      sortOrder: 6,
+      sortOrder: 1,
    })
+    expect(capturedValues).not.toBeNull()
+    expect(capturedValues!.sortOrder).toBe(1)
  })

  it('should create subfolder with parent reference', async () => {
@@ -333,7 +331,7 @@ describe('Folders API Route', () => {

    mockTransaction.mockImplementationOnce(
      createMockTransaction({
-        selectData: [], // No existing folders
+        selectResults: [[], []],
        insertResult: [{ ...mockFolders[1] }],
      })
    )
@@ -394,25 +392,12 @@ describe('Folders API Route', () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue('write') // Write permissions

-    mockTransaction.mockImplementationOnce(async (callback: any) => {
-      const tx = {
-        select: vi.fn().mockReturnValue({
-          from: vi.fn().mockReturnValue({
-            where: vi.fn().mockReturnValue({
-              orderBy: vi.fn().mockReturnValue({
-                limit: vi.fn().mockReturnValue([]), // No existing folders
-              }),
-            }),
-          }),
-        }),
-        insert: vi.fn().mockReturnValue({
-          values: vi.fn().mockReturnValue({
-            returning: vi.fn().mockReturnValue([mockFolders[0]]),
-          }),
-        }),
-      }
-      return await callback(tx)
-    })
+    mockTransaction.mockImplementationOnce(
+      createMockTransaction({
+        selectResults: [[], []],
+        insertResult: [mockFolders[0]],
+      })
+    )

    const req = createMockRequest('POST', {
      name: 'Test Folder',
@@ -432,25 +417,12 @@ describe('Folders API Route', () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue('admin') // Admin permissions

-    mockTransaction.mockImplementationOnce(async (callback: any) => {
-      const tx = {
-        select: vi.fn().mockReturnValue({
-          from: vi.fn().mockReturnValue({
-            where: vi.fn().mockReturnValue({
-              orderBy: vi.fn().mockReturnValue({
-                limit: vi.fn().mockReturnValue([]), // No existing folders
-              }),
-            }),
-          }),
-        }),
-        insert: vi.fn().mockReturnValue({
-          values: vi.fn().mockReturnValue({
-            returning: vi.fn().mockReturnValue([mockFolders[0]]),
-          }),
-        }),
-      }
-      return await callback(tx)
-    })
+    mockTransaction.mockImplementationOnce(
+      createMockTransaction({
+        selectResults: [[], []],
+        insertResult: [mockFolders[0]],
+      })
+    )

    const req = createMockRequest('POST', {
      name: 'Test Folder',
@@ -519,28 +491,15 @@ describe('Folders API Route', () => {

    let capturedValues: CapturedFolderValues | null = null

-    mockTransaction.mockImplementationOnce(async (callback: any) => {
-      const tx = {
-        select: vi.fn().mockReturnValue({
-          from: vi.fn().mockReturnValue({
-            where: vi.fn().mockReturnValue({
-              orderBy: vi.fn().mockReturnValue({
-                limit: vi.fn().mockReturnValue([]),
-              }),
-            }),
-          }),
-        }),
-        insert: vi.fn().mockReturnValue({
-          values: vi.fn().mockImplementation((values) => {
-            capturedValues = values
-            return {
-              returning: vi.fn().mockReturnValue([mockFolders[0]]),
-            }
-          }),
-        }),
-      }
-      return await callback(tx)
-    })
+    mockTransaction.mockImplementationOnce(
+      createMockTransaction({
+        selectResults: [[], []],
+        insertResult: [mockFolders[0]],
+        onInsertValues: (values) => {
+          capturedValues = values
+        },
+      })
+    )

    const req = createMockRequest('POST', {
      name: ' Test Folder With Spaces ',
@@ -559,28 +518,15 @@ describe('Folders API Route', () => {

    let capturedValues: CapturedFolderValues | null = null

-    mockTransaction.mockImplementationOnce(async (callback: any) => {
-      const tx = {
-        select: vi.fn().mockReturnValue({
-          from: vi.fn().mockReturnValue({
-            where: vi.fn().mockReturnValue({
-              orderBy: vi.fn().mockReturnValue({
-                limit: vi.fn().mockReturnValue([]),
-              }),
-            }),
-          }),
-        }),
-        insert: vi.fn().mockReturnValue({
-          values: vi.fn().mockImplementation((values) => {
-            capturedValues = values
-            return {
-              returning: vi.fn().mockReturnValue([mockFolders[0]]),
-            }
-          }),
-        }),
-      }
-      return await callback(tx)
-    })
+    mockTransaction.mockImplementationOnce(
+      createMockTransaction({
+        selectResults: [[], []],
+        insertResult: [mockFolders[0]],
+        onInsertValues: (values) => {
+          capturedValues = values
+        },
+      })
+    )

    const req = createMockRequest('POST', {
      name: 'Test Folder',
@@ -1,8 +1,9 @@
import { db } from '@sim/db'
-import { workflowFolder } from '@sim/db/schema'
+import { workflow, workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
-import { and, asc, desc, eq, isNull } from 'drizzle-orm'
+import { and, asc, eq, isNull, min } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
+import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'

@@ -86,19 +87,33 @@ export async function POST(request: NextRequest) {
      if (providedSortOrder !== undefined) {
        sortOrder = providedSortOrder
      } else {
-        const existingFolders = await tx
-          .select({ sortOrder: workflowFolder.sortOrder })
-          .from(workflowFolder)
-          .where(
-            and(
-              eq(workflowFolder.workspaceId, workspaceId),
-              parentId ? eq(workflowFolder.parentId, parentId) : isNull(workflowFolder.parentId)
-            )
-          )
-          .orderBy(desc(workflowFolder.sortOrder))
-          .limit(1)
+        const folderParentCondition = parentId
+          ? eq(workflowFolder.parentId, parentId)
+          : isNull(workflowFolder.parentId)
+        const workflowParentCondition = parentId
+          ? eq(workflow.folderId, parentId)
+          : isNull(workflow.folderId)

-        sortOrder = existingFolders.length > 0 ? existingFolders[0].sortOrder + 1 : 0
+        const [[folderResult], [workflowResult]] = await Promise.all([
+          tx
+            .select({ minSortOrder: min(workflowFolder.sortOrder) })
+            .from(workflowFolder)
+            .where(and(eq(workflowFolder.workspaceId, workspaceId), folderParentCondition)),
+          tx
+            .select({ minSortOrder: min(workflow.sortOrder) })
+            .from(workflow)
+            .where(and(eq(workflow.workspaceId, workspaceId), workflowParentCondition)),
+        ])
+
+        const minSortOrder = [folderResult?.minSortOrder, workflowResult?.minSortOrder].reduce<
+          number | null
+        >((currentMin, candidate) => {
+          if (candidate == null) return currentMin
+          if (currentMin == null) return candidate
+          return Math.min(currentMin, candidate)
+        }, null)
+
+        sortOrder = minSortOrder != null ? minSortOrder - 1 : 0
      }

      const [folder] = await tx
@@ -119,6 +134,20 @@ export async function POST(request: NextRequest) {

    logger.info('Created new folder:', { id, name, workspaceId, parentId })

+    recordAudit({
+      workspaceId,
+      actorId: session.user.id,
+      actorName: session.user.name,
+      actorEmail: session.user.email,
+      action: AuditAction.FOLDER_CREATED,
+      resourceType: AuditResourceType.FOLDER,
+      resourceId: id,
+      resourceName: name.trim(),
+      description: `Created folder "${name.trim()}"`,
+      metadata: { name: name.trim() },
+      request,
+    })
+
    return NextResponse.json({ folder: newFolder })
  } catch (error) {
    logger.error('Error creating folder:', { error })
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { checkFormAccess, DEFAULT_FORM_CUSTOMIZATIONS } from '@/app/api/form/utils'
|
||||
@@ -102,7 +103,11 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
|
||||
|
||||
const { id } = await params
|
||||
|
||||
const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id)
|
||||
const {
|
||||
hasAccess,
|
||||
form: formRecord,
|
||||
workspaceId: formWorkspaceId,
|
||||
} = await checkFormAccess(id, session.user.id)
|
||||
|
||||
if (!hasAccess || !formRecord) {
|
||||
return createErrorResponse('Form not found or access denied', 404)
|
||||
@@ -184,6 +189,19 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<

    logger.info(`Form ${id} updated successfully`)

    recordAudit({
      workspaceId: formWorkspaceId ?? null,
      actorId: session.user.id,
      action: AuditAction.FORM_UPDATED,
      resourceType: AuditResourceType.FORM,
      resourceId: id,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      resourceName: formRecord.title ?? undefined,
      description: `Updated form "${formRecord.title}"`,
      request,
    })

    return createSuccessResponse({
      message: 'Form updated successfully',
    })
@@ -213,7 +231,11 @@ export async function DELETE(

    const { id } = await params

    const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id)
    const {
      hasAccess,
      form: formRecord,
      workspaceId: formWorkspaceId,
    } = await checkFormAccess(id, session.user.id)

    if (!hasAccess || !formRecord) {
      return createErrorResponse('Form not found or access denied', 404)
@@ -223,6 +245,19 @@ export async function DELETE(

    logger.info(`Form ${id} deleted (soft delete)`)

    recordAudit({
      workspaceId: formWorkspaceId ?? null,
      actorId: session.user.id,
      action: AuditAction.FORM_DELETED,
      resourceType: AuditResourceType.FORM,
      resourceId: id,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      resourceName: formRecord.title ?? undefined,
      description: `Deleted form "${formRecord.title}"`,
      request,
    })

    return createSuccessResponse({
      message: 'Form deleted successfully',
    })

@@ -5,6 +5,7 @@ import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { isDev } from '@/lib/core/config/feature-flags'
import { encryptSecret } from '@/lib/core/security/encryption'
@@ -178,7 +179,7 @@ export async function POST(request: NextRequest) {
      userId: session.user.id,
      identifier,
      title,
      description: description || '',
      description: description || null,
      customizations: mergedCustomizations,
      isActive: true,
      authType,
@@ -195,6 +196,19 @@ export async function POST(request: NextRequest) {

    logger.info(`Form "${title}" deployed successfully at ${formUrl}`)

    recordAudit({
      workspaceId: workflowRecord.workspaceId ?? null,
      actorId: session.user.id,
      action: AuditAction.FORM_CREATED,
      resourceType: AuditResourceType.FORM,
      resourceId: id,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      resourceName: title,
      description: `Created form "${title}" for workflow ${workflowId}`,
      request,
    })

    return createSuccessResponse({
      id,
      formUrl,

@@ -52,7 +52,7 @@ export async function checkWorkflowAccessForFormCreation(
export async function checkFormAccess(
  formId: string,
  userId: string
): Promise<{ hasAccess: boolean; form?: any }> {
): Promise<{ hasAccess: boolean; form?: any; workspaceId?: string }> {
  const formData = await db
    .select({ form: form, workflowWorkspaceId: workflow.workspaceId })
    .from(form)
@@ -75,7 +75,9 @@ export async function checkFormAccess(
    action: 'admin',
  })

  return authorization.allowed ? { hasAccess: true, form: formRecord } : { hasAccess: false }
  return authorization.allowed
    ? { hasAccess: true, form: formRecord, workspaceId: workflowWorkspaceId }
    : { hasAccess: false }
}

export async function validateFormAuth(

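The widened return keeps `workspaceId` optional, so callers must still check `hasAccess` and `form` before using it. As an aside, a discriminated union would let TypeScript do that narrowing automatically; a sketch of that alternative shape only (not what this change does, and `FormRecord` is a hypothetical stand-in for the `any` used here):

```typescript
// Alternative sketch: a discriminated union instead of optional fields.
type FormRecord = Record<string, unknown> // stand-in for the route's form row

type FormAccessResult =
  | { hasAccess: true; form: FormRecord; workspaceId?: string }
  | { hasAccess: false }

// After `if (!result.hasAccess) return ...`, the compiler knows `result.form` exists.
```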
@@ -1,300 +0,0 @@
/**
 * @vitest-environment node
 */
import { createMockRequest, mockConsoleLogger, mockDrizzleOrm } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@sim/db/schema', () => ({
  document: {
    id: 'id',
    connectorId: 'connectorId',
    deletedAt: 'deletedAt',
    filename: 'filename',
    externalId: 'externalId',
    sourceUrl: 'sourceUrl',
    enabled: 'enabled',
    userExcluded: 'userExcluded',
    uploadedAt: 'uploadedAt',
    processingStatus: 'processingStatus',
  },
  knowledgeConnector: {
    id: 'id',
    knowledgeBaseId: 'knowledgeBaseId',
    deletedAt: 'deletedAt',
  },
}))

vi.mock('@/app/api/knowledge/utils', () => ({
  checkKnowledgeBaseAccess: vi.fn(),
  checkKnowledgeBaseWriteAccess: vi.fn(),
}))
vi.mock('@/lib/auth/hybrid', () => ({
  checkSessionOrInternalAuth: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
  generateRequestId: vi.fn().mockReturnValue('test-req-id'),
}))

mockDrizzleOrm()
mockConsoleLogger()

describe('Connector Documents API Route', () => {
  /**
   * The route chains db calls in sequence. We track call order
   * to return different values for connector lookup vs document queries.
   */
  let limitCallCount: number
  let orderByCallCount: number

  const mockDbChain = {
    select: vi.fn().mockReturnThis(),
    from: vi.fn().mockReturnThis(),
    where: vi.fn().mockReturnThis(),
    orderBy: vi.fn(() => {
      orderByCallCount++
      return Promise.resolve([])
    }),
    limit: vi.fn(() => {
      limitCallCount++
      return Promise.resolve([])
    }),
    update: vi.fn().mockReturnThis(),
    set: vi.fn().mockReturnThis(),
    returning: vi.fn().mockResolvedValue([]),
  }

  const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' })

  beforeEach(() => {
    vi.clearAllMocks()
    limitCallCount = 0
    orderByCallCount = 0
    mockDbChain.select.mockReturnThis()
    mockDbChain.from.mockReturnThis()
    mockDbChain.where.mockReturnThis()
    mockDbChain.orderBy.mockImplementation(() => {
      orderByCallCount++
      return Promise.resolve([])
    })
    mockDbChain.limit.mockImplementation(() => {
      limitCallCount++
      return Promise.resolve([])
    })
    mockDbChain.update.mockReturnThis()
    mockDbChain.set.mockReturnThis()
    mockDbChain.returning.mockResolvedValue([])

    vi.doMock('@sim/db', () => ({ db: mockDbChain }))
  })

  afterEach(() => {
    vi.clearAllMocks()
  })

  describe('GET', () => {
    it('returns 401 when unauthenticated', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: false,
        userId: null,
      } as never)

      const req = createMockRequest('GET')
      const { GET } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await GET(req as never, { params: mockParams })

      expect(response.status).toBe(401)
    })

    it('returns 404 when connector not found', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true } as never)

      mockDbChain.limit.mockResolvedValueOnce([])

      const req = createMockRequest('GET')
      const { GET } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await GET(req as never, { params: mockParams })

      expect(response.status).toBe(404)
    })

    it('returns documents list on success', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true } as never)

      const doc = { id: 'doc-1', filename: 'test.txt', userExcluded: false }
      mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
      mockDbChain.orderBy.mockResolvedValueOnce([doc])

      const url = 'http://localhost/api/knowledge/kb-123/connectors/conn-456/documents'
      const req = createMockRequest('GET', undefined, undefined, url)
      Object.assign(req, { nextUrl: new URL(url) })
      const { GET } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await GET(req as never, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data.documents).toHaveLength(1)
      expect(data.data.counts.active).toBe(1)
      expect(data.data.counts.excluded).toBe(0)
    })

    it('includes excluded documents when includeExcluded=true', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true } as never)

      mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
      mockDbChain.orderBy
        .mockResolvedValueOnce([{ id: 'doc-1', userExcluded: false }])
        .mockResolvedValueOnce([{ id: 'doc-2', userExcluded: true }])

      const url =
        'http://localhost/api/knowledge/kb-123/connectors/conn-456/documents?includeExcluded=true'
      const req = createMockRequest('GET', undefined, undefined, url)
      Object.assign(req, { nextUrl: new URL(url) })
      const { GET } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await GET(req as never, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data.documents).toHaveLength(2)
      expect(data.data.counts.active).toBe(1)
      expect(data.data.counts.excluded).toBe(1)
    })
  })

  describe('PATCH', () => {
    it('returns 401 when unauthenticated', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: false,
        userId: null,
      } as never)

      const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] })
      const { PATCH } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await PATCH(req as never, { params: mockParams })

      expect(response.status).toBe(401)
    })

    it('returns 400 for invalid body', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
      mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])

      const req = createMockRequest('PATCH', { documentIds: [] })
      const { PATCH } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await PATCH(req as never, { params: mockParams })

      expect(response.status).toBe(400)
    })

    it('returns 404 when connector not found', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
      mockDbChain.limit.mockResolvedValueOnce([])

      const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] })
      const { PATCH } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await PATCH(req as never, { params: mockParams })

      expect(response.status).toBe(404)
    })

    it('returns success for restore operation', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
      mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
      mockDbChain.returning.mockResolvedValueOnce([{ id: 'doc-1' }])

      const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] })
      const { PATCH } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await PATCH(req as never, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data.restoredCount).toBe(1)
    })

    it('returns success for exclude operation', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
      mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
      mockDbChain.returning.mockResolvedValueOnce([{ id: 'doc-2' }, { id: 'doc-3' }])

      const req = createMockRequest('PATCH', {
        operation: 'exclude',
        documentIds: ['doc-2', 'doc-3'],
      })
      const { PATCH } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await PATCH(req as never, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data.excludedCount).toBe(2)
      expect(data.data.documentIds).toEqual(['doc-2', 'doc-3'])
    })
  })
})
@@ -1,210 +0,0 @@
import { db } from '@sim/db'
import { document, knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'

const logger = createLogger('ConnectorDocumentsAPI')

type RouteParams = { params: Promise<{ id: string; connectorId: string }> }

/**
 * GET /api/knowledge/[id]/connectors/[connectorId]/documents
 * Returns documents for a connector, optionally including user-excluded ones.
 */
export async function GET(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
    if (!accessCheck.hasAccess) {
      const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectorRows = await db
      .select({ id: knowledgeConnector.id })
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    if (connectorRows.length === 0) {
      return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
    }

    const includeExcluded = request.nextUrl.searchParams.get('includeExcluded') === 'true'

    const activeDocs = await db
      .select({
        id: document.id,
        filename: document.filename,
        externalId: document.externalId,
        sourceUrl: document.sourceUrl,
        enabled: document.enabled,
        userExcluded: document.userExcluded,
        uploadedAt: document.uploadedAt,
        processingStatus: document.processingStatus,
      })
      .from(document)
      .where(
        and(
          eq(document.connectorId, connectorId),
          isNull(document.deletedAt),
          eq(document.userExcluded, false)
        )
      )
      .orderBy(document.filename)

    const excludedDocs = includeExcluded
      ? await db
          .select({
            id: document.id,
            filename: document.filename,
            externalId: document.externalId,
            sourceUrl: document.sourceUrl,
            enabled: document.enabled,
            userExcluded: document.userExcluded,
            uploadedAt: document.uploadedAt,
            processingStatus: document.processingStatus,
          })
          .from(document)
          .where(
            and(
              eq(document.connectorId, connectorId),
              eq(document.userExcluded, true),
              isNull(document.deletedAt)
            )
          )
          .orderBy(document.filename)
      : []

    const docs = [...activeDocs, ...excludedDocs]
    const activeCount = activeDocs.length
    const excludedCount = excludedDocs.length

    return NextResponse.json({
      success: true,
      data: {
        documents: docs,
        counts: { active: activeCount, excluded: excludedCount },
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching connector documents`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

const PatchSchema = z.object({
  operation: z.enum(['restore', 'exclude']),
  documentIds: z.array(z.string()).min(1),
})

/**
 * PATCH /api/knowledge/[id]/connectors/[connectorId]/documents
 * Restore or exclude connector documents.
 */
export async function PATCH(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectorRows = await db
      .select({ id: knowledgeConnector.id })
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    if (connectorRows.length === 0) {
      return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
    }

    const body = await request.json()
    const parsed = PatchSchema.safeParse(body)
    if (!parsed.success) {
      return NextResponse.json(
        { error: 'Invalid request', details: parsed.error.flatten() },
        { status: 400 }
      )
    }

    const { operation, documentIds } = parsed.data

    if (operation === 'restore') {
      const updated = await db
        .update(document)
        .set({ userExcluded: false, deletedAt: null, enabled: true })
        .where(
          and(
            eq(document.connectorId, connectorId),
            inArray(document.id, documentIds),
            eq(document.userExcluded, true)
          )
        )
        .returning({ id: document.id })

      logger.info(`[${requestId}] Restored ${updated.length} excluded documents`, { connectorId })

      return NextResponse.json({
        success: true,
        data: { restoredCount: updated.length, documentIds: updated.map((d) => d.id) },
      })
    }

    const updated = await db
      .update(document)
      .set({ userExcluded: true })
      .where(
        and(
          eq(document.connectorId, connectorId),
          inArray(document.id, documentIds),
          eq(document.userExcluded, false),
          isNull(document.deletedAt)
        )
      )
      .returning({ id: document.id })

    logger.info(`[${requestId}] Excluded ${updated.length} documents`, { connectorId })

    return NextResponse.json({
      success: true,
      data: { excludedCount: updated.length, documentIds: updated.map((d) => d.id) },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error updating connector documents`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
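For reference, hypothetical client calls against the two handlers above (the base URL and IDs are illustrative; the `includeExcluded` query param and the PATCH body shape come straight from the route and its `PatchSchema`):

```typescript
const base = '/api/knowledge/kb-123/connectors/conn-456/documents'

// List active documents plus user-excluded ones in a single response.
const listRes = await fetch(`${base}?includeExcluded=true`)
const { data } = await listRes.json()
// data.documents is the merged list; data.counts is { active, excluded }

// Exclude two documents from future syncs (PatchSchema requires at least one id).
await fetch(base, {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ operation: 'exclude', documentIds: ['doc-2', 'doc-3'] }),
})
```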
@@ -1,231 +0,0 @@
/**
 * @vitest-environment node
 */
import { createMockRequest, mockConsoleLogger, mockDrizzleOrm } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@/app/api/knowledge/utils', () => ({
  checkKnowledgeBaseAccess: vi.fn(),
  checkKnowledgeBaseWriteAccess: vi.fn(),
}))
vi.mock('@/lib/auth/hybrid', () => ({
  checkSessionOrInternalAuth: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
  generateRequestId: vi.fn().mockReturnValue('test-req-id'),
}))
vi.mock('@/app/api/auth/oauth/utils', () => ({
  refreshAccessTokenIfNeeded: vi.fn(),
}))
vi.mock('@/connectors/registry', () => ({
  CONNECTOR_REGISTRY: {
    jira: { validateConfig: vi.fn() },
  },
}))
vi.mock('@sim/db/schema', () => ({
  knowledgeBase: { id: 'id', userId: 'userId' },
  knowledgeConnector: {
    id: 'id',
    knowledgeBaseId: 'knowledgeBaseId',
    deletedAt: 'deletedAt',
    connectorType: 'connectorType',
    credentialId: 'credentialId',
  },
  knowledgeConnectorSyncLog: { connectorId: 'connectorId', startedAt: 'startedAt' },
}))

mockDrizzleOrm()
mockConsoleLogger()

describe('Knowledge Connector By ID API Route', () => {
  const mockDbChain = {
    select: vi.fn().mockReturnThis(),
    from: vi.fn().mockReturnThis(),
    where: vi.fn().mockReturnThis(),
    orderBy: vi.fn().mockReturnThis(),
    limit: vi.fn().mockResolvedValue([]),
    insert: vi.fn().mockReturnThis(),
    values: vi.fn().mockResolvedValue(undefined),
    update: vi.fn().mockReturnThis(),
    set: vi.fn().mockReturnThis(),
    returning: vi.fn().mockResolvedValue([]),
  }

  const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' })

  beforeEach(() => {
    vi.clearAllMocks()
    vi.resetModules()
    mockDbChain.select.mockReturnThis()
    mockDbChain.from.mockReturnThis()
    mockDbChain.where.mockReturnThis()
    mockDbChain.orderBy.mockReturnThis()
    mockDbChain.limit.mockResolvedValue([])
    mockDbChain.update.mockReturnThis()
    mockDbChain.set.mockReturnThis()

    vi.doMock('@sim/db', () => ({ db: mockDbChain }))
  })

  afterEach(() => {
    vi.clearAllMocks()
  })

  describe('GET', () => {
    it('returns 401 when unauthenticated', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: false, userId: null })

      const req = createMockRequest('GET')
      const { GET } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await GET(req, { params: mockParams })

      expect(response.status).toBe(401)
    })

    it('returns 404 when KB not found', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: false, notFound: true })

      const req = createMockRequest('GET')
      const { GET } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await GET(req, { params: mockParams })

      expect(response.status).toBe(404)
    })

    it('returns 404 when connector not found', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true })
      mockDbChain.limit.mockResolvedValueOnce([])

      const req = createMockRequest('GET')
      const { GET } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await GET(req, { params: mockParams })

      expect(response.status).toBe(404)
    })

    it('returns connector with sync logs on success', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true })

      const mockConnector = { id: 'conn-456', connectorType: 'jira', status: 'active' }
      const mockLogs = [{ id: 'log-1', status: 'completed' }]

      mockDbChain.limit.mockResolvedValueOnce([mockConnector]).mockResolvedValueOnce(mockLogs)

      const req = createMockRequest('GET')
      const { GET } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await GET(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.success).toBe(true)
      expect(data.data.id).toBe('conn-456')
      expect(data.data.syncLogs).toHaveLength(1)
    })
  })

  describe('PATCH', () => {
    it('returns 401 when unauthenticated', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: false, userId: null })

      const req = createMockRequest('PATCH', { status: 'paused' })
      const { PATCH } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await PATCH(req, { params: mockParams })

      expect(response.status).toBe(401)
    })

    it('returns 400 for invalid body', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true })

      const req = createMockRequest('PATCH', { syncIntervalMinutes: 'not a number' })
      const { PATCH } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await PATCH(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(data.error).toBe('Invalid request')
    })

    it('returns 404 when connector not found during sourceConfig validation', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true })
      mockDbChain.limit.mockResolvedValueOnce([])

      const req = createMockRequest('PATCH', { sourceConfig: { project: 'NEW' } })
      const { PATCH } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await PATCH(req, { params: mockParams })

      expect(response.status).toBe(404)
    })

    it('returns 200 and updates status', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true })

      const updatedConnector = { id: 'conn-456', status: 'paused', syncIntervalMinutes: 120 }
      mockDbChain.limit.mockResolvedValueOnce([updatedConnector])

      const req = createMockRequest('PATCH', { status: 'paused', syncIntervalMinutes: 120 })
      const { PATCH } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await PATCH(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.success).toBe(true)
      expect(data.data.status).toBe('paused')
    })
  })

  describe('DELETE', () => {
    it('returns 401 when unauthenticated', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: false, userId: null })

      const req = createMockRequest('DELETE')
      const { DELETE } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await DELETE(req, { params: mockParams })

      expect(response.status).toBe(401)
    })

    it('returns 200 on successful soft-delete', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true })

      const req = createMockRequest('DELETE')
      const { DELETE } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await DELETE(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.success).toBe(true)
    })
  })
})
@@ -1,248 +0,0 @@
import { db } from '@sim/db'
import { knowledgeBase, knowledgeConnector, knowledgeConnectorSyncLog } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'

const logger = createLogger('KnowledgeConnectorByIdAPI')

type RouteParams = { params: Promise<{ id: string; connectorId: string }> }

const UpdateConnectorSchema = z.object({
  sourceConfig: z.record(z.unknown()).optional(),
  syncIntervalMinutes: z.number().int().min(0).optional(),
  status: z.enum(['active', 'paused']).optional(),
})

/**
 * GET /api/knowledge/[id]/connectors/[connectorId] - Get connector details with recent sync logs
 */
export async function GET(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
    if (!accessCheck.hasAccess) {
      const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectorRows = await db
      .select()
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    if (connectorRows.length === 0) {
      return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
    }

    const syncLogs = await db
      .select()
      .from(knowledgeConnectorSyncLog)
      .where(eq(knowledgeConnectorSyncLog.connectorId, connectorId))
      .orderBy(desc(knowledgeConnectorSyncLog.startedAt))
      .limit(10)

    return NextResponse.json({
      success: true,
      data: {
        ...connectorRows[0],
        syncLogs,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching connector`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * PATCH /api/knowledge/[id]/connectors/[connectorId] - Update a connector
 */
export async function PATCH(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const body = await request.json()
    const parsed = UpdateConnectorSchema.safeParse(body)
    if (!parsed.success) {
      return NextResponse.json(
        { error: 'Invalid request', details: parsed.error.flatten() },
        { status: 400 }
      )
    }

    if (parsed.data.sourceConfig !== undefined) {
      const existingRows = await db
        .select()
        .from(knowledgeConnector)
        .where(
          and(
            eq(knowledgeConnector.id, connectorId),
            eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
            isNull(knowledgeConnector.deletedAt)
          )
        )
        .limit(1)

      if (existingRows.length === 0) {
        return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
      }

      const existing = existingRows[0]
      const connectorConfig = CONNECTOR_REGISTRY[existing.connectorType]

      if (!connectorConfig) {
        return NextResponse.json(
          { error: `Unknown connector type: ${existing.connectorType}` },
          { status: 400 }
        )
      }

      const kbRows = await db
        .select({ userId: knowledgeBase.userId })
        .from(knowledgeBase)
        .where(eq(knowledgeBase.id, knowledgeBaseId))
        .limit(1)

      if (kbRows.length === 0) {
        return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
      }

      const accessToken = await refreshAccessTokenIfNeeded(
        existing.credentialId,
        kbRows[0].userId,
        `patch-${connectorId}`
      )

      if (!accessToken) {
        return NextResponse.json(
          { error: 'Failed to refresh access token. Please reconnect your account.' },
          { status: 401 }
        )
      }

      const validation = await connectorConfig.validateConfig(accessToken, parsed.data.sourceConfig)
      if (!validation.valid) {
        return NextResponse.json(
          { error: validation.error || 'Invalid source configuration' },
          { status: 400 }
        )
      }
    }

    const updates: Record<string, unknown> = { updatedAt: new Date() }
    if (parsed.data.sourceConfig !== undefined) {
      updates.sourceConfig = parsed.data.sourceConfig
    }
    if (parsed.data.syncIntervalMinutes !== undefined) {
      updates.syncIntervalMinutes = parsed.data.syncIntervalMinutes
      if (parsed.data.syncIntervalMinutes > 0) {
        updates.nextSyncAt = new Date(Date.now() + parsed.data.syncIntervalMinutes * 60 * 1000)
      } else {
        updates.nextSyncAt = null
      }
    }
    if (parsed.data.status !== undefined) {
      updates.status = parsed.data.status
    }

    await db
      .update(knowledgeConnector)
      .set(updates)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )

    const updated = await db
      .select()
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    return NextResponse.json({ success: true, data: updated[0] })
  } catch (error) {
    logger.error(`[${requestId}] Error updating connector`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * DELETE /api/knowledge/[id]/connectors/[connectorId] - Soft-delete a connector
 */
export async function DELETE(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    await db
      .update(knowledgeConnector)
      .set({ deletedAt: new Date(), status: 'paused', updatedAt: new Date() })
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )

    logger.info(`[${requestId}] Soft-deleted connector ${connectorId}`)

    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error(`[${requestId}] Error deleting connector`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
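A hypothetical client update against the PATCH handler above (IDs are illustrative; the field names come from `UpdateConnectorSchema`). Note that including `sourceConfig` triggers a token refresh and provider-side revalidation, so that path can return 400 or 401 even for a caller with write access:

```typescript
await fetch('/api/knowledge/kb-123/connectors/conn-456', {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    status: 'paused',
    // 0 disables scheduling: the route clears nextSyncAt instead of computing one.
    syncIntervalMinutes: 0,
  }),
})
```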
@@ -1,133 +0,0 @@
/**
 * @vitest-environment node
 */
import { createMockRequest, mockConsoleLogger, mockDrizzleOrm } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@sim/db/schema', () => ({
  knowledgeConnector: {
    id: 'id',
    knowledgeBaseId: 'knowledgeBaseId',
    deletedAt: 'deletedAt',
    status: 'status',
  },
}))

vi.mock('@/app/api/knowledge/utils', () => ({
  checkKnowledgeBaseWriteAccess: vi.fn(),
}))
vi.mock('@/lib/auth/hybrid', () => ({
  checkSessionOrInternalAuth: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
  generateRequestId: vi.fn().mockReturnValue('test-req-id'),
}))
vi.mock('@/lib/knowledge/connectors/sync-engine', () => ({
  dispatchSync: vi.fn().mockResolvedValue(undefined),
}))

mockDrizzleOrm()
mockConsoleLogger()

describe('Connector Manual Sync API Route', () => {
  const mockDbChain = {
    select: vi.fn().mockReturnThis(),
    from: vi.fn().mockReturnThis(),
    where: vi.fn().mockReturnThis(),
    orderBy: vi.fn().mockResolvedValue([]),
    limit: vi.fn().mockResolvedValue([]),
    update: vi.fn().mockReturnThis(),
    set: vi.fn().mockReturnThis(),
  }

  const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' })

  beforeEach(() => {
    vi.clearAllMocks()
    mockDbChain.select.mockReturnThis()
    mockDbChain.from.mockReturnThis()
    mockDbChain.where.mockReturnThis()
    mockDbChain.orderBy.mockResolvedValue([])
    mockDbChain.limit.mockResolvedValue([])
    mockDbChain.update.mockReturnThis()
    mockDbChain.set.mockReturnThis()

    vi.doMock('@sim/db', () => ({ db: mockDbChain }))
  })

  afterEach(() => {
    vi.clearAllMocks()
  })

  it('returns 401 when unauthenticated', async () => {
    const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
    vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
      success: false,
      userId: null,
    } as never)

    const req = createMockRequest('POST')
    const { POST } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route')
    const response = await POST(req as never, { params: mockParams })

    expect(response.status).toBe(401)
  })

  it('returns 404 when connector not found', async () => {
    const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
    const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

    vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
      success: true,
      userId: 'user-1',
    } as never)
    vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
    mockDbChain.limit.mockResolvedValueOnce([])

    const req = createMockRequest('POST')
    const { POST } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route')
    const response = await POST(req as never, { params: mockParams })

    expect(response.status).toBe(404)
  })

  it('returns 409 when connector is syncing', async () => {
    const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
    const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

    vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
      success: true,
      userId: 'user-1',
    } as never)
    vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
    mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456', status: 'syncing' }])

    const req = createMockRequest('POST')
    const { POST } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route')
    const response = await POST(req as never, { params: mockParams })

    expect(response.status).toBe(409)
  })

  it('dispatches sync on valid request', async () => {
    const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
    const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
    const { dispatchSync } = await import('@/lib/knowledge/connectors/sync-engine')

    vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
      success: true,
      userId: 'user-1',
    } as never)
    vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
    mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456', status: 'active' }])

    const req = createMockRequest('POST')
    const { POST } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route')
    const response = await POST(req as never, { params: mockParams })
    const data = await response.json()

    expect(response.status).toBe(200)
    expect(data.success).toBe(true)
    expect(vi.mocked(dispatchSync)).toHaveBeenCalledWith('conn-456', { requestId: 'test-req-id' })
  })
})
@@ -1,71 +0,0 @@
import { db } from '@sim/db'
import { knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
import { checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'

const logger = createLogger('ConnectorManualSyncAPI')

type RouteParams = { params: Promise<{ id: string; connectorId: string }> }

/**
 * POST /api/knowledge/[id]/connectors/[connectorId]/sync - Trigger a manual sync
 */
export async function POST(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectorRows = await db
      .select()
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    if (connectorRows.length === 0) {
      return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
    }

    if (connectorRows[0].status === 'syncing') {
      return NextResponse.json({ error: 'Sync already in progress' }, { status: 409 })
    }

    logger.info(`[${requestId}] Manual sync triggered for connector ${connectorId}`)

    dispatchSync(connectorId, { requestId }).catch((error) => {
      logger.error(
        `[${requestId}] Failed to dispatch manual sync for connector ${connectorId}`,
        error
      )
    })

    return NextResponse.json({
      success: true,
      message: 'Sync triggered',
    })
  } catch (error) {
    logger.error(`[${requestId}] Error triggering manual sync`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
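The handler responds as soon as the sync is dispatched: `dispatchSync` runs in the background and its failures are only logged, not surfaced to the caller. A hypothetical trigger call (IDs illustrative):

```typescript
const res = await fetch('/api/knowledge/kb-123/connectors/conn-456/sync', {
  method: 'POST',
})
if (res.status === 409) {
  // A sync is already in progress; poll connector status rather than retrying.
}
// 200 means "sync triggered", not "sync finished".
```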
@@ -1,204 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { knowledgeBaseTagDefinitions, knowledgeConnector } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, desc, eq, isNull } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
|
||||
import { allocateTagSlots } from '@/lib/knowledge/constants'
|
||||
import { createTagDefinition } from '@/lib/knowledge/tags/service'
|
||||
import { getCredential } from '@/app/api/auth/oauth/utils'
|
||||
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
|
||||
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
|
||||
|
||||
const logger = createLogger('KnowledgeConnectorsAPI')
|
||||
|
||||
const CreateConnectorSchema = z.object({
|
||||
connectorType: z.string().min(1),
|
||||
credentialId: z.string().min(1),
|
||||
sourceConfig: z.record(z.unknown()),
|
||||
syncIntervalMinutes: z.number().int().min(0).default(1440),
|
||||
})
|
||||
|
||||
/**
|
||||
* GET /api/knowledge/[id]/connectors - List connectors for a knowledge base
|
||||
*/
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = generateRequestId()
|
||||
const { id: knowledgeBaseId } = await params
|
||||
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
|
||||
if (!accessCheck.hasAccess) {
|
||||
const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401
|
||||
return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
|
||||
}
|
||||
|
||||
const connectors = await db
|
||||
.select()
|
||||
.from(knowledgeConnector)
|
||||
.where(
|
||||
and(
|
||||
eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
|
||||
isNull(knowledgeConnector.deletedAt)
|
||||
)
|
||||
)
|
||||
.orderBy(desc(knowledgeConnector.createdAt))
|
||||
|
||||
return NextResponse.json({ success: true, data: connectors })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error listing connectors`, error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
 * POST /api/knowledge/[id]/connectors - Create a new connector
 */
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const body = await request.json()
    const parsed = CreateConnectorSchema.safeParse(body)
    if (!parsed.success) {
      return NextResponse.json(
        { error: 'Invalid request', details: parsed.error.flatten() },
        { status: 400 }
      )
    }

    const { connectorType, credentialId, sourceConfig, syncIntervalMinutes } = parsed.data

    const connectorConfig = CONNECTOR_REGISTRY[connectorType]
    if (!connectorConfig) {
      return NextResponse.json(
        { error: `Unknown connector type: ${connectorType}` },
        { status: 400 }
      )
    }

    const credential = await getCredential(requestId, credentialId, auth.userId)
    if (!credential) {
      return NextResponse.json({ error: 'Credential not found' }, { status: 400 })
    }

    if (!credential.accessToken) {
      return NextResponse.json(
        { error: 'Credential has no access token. Please reconnect your account.' },
        { status: 400 }
      )
    }

    const validation = await connectorConfig.validateConfig(credential.accessToken, sourceConfig)
    if (!validation.valid) {
      return NextResponse.json(
        { error: validation.error || 'Invalid source configuration' },
        { status: 400 }
      )
    }

    let finalSourceConfig: Record<string, unknown> = sourceConfig
    const tagSlotMapping: Record<string, string> = {}

    if (connectorConfig.tagDefinitions?.length) {
      const disabledIds = new Set((sourceConfig.disabledTagIds as string[] | undefined) ?? [])
      const enabledDefs = connectorConfig.tagDefinitions.filter((td) => !disabledIds.has(td.id))

      const existingDefs = await db
        .select({ tagSlot: knowledgeBaseTagDefinitions.tagSlot })
        .from(knowledgeBaseTagDefinitions)
        .where(eq(knowledgeBaseTagDefinitions.knowledgeBaseId, knowledgeBaseId))

      const usedSlots = new Set<string>(existingDefs.map((d) => d.tagSlot))
      const { mapping, skipped: skippedTags } = allocateTagSlots(enabledDefs, usedSlots)
      Object.assign(tagSlotMapping, mapping)

      for (const name of skippedTags) {
        logger.warn(`[${requestId}] No available slots for "${name}"`)
      }

      if (skippedTags.length > 0 && Object.keys(tagSlotMapping).length === 0) {
        return NextResponse.json(
          { error: `No available tag slots. Could not assign: ${skippedTags.join(', ')}` },
          { status: 422 }
        )
      }

      finalSourceConfig = { ...sourceConfig, tagSlotMapping }
    }

    const now = new Date()
    const connectorId = crypto.randomUUID()
    const nextSyncAt =
      syncIntervalMinutes > 0 ? new Date(now.getTime() + syncIntervalMinutes * 60 * 1000) : null

    await db.transaction(async (tx) => {
      for (const [semanticId, slot] of Object.entries(tagSlotMapping)) {
        const td = connectorConfig.tagDefinitions!.find((d) => d.id === semanticId)!
        await createTagDefinition(
          {
            knowledgeBaseId,
            tagSlot: slot,
            displayName: td.displayName,
            fieldType: td.fieldType,
          },
          requestId,
          tx
        )
      }

      await tx.insert(knowledgeConnector).values({
        id: connectorId,
        knowledgeBaseId,
        connectorType,
        credentialId,
        sourceConfig: finalSourceConfig,
        syncIntervalMinutes,
        status: 'active',
        nextSyncAt,
        createdAt: now,
        updatedAt: now,
      })
    })

    logger.info(`[${requestId}] Created connector ${connectorId} for KB ${knowledgeBaseId}`)

    dispatchSync(connectorId, { requestId }).catch((error) => {
      logger.error(
        `[${requestId}] Failed to dispatch initial sync for connector ${connectorId}`,
        error
      )
    })

    const created = await db
      .select()
      .from(knowledgeConnector)
      .where(eq(knowledgeConnector.id, connectorId))
      .limit(1)

    return NextResponse.json({ success: true, data: created[0] }, { status: 201 })
  } catch (error) {
    logger.error(`[${requestId}] Error creating connector`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

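The slot allocation above maps each connector-defined semantic tag id onto a free knowledge-base tag slot, skipping tags once the slots run out. A minimal sketch of what `allocateTagSlots` could look like, assuming a fixed slot pool named `tag1` through `tag7` (the pool size, slot names, and `TagDefinition` shape are assumptions, not the actual implementation):

```typescript
// Hypothetical sketch of allocateTagSlots; the real slot pool and
// TagDefinition type in the codebase may differ.
interface TagDefinition {
  id: string
  displayName: string
  fieldType: string
}

const SLOT_POOL = ['tag1', 'tag2', 'tag3', 'tag4', 'tag5', 'tag6', 'tag7'] // assumed pool

function allocateTagSlots(
  defs: TagDefinition[],
  usedSlots: Set<string>
): { mapping: Record<string, string>; skipped: string[] } {
  const mapping: Record<string, string> = {}
  const skipped: string[] = []
  const free = SLOT_POOL.filter((slot) => !usedSlots.has(slot))

  for (const def of defs) {
    const slot = free.shift()
    if (slot) {
      mapping[def.id] = slot // semantic id -> concrete KB slot
    } else {
      skipped.push(def.displayName) // no slots left; caller logs, and 422s if nothing mapped
    }
  }
  return { mapping, skipped }
}
```
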
@@ -4,6 +4,7 @@
 * @vitest-environment node
 */
import {
  auditMock,
  createMockRequest,
  mockAuth,
  mockConsoleLogger,
@@ -35,6 +36,8 @@ vi.mock('@/lib/knowledge/documents/service', () => ({
mockDrizzleOrm()
mockConsoleLogger()

vi.mock('@/lib/audit/log', () => auditMock)

describe('Document By ID API Route', () => {
  const mockAuth$ = mockAuth()

@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import {
@@ -197,6 +198,19 @@ export async function PUT(
      `[${requestId}] Document updated: ${documentId} in knowledge base ${knowledgeBaseId}`
    )

    recordAudit({
      workspaceId: accessCheck.knowledgeBase?.workspaceId ?? null,
      actorId: userId,
      actorName: auth.userName,
      actorEmail: auth.userEmail,
      action: AuditAction.DOCUMENT_UPDATED,
      resourceType: AuditResourceType.DOCUMENT,
      resourceId: documentId,
      resourceName: validatedData.filename ?? accessCheck.document?.filename,
      description: `Updated document "${documentId}" in knowledge base "${knowledgeBaseId}"`,
      request: req,
    })

    return NextResponse.json({
      success: true,
      data: updatedDocument,
@@ -257,6 +271,19 @@ export async function DELETE(
      `[${requestId}] Document deleted: ${documentId} from knowledge base ${knowledgeBaseId}`
    )

    recordAudit({
      workspaceId: accessCheck.knowledgeBase?.workspaceId ?? null,
      actorId: userId,
      actorName: auth.userName,
      actorEmail: auth.userEmail,
      action: AuditAction.DOCUMENT_DELETED,
      resourceType: AuditResourceType.DOCUMENT,
      resourceId: documentId,
      resourceName: accessCheck.document?.filename,
      description: `Deleted document "${documentId}" from knowledge base "${knowledgeBaseId}"`,
      request: req,
    })

    return NextResponse.json({
      success: true,
      data: result,

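Note that `recordAudit` is never awaited in these handlers, so audit writes cannot block or fail the response path. A minimal sketch of a helper with that contract (the `RecordAuditParams` shape and the persistence details are assumptions inferred from the call sites):

```typescript
// Hypothetical fire-and-forget audit helper; the real @/lib/audit/log
// implementation may differ in shape and storage.
interface RecordAuditParams {
  workspaceId: string | null
  actorId: string
  actorName?: string
  actorEmail?: string
  action: string
  resourceType: string
  resourceId: string
  resourceName?: string
  description: string
  metadata?: Record<string, unknown>
  request?: Request
}

export function recordAudit(params: RecordAuditParams): void {
  // Swallow errors so auditing can never break the API response.
  void persistAuditRow(params).catch((error) => {
    console.error('audit write failed', error)
  })
}

async function persistAuditRow(params: RecordAuditParams): Promise<void> {
  void params
  // Placeholder persistence; the real implementation presumably inserts a DB row,
  // e.g. await db.insert(auditLog).values({ ...params, createdAt: new Date() })
}
```
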
@@ -4,6 +4,7 @@
 * @vitest-environment node
 */
import {
  auditMock,
  createMockRequest,
  mockAuth,
  mockConsoleLogger,
@@ -40,6 +41,8 @@ vi.mock('@/lib/knowledge/documents/service', () => ({
mockDrizzleOrm()
mockConsoleLogger()

vi.mock('@/lib/audit/log', () => auditMock)

describe('Knowledge Base Documents API Route', () => {
  const mockAuth$ = mockAuth()

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import {
@@ -12,7 +13,6 @@ import {
  getDocuments,
  getProcessingConfig,
  processDocumentsWithQueue,
  type TagFilterCondition,
} from '@/lib/knowledge/documents/service'
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
@@ -131,21 +131,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
        ? (sortOrderParam as SortOrder)
        : undefined

    let tagFilters: TagFilterCondition[] | undefined
    const tagFiltersParam = url.searchParams.get('tagFilters')
    if (tagFiltersParam) {
      try {
        const parsed = JSON.parse(tagFiltersParam)
        if (Array.isArray(parsed)) {
          tagFilters = parsed.filter(
            (f: TagFilterCondition) => f.tagSlot && f.operator && f.value !== undefined
          )
        }
      } catch {
        logger.warn(`[${requestId}] Invalid tagFilters param`)
      }
    }

    const result = await getDocuments(
      knowledgeBaseId,
      {
@@ -155,7 +140,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
        offset,
        ...(sortBy && { sortBy }),
        ...(sortOrder && { sortOrder }),
        tagFilters,
      },
      requestId
    )
@@ -261,6 +245,19 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      logger.error(`[${requestId}] Critical error in document processing pipeline:`, error)
    })

    recordAudit({
      workspaceId: accessCheck.knowledgeBase?.workspaceId ?? null,
      actorId: userId,
      actorName: auth.userName,
      actorEmail: auth.userEmail,
      action: AuditAction.DOCUMENT_UPLOADED,
      resourceType: AuditResourceType.DOCUMENT,
      resourceId: knowledgeBaseId,
      resourceName: `${createdDocuments.length} document(s)`,
      description: `Uploaded ${createdDocuments.length} document(s) to knowledge base "${knowledgeBaseId}"`,
      request: req,
    })

    return NextResponse.json({
      success: true,
      data: {
@@ -309,6 +306,19 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      // Silently fail
    }

    recordAudit({
      workspaceId: accessCheck.knowledgeBase?.workspaceId ?? null,
      actorId: userId,
      actorName: auth.userName,
      actorEmail: auth.userEmail,
      action: AuditAction.DOCUMENT_UPLOADED,
      resourceType: AuditResourceType.DOCUMENT,
      resourceId: knowledgeBaseId,
      resourceName: validatedData.filename,
      description: `Uploaded document "${validatedData.filename}" to knowledge base "${knowledgeBaseId}"`,
      request: req,
    })

    return NextResponse.json({
      success: true,
      data: newDocument,

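For reference, the `tagFilters` query parameter handled in the GET branch above is a JSON-encoded array of `{ tagSlot, operator, value }` conditions. A value shaped like this sketch would pass that check (the slot name and operator values are illustrative assumptions):

```typescript
// Illustrative construction of the tagFilters query parameter.
declare const knowledgeBaseId: string
declare const baseUrl: string

const tagFilters = [{ tagSlot: 'tag1', operator: 'eq', value: 'confluence' }] // assumed values

const url = new URL(`/api/knowledge/${knowledgeBaseId}/documents`, baseUrl)
url.searchParams.set('tagFilters', JSON.stringify(tagFilters))
// Malformed JSON is caught server-side and logged as an invalid param.
```
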
@@ -4,6 +4,7 @@
 * @vitest-environment node
 */
import {
  auditMock,
  createMockRequest,
  mockAuth,
  mockConsoleLogger,
@@ -16,6 +17,8 @@ mockKnowledgeSchemas()
mockDrizzleOrm()
mockConsoleLogger()

vi.mock('@/lib/audit/log', () => auditMock)

vi.mock('@/lib/knowledge/service', () => ({
  getKnowledgeBaseById: vi.fn(),
  updateKnowledgeBase: vi.fn(),

@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
@@ -135,6 +136,19 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:

    logger.info(`[${requestId}] Knowledge base updated: ${id} for user ${userId}`)

    recordAudit({
      workspaceId: accessCheck.knowledgeBase.workspaceId ?? null,
      actorId: userId,
      actorName: auth.userName,
      actorEmail: auth.userEmail,
      action: AuditAction.KNOWLEDGE_BASE_UPDATED,
      resourceType: AuditResourceType.KNOWLEDGE_BASE,
      resourceId: id,
      resourceName: validatedData.name ?? updatedKnowledgeBase.name,
      description: `Updated knowledge base "${validatedData.name ?? updatedKnowledgeBase.name}"`,
      request: req,
    })

    return NextResponse.json({
      success: true,
      data: updatedKnowledgeBase,
@@ -197,6 +211,19 @@ export async function DELETE(

    logger.info(`[${requestId}] Knowledge base deleted: ${id} for user ${userId}`)

    recordAudit({
      workspaceId: accessCheck.knowledgeBase.workspaceId ?? null,
      actorId: userId,
      actorName: auth.userName,
      actorEmail: auth.userEmail,
      action: AuditAction.KNOWLEDGE_BASE_DELETED,
      resourceType: AuditResourceType.KNOWLEDGE_BASE,
      resourceId: id,
      resourceName: accessCheck.knowledgeBase.name,
      description: `Deleted knowledge base "${accessCheck.knowledgeBase.name || id}"`,
      request: _request,
    })

    return NextResponse.json({
      success: true,
      data: { message: 'Knowledge base deleted successfully' },

@@ -1,68 +0,0 @@
import { db } from '@sim/db'
import { knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, lte } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'

export const dynamic = 'force-dynamic'

const logger = createLogger('ConnectorSyncSchedulerAPI')

/**
 * Cron endpoint that checks for connectors due for sync and dispatches sync jobs.
 * Should be called every 5 minutes by an external cron service.
 */
export async function GET(request: NextRequest) {
  const requestId = generateRequestId()
  logger.info(`[${requestId}] Connector sync scheduler triggered`)

  const authError = verifyCronAuth(request, 'Connector sync scheduler')
  if (authError) {
    return authError
  }

  try {
    const now = new Date()

    const dueConnectors = await db
      .select({
        id: knowledgeConnector.id,
      })
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.status, 'active'),
          lte(knowledgeConnector.nextSyncAt, now),
          isNull(knowledgeConnector.deletedAt)
        )
      )

    logger.info(`[${requestId}] Found ${dueConnectors.length} connectors due for sync`)

    if (dueConnectors.length === 0) {
      return NextResponse.json({
        success: true,
        message: 'No connectors due for sync',
        count: 0,
      })
    }

    for (const connector of dueConnectors) {
      dispatchSync(connector.id, { requestId }).catch((error) => {
        logger.error(`[${requestId}] Failed to dispatch sync for connector ${connector.id}`, error)
      })
    }

    return NextResponse.json({
      success: true,
      message: `Dispatched ${dueConnectors.length} connector sync(s)`,
      count: dueConnectors.length,
    })
  } catch (error) {
    logger.error(`[${requestId}] Connector sync scheduler error`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

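Since this scheduler expects an external cron service to hit it every 5 minutes, a caller could look like the sketch below (the route path and the `CRON_SECRET` bearer header are assumptions inferred from the `verifyCronAuth` check, not confirmed values):

```typescript
// Hypothetical external cron caller; path and auth header are assumptions.
const BASE_URL = process.env.SIM_BASE_URL ?? 'http://localhost:3000'
const SCHEDULER_PATH = '/api/knowledge/connectors/sync-scheduler' // placeholder path

async function triggerConnectorSyncScheduler(): Promise<void> {
  const res = await fetch(`${BASE_URL}${SCHEDULER_PATH}`, {
    headers: { Authorization: `Bearer ${process.env.CRON_SECRET}` },
  })
  const body = (await res.json()) as { count?: number }
  console.log(`scheduler ${res.status}: dispatched ${body.count ?? 0} sync(s)`)
}

// Every 5 minutes, matching the cadence the docstring above asks for.
setInterval(triggerConnectorSyncScheduler, 5 * 60 * 1000)
```
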
@@ -4,6 +4,7 @@
 * @vitest-environment node
 */
import {
  auditMock,
  createMockRequest,
  mockAuth,
  mockConsoleLogger,
@@ -16,6 +17,8 @@ mockKnowledgeSchemas()
mockDrizzleOrm()
mockConsoleLogger()

vi.mock('@/lib/audit/log', () => auditMock)

vi.mock('@/lib/workspaces/permissions/utils', () => ({
  getUserEntityPermissions: vi.fn().mockResolvedValue('admin'),
}))

@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
@@ -109,6 +110,20 @@ export async function POST(req: NextRequest) {
      `[${requestId}] Knowledge base created: ${newKnowledgeBase.id} for user ${session.user.id}`
    )

    recordAudit({
      workspaceId: validatedData.workspaceId,
      actorId: session.user.id,
      actorName: session.user.name,
      actorEmail: session.user.email,
      action: AuditAction.KNOWLEDGE_BASE_CREATED,
      resourceType: AuditResourceType.KNOWLEDGE_BASE,
      resourceId: newKnowledgeBase.id,
      resourceName: validatedData.name,
      description: `Created knowledge base "${validatedData.name}"`,
      metadata: { name: validatedData.name },
      request: req,
    })

    return NextResponse.json({
      success: true,
      data: newKnowledgeBase,

@@ -10,6 +10,7 @@ import {
  createMockRequest,
  mockConsoleLogger,
  mockKnowledgeSchemas,
  requestUtilsMock,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

@@ -29,9 +30,7 @@ mockKnowledgeSchemas()

vi.mock('@/lib/core/config/env', () => createEnvMock({ OPENAI_API_KEY: 'test-api-key' }))

vi.mock('@/lib/core/utils/request', () => ({
  generateRequestId: vi.fn(() => 'test-request-id'),
}))
vi.mock('@/lib/core/utils/request', () => requestUtilsMock)

vi.mock('@/lib/documents/utils', () => ({
  retryWithExponentialBackoff: vi.fn().mockImplementation((fn) => fn()),

@@ -99,7 +99,7 @@ export interface EmbeddingData {

export interface KnowledgeBaseAccessResult {
  hasAccess: true
  knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId' | 'workspaceId'>
  knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId' | 'workspaceId' | 'name'>
}

export interface KnowledgeBaseAccessDenied {
@@ -113,7 +113,7 @@ export type KnowledgeBaseAccessCheck = KnowledgeBaseAccessResult | KnowledgeBase
export interface DocumentAccessResult {
  hasAccess: true
  document: DocumentData
  knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId' | 'workspaceId'>
  knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId' | 'workspaceId' | 'name'>
}

export interface DocumentAccessDenied {
@@ -128,7 +128,7 @@ export interface ChunkAccessResult {
  hasAccess: true
  chunk: EmbeddingData
  document: DocumentData
  knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId' | 'workspaceId'>
  knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId' | 'workspaceId' | 'name'>
}

export interface ChunkAccessDenied {
@@ -151,6 +151,7 @@ export async function checkKnowledgeBaseAccess(
      id: knowledgeBase.id,
      userId: knowledgeBase.userId,
      workspaceId: knowledgeBase.workspaceId,
      name: knowledgeBase.name,
    })
    .from(knowledgeBase)
    .where(and(eq(knowledgeBase.id, knowledgeBaseId), isNull(knowledgeBase.deletedAt)))
@@ -162,17 +163,18 @@ export async function checkKnowledgeBaseAccess(

  const kbData = kb[0]

  // Case 1: User owns the knowledge base directly
  if (kbData.userId === userId) {
    return { hasAccess: true, knowledgeBase: kbData }
  }

  // Case 2: Knowledge base belongs to a workspace the user has permissions for
  if (kbData.workspaceId) {
    // Workspace KB: use workspace permissions only
    const userPermission = await getUserEntityPermissions(userId, 'workspace', kbData.workspaceId)
    if (userPermission !== null) {
      return { hasAccess: true, knowledgeBase: kbData }
    }
    return { hasAccess: false }
  }

  // Legacy non-workspace KB: allow owner access
  if (kbData.userId === userId) {
    return { hasAccess: true, knowledgeBase: kbData }
  }

  return { hasAccess: false }
@@ -181,8 +183,8 @@ export async function checkKnowledgeBaseAccess(
/**
 * Check if a user has write access to a knowledge base
 * Write access is granted if:
 * 1. User owns the knowledge base directly, OR
 * 2. User has write or admin permissions on the knowledge base's workspace
 * 1. KB has a workspace: user has write or admin permissions on that workspace
 * 2. KB has no workspace (legacy): user owns the KB directly
 */
export async function checkKnowledgeBaseWriteAccess(
  knowledgeBaseId: string,
@@ -193,6 +195,7 @@ export async function checkKnowledgeBaseWriteAccess(
      id: knowledgeBase.id,
      userId: knowledgeBase.userId,
      workspaceId: knowledgeBase.workspaceId,
      name: knowledgeBase.name,
    })
    .from(knowledgeBase)
    .where(and(eq(knowledgeBase.id, knowledgeBaseId), isNull(knowledgeBase.deletedAt)))
@@ -204,17 +207,18 @@ export async function checkKnowledgeBaseWriteAccess(

  const kbData = kb[0]

  // Case 1: User owns the knowledge base directly
  if (kbData.userId === userId) {
    return { hasAccess: true, knowledgeBase: kbData }
  }

  // Case 2: Knowledge base belongs to a workspace and user has write/admin permissions
  if (kbData.workspaceId) {
    // Workspace KB: use workspace permissions only
    const userPermission = await getUserEntityPermissions(userId, 'workspace', kbData.workspaceId)
    if (userPermission === 'write' || userPermission === 'admin') {
      return { hasAccess: true, knowledgeBase: kbData }
    }
    return { hasAccess: false }
  }

  // Legacy non-workspace KB: allow owner access
  if (kbData.userId === userId) {
    return { hasAccess: true, knowledgeBase: kbData }
  }

  return { hasAccess: false }

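The practical effect of reordering these checks is that direct ownership no longer short-circuits workspace permissions. Sketching the two paths with hypothetical IDs:

```typescript
// Hypothetical illustration of the new precedence in checkKnowledgeBaseWriteAccess.
// Workspace KBs consult workspace permissions only, even for the creator:
const wsCheck = await checkKnowledgeBaseWriteAccess('kb-in-ws-1', 'user-1')
// -> hasAccess is false unless user-1 holds 'write' or 'admin' on the workspace,
//    regardless of kbData.userId === 'user-1'.

// Legacy KBs (workspaceId === null) still fall back to direct ownership:
const legacyCheck = await checkKnowledgeBaseWriteAccess('legacy-kb', 'user-1')
// -> hasAccess is true when user-1 created the KB.
```
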
@@ -3,10 +3,11 @@
 *
 * @vitest-environment node
 */
import { mockHybridAuth } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

const mockCheckHybridAuth = vi.fn()
let mockCheckHybridAuth: ReturnType<typeof vi.fn>
const mockGetUserEntityPermissions = vi.fn()
const mockGenerateInternalToken = vi.fn()
const mockDbSelect = vi.fn()
@@ -61,9 +62,7 @@ describe('MCP Serve Route', () => {
        isDeployed: 'isDeployed',
      },
    }))
    vi.doMock('@/lib/auth/hybrid', () => ({
      checkHybridAuth: mockCheckHybridAuth,
    }))
    ;({ mockCheckHybridAuth } = mockHybridAuth())
    vi.doMock('@/lib/workspaces/permissions/utils', () => ({
      getUserEntityPermissions: mockGetUserEntityPermissions,
    }))

@@ -3,6 +3,8 @@ import { mcpServers } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { McpDomainNotAllowedError, validateMcpDomain } from '@/lib/mcp/domain-check'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpService } from '@/lib/mcp/service'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
@@ -15,7 +17,11 @@ export const dynamic = 'force-dynamic'
 * PATCH - Update an MCP server in the workspace (requires write or admin permission)
 */
export const PATCH = withMcpAuth<{ id: string }>('write')(
  async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
  async (
    request: NextRequest,
    { userId, userName, userEmail, workspaceId, requestId },
    { params }
  ) => {
    const { id: serverId } = await params

    try {
@@ -29,6 +35,17 @@ export const PATCH = withMcpAuth<{ id: string }>('write')(
      // Remove workspaceId from body to prevent it from being updated
      const { workspaceId: _, ...updateData } = body

      if (updateData.url) {
        try {
          validateMcpDomain(updateData.url)
        } catch (e) {
          if (e instanceof McpDomainNotAllowedError) {
            return createMcpErrorResponse(e, e.message, 403)
          }
          throw e
        }
      }

      // Get the current server to check if URL is changing
      const [currentServer] = await db
        .select({ url: mcpServers.url })
@@ -73,6 +90,20 @@ export const PATCH = withMcpAuth<{ id: string }>('write')(
      }

      logger.info(`[${requestId}] Successfully updated MCP server: ${serverId}`)

      recordAudit({
        workspaceId,
        actorId: userId,
        actorName: userName,
        actorEmail: userEmail,
        action: AuditAction.MCP_SERVER_UPDATED,
        resourceType: AuditResourceType.MCP_SERVER,
        resourceId: serverId,
        resourceName: updatedServer.name || serverId,
        description: `Updated MCP server "${updatedServer.name || serverId}"`,
        request,
      })

      return createMcpSuccessResponse({ server: updatedServer })
    } catch (error) {
      logger.error(`[${requestId}] Error updating MCP server:`, error)

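These handlers all flow through the `withMcpAuth` higher-order wrapper, which this change extends to pass `userName` and `userEmail` through to the handler context. A minimal sketch of the wrapper's shape (the `resolveMcpAuth` helper and the exact context fields are assumptions; the real middleware lives in `@/lib/mcp/middleware`):

```typescript
// Hypothetical sketch of the withMcpAuth wrapper shape.
type McpPermission = 'read' | 'write' | 'admin'

interface McpAuthContext {
  userId: string
  userName?: string
  userEmail?: string
  workspaceId: string
  requestId: string
}

// Assumed helper that authenticates the request and checks workspace permission.
declare function resolveMcpAuth(
  req: Request,
  required: McpPermission
): Promise<McpAuthContext | null>

function withMcpAuth<P = unknown>(required: McpPermission) {
  return (
      handler: (
        req: Request,
        ctx: McpAuthContext,
        route: { params: Promise<P> }
      ) => Promise<Response>
    ) =>
    async (req: Request, route: { params: Promise<P> }): Promise<Response> => {
      const ctx = await resolveMcpAuth(req, required)
      if (!ctx) {
        return new Response(JSON.stringify({ error: 'Forbidden' }), { status: 403 })
      }
      return handler(req, ctx, route)
    }
}
```
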
@@ -3,6 +3,8 @@ import { mcpServers } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { McpDomainNotAllowedError, validateMcpDomain } from '@/lib/mcp/domain-check'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpService } from '@/lib/mcp/service'
import {
@@ -54,7 +56,7 @@ export const GET = withMcpAuth('read')(
 * it will be updated instead of creating a duplicate.
 */
export const POST = withMcpAuth('write')(
  async (request: NextRequest, { userId, workspaceId, requestId }) => {
  async (request: NextRequest, { userId, userName, userEmail, workspaceId, requestId }) => {
    try {
      const body = getParsedBody(request) || (await request.json())

@@ -72,6 +74,15 @@ export const POST = withMcpAuth('write')(
        )
      }

      try {
        validateMcpDomain(body.url)
      } catch (e) {
        if (e instanceof McpDomainNotAllowedError) {
          return createMcpErrorResponse(e, e.message, 403)
        }
        throw e
      }

      const serverId = body.url ? generateMcpServerId(workspaceId, body.url) : crypto.randomUUID()

      const [existingServer] = await db
@@ -151,6 +162,20 @@ export const POST = withMcpAuth('write')(
        // Silently fail
      }

      recordAudit({
        workspaceId,
        actorId: userId,
        actorName: userName,
        actorEmail: userEmail,
        action: AuditAction.MCP_SERVER_ADDED,
        resourceType: AuditResourceType.MCP_SERVER,
        resourceId: serverId,
        resourceName: body.name,
        description: `Added MCP server "${body.name}"`,
        metadata: { serverName: body.name, transport: body.transport },
        request,
      })

      return createMcpSuccessResponse({ serverId }, 201)
    } catch (error) {
      logger.error(`[${requestId}] Error registering MCP server:`, error)
@@ -167,7 +192,7 @@ export const POST = withMcpAuth('write')(
 * DELETE - Delete an MCP server from the workspace (requires admin permission)
 */
export const DELETE = withMcpAuth('admin')(
  async (request: NextRequest, { userId, workspaceId, requestId }) => {
  async (request: NextRequest, { userId, userName, userEmail, workspaceId, requestId }) => {
    try {
      const { searchParams } = new URL(request.url)
      const serverId = searchParams.get('serverId')
@@ -198,6 +223,20 @@ export const DELETE = withMcpAuth('admin')(
      await mcpService.clearCache(workspaceId)

      logger.info(`[${requestId}] Successfully deleted MCP server: ${serverId}`)

      recordAudit({
        workspaceId,
        actorId: userId,
        actorName: userName,
        actorEmail: userEmail,
        action: AuditAction.MCP_SERVER_REMOVED,
        resourceType: AuditResourceType.MCP_SERVER,
        resourceId: serverId!,
        resourceName: deletedServer.name,
        description: `Removed MCP server "${deletedServer.name}"`,
        request,
      })

      return createMcpSuccessResponse({ message: `Server ${serverId} deleted successfully` })
    } catch (error) {
      logger.error(`[${requestId}] Error deleting MCP server:`, error)

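The URL-based `generateMcpServerId(workspaceId, body.url)` is what makes re-registering the same URL an upsert rather than a duplicate. One plausible sketch of such a deterministic ID, hashing the pair with SHA-256 (the hashing scheme and prefix are assumptions, not the actual implementation):

```typescript
import { createHash } from 'crypto'

// Hypothetical sketch: deterministic server IDs so the same (workspace, url)
// pair always resolves to the same row, enabling update-instead-of-duplicate.
function generateMcpServerId(workspaceId: string, url: string): string {
  const digest = createHash('sha256')
    .update(`${workspaceId}:${url.trim().toLowerCase()}`)
    .digest('hex')
  return `mcp-${digest.slice(0, 32)}` // prefix and length are illustrative
}
```
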
@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import type { NextRequest } from 'next/server'
import { McpClient } from '@/lib/mcp/client'
import { McpDomainNotAllowedError, validateMcpDomain } from '@/lib/mcp/domain-check'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { resolveMcpConfigEnvVars } from '@/lib/mcp/resolve-config'
import type { McpTransport } from '@/lib/mcp/types'
@@ -71,6 +72,15 @@ export const POST = withMcpAuth('write')(
        )
      }

      try {
        validateMcpDomain(body.url)
      } catch (e) {
        if (e instanceof McpDomainNotAllowedError) {
          return createMcpErrorResponse(e, e.message, 403)
        }
        throw e
      }

      // Build initial config for resolution
      const initialConfig = {
        id: `test-${requestId}`,
@@ -95,6 +105,16 @@ export const POST = withMcpAuth('write')(
        logger.warn(`[${requestId}] Some environment variables not found:`, { missingVars })
      }

      // Re-validate domain after env var resolution
      try {
        validateMcpDomain(testConfig.url)
      } catch (e) {
        if (e instanceof McpDomainNotAllowedError) {
          return createMcpErrorResponse(e, e.message, 403)
        }
        throw e
      }

      const testSecurityPolicy = {
        requireConsent: false,
        auditLevel: 'none' as const,

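The second `validateMcpDomain` call matters because environment-variable resolution can change the hostname after the first check passed. A minimal allow-list sketch of the domain check (the matching rules are an assumption; the real `@/lib/mcp/domain-check` may support wildcards or subdomain matching):

```typescript
// Hypothetical sketch of the domain-check logic.
declare function getAllowedMcpDomainsFromEnv(): string[] | null // from feature-flags

class McpDomainNotAllowedError extends Error {}

function validateMcpDomain(rawUrl: string): void {
  const allowed = getAllowedMcpDomainsFromEnv()
  if (allowed === null) return // no restriction configured

  const hostname = new URL(rawUrl).hostname.toLowerCase()
  if (!allowed.includes(hostname)) {
    throw new McpDomainNotAllowedError(`MCP domain not allowed: ${hostname}`)
  }
}
```
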
@@ -3,6 +3,7 @@ import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpPubSub } from '@/lib/mcp/pubsub'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
@@ -71,7 +72,11 @@ export const GET = withMcpAuth<RouteParams>('read')(
 * PATCH - Update a workflow MCP server
 */
export const PATCH = withMcpAuth<RouteParams>('write')(
  async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
  async (
    request: NextRequest,
    { userId, userName, userEmail, workspaceId, requestId },
    { params }
  ) => {
    try {
      const { id: serverId } = await params
      const body = getParsedBody(request) || (await request.json())
@@ -112,6 +117,19 @@ export const PATCH = withMcpAuth<RouteParams>('write')(

      logger.info(`[${requestId}] Successfully updated workflow MCP server: ${serverId}`)

      recordAudit({
        workspaceId,
        actorId: userId,
        actorName: userName,
        actorEmail: userEmail,
        action: AuditAction.MCP_SERVER_UPDATED,
        resourceType: AuditResourceType.MCP_SERVER,
        resourceId: serverId,
        resourceName: updatedServer.name,
        description: `Updated workflow MCP server "${updatedServer.name}"`,
        request,
      })

      return createMcpSuccessResponse({ server: updatedServer })
    } catch (error) {
      logger.error(`[${requestId}] Error updating workflow MCP server:`, error)
@@ -128,7 +146,11 @@ export const PATCH = withMcpAuth<RouteParams>('write')(
 * DELETE - Delete a workflow MCP server and all its tools
 */
export const DELETE = withMcpAuth<RouteParams>('admin')(
  async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
  async (
    request: NextRequest,
    { userId, userName, userEmail, workspaceId, requestId },
    { params }
  ) => {
    try {
      const { id: serverId } = await params

@@ -149,6 +171,19 @@ export const DELETE = withMcpAuth<RouteParams>('admin')(

      mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })

      recordAudit({
        workspaceId,
        actorId: userId,
        actorName: userName,
        actorEmail: userEmail,
        action: AuditAction.MCP_SERVER_REMOVED,
        resourceType: AuditResourceType.MCP_SERVER,
        resourceId: serverId,
        resourceName: deletedServer.name,
        description: `Unpublished workflow MCP server "${deletedServer.name}"`,
        request,
      })

      return createMcpSuccessResponse({ message: `Server ${serverId} deleted successfully` })
    } catch (error) {
      logger.error(`[${requestId}] Error deleting workflow MCP server:`, error)

@@ -3,6 +3,7 @@ import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpPubSub } from '@/lib/mcp/pubsub'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
@@ -65,7 +66,11 @@ export const GET = withMcpAuth<RouteParams>('read')(
 * PATCH - Update a tool's configuration
 */
export const PATCH = withMcpAuth<RouteParams>('write')(
  async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
  async (
    request: NextRequest,
    { userId, userName, userEmail, workspaceId, requestId },
    { params }
  ) => {
    try {
      const { id: serverId, toolId } = await params
      const body = getParsedBody(request) || (await request.json())
@@ -118,6 +123,19 @@ export const PATCH = withMcpAuth<RouteParams>('write')(

      mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })

      recordAudit({
        workspaceId,
        actorId: userId,
        actorName: userName,
        actorEmail: userEmail,
        action: AuditAction.MCP_SERVER_UPDATED,
        resourceType: AuditResourceType.MCP_SERVER,
        resourceId: serverId,
        description: `Updated tool "${updatedTool.toolName}" in MCP server`,
        metadata: { toolId, toolName: updatedTool.toolName },
        request,
      })

      return createMcpSuccessResponse({ tool: updatedTool })
    } catch (error) {
      logger.error(`[${requestId}] Error updating tool:`, error)
@@ -134,7 +152,11 @@ export const PATCH = withMcpAuth<RouteParams>('write')(
 * DELETE - Remove a tool from an MCP server
 */
export const DELETE = withMcpAuth<RouteParams>('write')(
  async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
  async (
    request: NextRequest,
    { userId, userName, userEmail, workspaceId, requestId },
    { params }
  ) => {
    try {
      const { id: serverId, toolId } = await params

@@ -165,6 +187,19 @@ export const DELETE = withMcpAuth<RouteParams>('write')(

      mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })

      recordAudit({
        workspaceId,
        actorId: userId,
        actorName: userName,
        actorEmail: userEmail,
        action: AuditAction.MCP_SERVER_UPDATED,
        resourceType: AuditResourceType.MCP_SERVER,
        resourceId: serverId,
        description: `Removed tool "${deletedTool.toolName}" from MCP server`,
        metadata: { toolId, toolName: deletedTool.toolName },
        request,
      })

      return createMcpSuccessResponse({ message: `Tool ${toolId} deleted successfully` })
    } catch (error) {
      logger.error(`[${requestId}] Error deleting tool:`, error)

@@ -3,6 +3,7 @@ import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpPubSub } from '@/lib/mcp/pubsub'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
@@ -76,7 +77,11 @@ export const GET = withMcpAuth<RouteParams>('read')(
 * POST - Add a workflow as a tool to an MCP server
 */
export const POST = withMcpAuth<RouteParams>('write')(
  async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
  async (
    request: NextRequest,
    { userId, userName, userEmail, workspaceId, requestId },
    { params }
  ) => {
    try {
      const { id: serverId } = await params
      const body = getParsedBody(request) || (await request.json())
@@ -197,6 +202,19 @@ export const POST = withMcpAuth<RouteParams>('write')(

      mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })

      recordAudit({
        workspaceId,
        actorId: userId,
        actorName: userName,
        actorEmail: userEmail,
        action: AuditAction.MCP_SERVER_UPDATED,
        resourceType: AuditResourceType.MCP_SERVER,
        resourceId: serverId,
        description: `Added tool "${toolName}" to MCP server`,
        metadata: { toolId, toolName, workflowId: body.workflowId },
        request,
      })

      return createMcpSuccessResponse({ tool }, 201)
    } catch (error) {
      logger.error(`[${requestId}] Error adding tool:`, error)

@@ -3,6 +3,7 @@ import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, inArray, sql } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpPubSub } from '@/lib/mcp/pubsub'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
@@ -85,7 +86,7 @@ export const GET = withMcpAuth('read')(
 * POST - Create a new workflow MCP server
 */
export const POST = withMcpAuth('write')(
  async (request: NextRequest, { userId, workspaceId, requestId }) => {
  async (request: NextRequest, { userId, userName, userEmail, workspaceId, requestId }) => {
    try {
      const body = getParsedBody(request) || (await request.json())

@@ -188,6 +189,19 @@ export const POST = withMcpAuth('write')(
        `[${requestId}] Successfully created workflow MCP server: ${body.name} (ID: ${serverId})`
      )

      recordAudit({
        workspaceId,
        actorId: userId,
        actorName: userName,
        actorEmail: userEmail,
        action: AuditAction.MCP_SERVER_ADDED,
        resourceType: AuditResourceType.MCP_SERVER,
        resourceId: serverId,
        resourceName: body.name.trim(),
        description: `Published workflow MCP server "${body.name.trim()}" with ${addedTools.length} tool(s)`,
        request,
      })

      return createMcpSuccessResponse({ server, addedTools }, 201)
    } catch (error) {
      logger.error(`[${requestId}] Error creating workflow MCP server:`, error)

@@ -18,6 +18,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
@@ -552,6 +553,25 @@ export async function PUT(
      email: orgInvitation.email,
    })

    const auditActionMap = {
      accepted: AuditAction.ORG_INVITATION_ACCEPTED,
      rejected: AuditAction.ORG_INVITATION_REJECTED,
      cancelled: AuditAction.ORG_INVITATION_CANCELLED,
    } as const

    recordAudit({
      workspaceId: null,
      actorId: session.user.id,
      action: auditActionMap[status],
      resourceType: AuditResourceType.ORGANIZATION,
      resourceId: organizationId,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      description: `Organization invitation ${status} for ${orgInvitation.email}`,
      metadata: { invitationId, email: orgInvitation.email, status },
      request: req,
    })

    return NextResponse.json({
      success: true,
      message: `Invitation ${status} successfully`,

@@ -17,6 +17,7 @@ import {
  renderBatchInvitationEmail,
  renderInvitationEmail,
} from '@/components/emails'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import {
  validateBulkInvitations,
@@ -411,6 +412,22 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
      workspaceInvitationCount: workspaceInvitationIds.length,
    })

    for (const inv of invitationsToCreate) {
      recordAudit({
        workspaceId: null,
        actorId: session.user.id,
        action: AuditAction.ORG_INVITATION_CREATED,
        resourceType: AuditResourceType.ORGANIZATION,
        resourceId: organizationId,
        actorName: session.user.name ?? undefined,
        actorEmail: session.user.email ?? undefined,
        resourceName: organizationEntry[0]?.name,
        description: `Invited ${inv.email} to organization as ${role}`,
        metadata: { invitationId: inv.id, email: inv.email, role },
        request,
      })
    }

    return NextResponse.json({
      success: true,
      message: `${invitationsToCreate.length} invitation(s) sent successfully`,
@@ -532,6 +549,19 @@ export async function DELETE(
      email: result[0].email,
    })

    recordAudit({
      workspaceId: null,
      actorId: session.user.id,
      action: AuditAction.ORG_INVITATION_REVOKED,
      resourceType: AuditResourceType.ORGANIZATION,
      resourceId: organizationId,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      description: `Revoked organization invitation for ${result[0].email}`,
      metadata: { invitationId, email: result[0].email },
      request,
    })

    return NextResponse.json({
      success: true,
      message: 'Invitation cancelled successfully',

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { getUserUsageData } from '@/lib/billing/core/usage'
import { removeUserFromOrganization } from '@/lib/billing/organizations/membership'
@@ -213,6 +214,19 @@ export async function PUT(
      updatedBy: session.user.id,
    })

    recordAudit({
      workspaceId: null,
      actorId: session.user.id,
      action: AuditAction.ORG_MEMBER_ROLE_CHANGED,
      resourceType: AuditResourceType.ORGANIZATION,
      resourceId: organizationId,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      description: `Changed role for member ${memberId} to ${role}`,
      metadata: { targetUserId: memberId, newRole: role },
      request,
    })

    return NextResponse.json({
      success: true,
      message: 'Member role updated successfully',
@@ -305,6 +319,22 @@ export async function DELETE(
      billingActions: result.billingActions,
    })

    recordAudit({
      workspaceId: null,
      actorId: session.user.id,
      action: AuditAction.ORG_MEMBER_REMOVED,
      resourceType: AuditResourceType.ORGANIZATION,
      resourceId: organizationId,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      description:
        session.user.id === targetUserId
          ? 'Left the organization'
          : `Removed member ${targetUserId} from organization`,
      metadata: { targetUserId, wasSelfRemoval: session.user.id === targetUserId },
      request,
    })

    return NextResponse.json({
      success: true,
      message:

@@ -5,6 +5,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { getUserUsageData } from '@/lib/billing/core/usage'
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
@@ -285,6 +286,19 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
      // Don't fail the request if email fails
    }

    recordAudit({
      workspaceId: null,
      actorId: session.user.id,
      action: AuditAction.ORG_INVITATION_CREATED,
      resourceType: AuditResourceType.ORGANIZATION,
      resourceId: organizationId,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      description: `Invited ${normalizedEmail} to organization as ${role}`,
      metadata: { invitationId, email: normalizedEmail, role },
      request,
    })

    return NextResponse.json({
      success: true,
      message: `Invitation sent to ${normalizedEmail}`,

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq, ne } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import {
  getOrganizationSeatAnalytics,
@@ -192,6 +193,20 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
      changes: { name, slug, logo },
    })

    recordAudit({
      workspaceId: null,
      actorId: session.user.id,
      action: AuditAction.ORGANIZATION_UPDATED,
      resourceType: AuditResourceType.ORGANIZATION,
      resourceId: organizationId,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      resourceName: updatedOrg[0].name,
      description: `Updated organization settings`,
      metadata: { changes: { name, slug, logo } },
      request,
    })

    return NextResponse.json({
      success: true,
      message: 'Organization updated successfully',

@@ -3,6 +3,7 @@ import { member, organization } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, or } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { createOrganizationForTeamPlan } from '@/lib/billing/organization'

@@ -115,6 +116,19 @@ export async function POST(request: Request) {
      organizationId,
    })

    recordAudit({
      workspaceId: null,
      actorId: user.id,
      action: AuditAction.ORGANIZATION_CREATED,
      resourceType: AuditResourceType.ORGANIZATION,
      resourceId: organizationId,
      actorName: user.name ?? undefined,
      actorEmail: user.email ?? undefined,
      resourceName: organizationName ?? undefined,
      description: `Created organization "${organizationName}"`,
      request,
    })

    return NextResponse.json({
      success: true,
      organizationId,

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'

@@ -13,6 +14,7 @@ async function getPermissionGroupWithAccess(groupId: string, userId: string) {
  const [group] = await db
    .select({
      id: permissionGroup.id,
      name: permissionGroup.name,
      organizationId: permissionGroup.organizationId,
    })
    .from(permissionGroup)
@@ -151,6 +153,20 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      assignedBy: session.user.id,
    })

    recordAudit({
      workspaceId: null,
      actorId: session.user.id,
      action: AuditAction.PERMISSION_GROUP_MEMBER_ADDED,
      resourceType: AuditResourceType.PERMISSION_GROUP,
      resourceId: id,
      resourceName: result.group.name,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      description: `Added member ${userId} to permission group "${result.group.name}"`,
      metadata: { targetUserId: userId, permissionGroupId: id },
      request: req,
    })

    return NextResponse.json({ member: newMember }, { status: 201 })
  } catch (error) {
    if (error instanceof z.ZodError) {
@@ -221,6 +237,20 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i
      userId: session.user.id,
    })

    recordAudit({
      workspaceId: null,
      actorId: session.user.id,
      action: AuditAction.PERMISSION_GROUP_MEMBER_REMOVED,
      resourceType: AuditResourceType.PERMISSION_GROUP,
      resourceId: id,
      resourceName: result.group.name,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      description: `Removed member ${memberToRemove.userId} from permission group "${result.group.name}"`,
      metadata: { targetUserId: memberToRemove.userId, memberId, permissionGroupId: id },
      request: req,
    })

    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error('Error removing member from permission group', error)

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
import {
@@ -18,6 +19,7 @@ const configSchema = z.object({
  allowedModelProviders: z.array(z.string()).nullable().optional(),
  hideTraceSpans: z.boolean().optional(),
  hideKnowledgeBaseTab: z.boolean().optional(),
  hideTablesTab: z.boolean().optional(),
  hideCopilot: z.boolean().optional(),
  hideApiKeysTab: z.boolean().optional(),
  hideEnvironmentTab: z.boolean().optional(),
@@ -181,6 +183,19 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
      .where(eq(permissionGroup.id, id))
      .limit(1)

    recordAudit({
      workspaceId: null,
      actorId: session.user.id,
      action: AuditAction.PERMISSION_GROUP_UPDATED,
      resourceType: AuditResourceType.PERMISSION_GROUP,
      resourceId: id,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      resourceName: updated.name,
      description: `Updated permission group "${updated.name}"`,
      request: req,
    })

    return NextResponse.json({
      permissionGroup: {
        ...updated,
@@ -229,6 +244,19 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i

    logger.info('Deleted permission group', { permissionGroupId: id, userId: session.user.id })

    recordAudit({
      workspaceId: null,
      actorId: session.user.id,
      action: AuditAction.PERMISSION_GROUP_DELETED,
      resourceType: AuditResourceType.PERMISSION_GROUP,
      resourceId: id,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      resourceName: result.group.name,
      description: `Deleted permission group "${result.group.name}"`,
      request: req,
    })

    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error('Error deleting permission group', error)

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, count, desc, eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
import {
@@ -19,6 +20,7 @@ const configSchema = z.object({
  allowedModelProviders: z.array(z.string()).nullable().optional(),
  hideTraceSpans: z.boolean().optional(),
  hideKnowledgeBaseTab: z.boolean().optional(),
  hideTablesTab: z.boolean().optional(),
  hideCopilot: z.boolean().optional(),
  hideApiKeysTab: z.boolean().optional(),
  hideEnvironmentTab: z.boolean().optional(),
@@ -198,6 +200,19 @@ export async function POST(req: Request) {
      userId: session.user.id,
    })

    recordAudit({
      workspaceId: null,
      actorId: session.user.id,
      action: AuditAction.PERMISSION_GROUP_CREATED,
      resourceType: AuditResourceType.PERMISSION_GROUP,
      resourceId: newGroup.id,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      resourceName: name,
      description: `Created permission group "${name}"`,
      request: req,
    })

    return NextResponse.json({ permissionGroup: newGroup }, { status: 201 })
  } catch (error) {
    if (error instanceof z.ZodError) {

@@ -3,7 +3,7 @@
 *
 * @vitest-environment node
 */
import { databaseMock, loggerMock } from '@sim/testing'
import { auditMock, databaseMock, loggerMock, requestUtilsMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

@@ -31,12 +31,12 @@ vi.mock('drizzle-orm', () => ({
  eq: vi.fn(),
}))

vi.mock('@/lib/core/utils/request', () => ({
  generateRequestId: () => 'test-request-id',
}))
vi.mock('@/lib/core/utils/request', () => requestUtilsMock)

vi.mock('@sim/logger', () => loggerMock)

vi.mock('@/lib/audit/log', () => auditMock)

import { PUT } from './route'

function createRequest(body: Record<string, unknown>): NextRequest {

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { validateCronExpression } from '@/lib/workflows/schedules/utils'
@@ -106,6 +107,18 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{

    logger.info(`[${requestId}] Reactivated schedule: ${scheduleId}`)

    recordAudit({
      workspaceId: authorization.workflow.workspaceId ?? null,
      actorId: session.user.id,
      action: AuditAction.SCHEDULE_UPDATED,
      resourceType: AuditResourceType.SCHEDULE,
      resourceId: scheduleId,
      actorName: session.user.name ?? undefined,
      actorEmail: session.user.email ?? undefined,
      description: `Reactivated schedule for workflow ${schedule.workflowId}`,
      request,
    })

    return NextResponse.json({
      message: 'Schedule activated successfully',
      nextRunAt,

@@ -3,7 +3,7 @@
 *
 * @vitest-environment node
 */
import { databaseMock, loggerMock } from '@sim/testing'
import { databaseMock, loggerMock, requestUtilsMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

@@ -43,9 +43,7 @@ vi.mock('drizzle-orm', () => ({
  isNull: vi.fn(),
}))

vi.mock('@/lib/core/utils/request', () => ({
  generateRequestId: () => 'test-request-id',
}))
vi.mock('@/lib/core/utils/request', () => requestUtilsMock)

vi.mock('@sim/logger', () => loggerMock)

apps/sim/app/api/settings/allowed-integrations/route.ts (new file, 14 lines)
@@ -0,0 +1,14 @@
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getAllowedIntegrationsFromEnv } from '@/lib/core/config/feature-flags'

export async function GET() {
  const session = await getSession()
  if (!session?.user?.id) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  return NextResponse.json({
    allowedIntegrations: getAllowedIntegrationsFromEnv(),
  })
}

apps/sim/app/api/settings/allowed-mcp-domains/route.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
+import { NextResponse } from 'next/server'
+import { getSession } from '@/lib/auth'
+import { getAllowedMcpDomainsFromEnv } from '@/lib/core/config/feature-flags'
+import { getBaseUrl } from '@/lib/core/utils/urls'
+
+export async function GET() {
+  const session = await getSession()
+  if (!session?.user?.id) {
+    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+  }
+
+  const configuredDomains = getAllowedMcpDomainsFromEnv()
+  if (configuredDomains === null) {
+    return NextResponse.json({ allowedMcpDomains: null })
+  }
+
+  try {
+    const platformHostname = new URL(getBaseUrl()).hostname.toLowerCase()
+    if (!configuredDomains.includes(platformHostname)) {
+      return NextResponse.json({
+        allowedMcpDomains: [...configuredDomains, platformHostname],
+      })
+    }
+  } catch {}
+
+  return NextResponse.json({ allowedMcpDomains: configuredDomains })
+}
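One detail worth calling out: the route appends the platform's own hostname to the configured allowlist, so the hosting domain can never be locked out by configuration, and an unparsable base URL silently falls back to the configured list. A minimal restatement of that merge rule, for clarity only (the actual logic lives inline in the GET handler above):

```typescript
// Restates the merge logic from the handler above; not the real implementation.
function mergeAllowedDomains(configured: string[] | null, baseUrl: string): string[] | null {
  if (configured === null) return null // null means no restriction configured
  try {
    const host = new URL(baseUrl).hostname.toLowerCase()
    return configured.includes(host) ? configured : [...configured, host]
  } catch {
    return configured // unparsable base URL: keep the configured list as-is
  }
}

// mergeAllowedDomains(['mcp.example.com'], 'https://sim.example.com')
// => ['mcp.example.com', 'sim.example.com']
```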
apps/sim/app/api/table/[tableId]/route.ts (new file, 138 lines)
@@ -0,0 +1,138 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { deleteTable, type TableSchema } from '@/lib/table'
+import { accessError, checkAccess, normalizeColumn, verifyTableWorkspace } from '../utils'
+
+const logger = createLogger('TableDetailAPI')
+
+const GetTableSchema = z.object({
+  workspaceId: z.string().min(1, 'Workspace ID is required'),
+})
+
+interface TableRouteParams {
+  params: Promise<{ tableId: string }>
+}
+
+/** GET /api/table/[tableId] - Retrieves a single table's details. */
+export async function GET(request: NextRequest, { params }: TableRouteParams) {
+  const requestId = generateRequestId()
+  const { tableId } = await params
+
+  try {
+    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
+    if (!authResult.success || !authResult.userId) {
+      logger.warn(`[${requestId}] Unauthorized table access attempt`)
+      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
+    }
+
+    const { searchParams } = new URL(request.url)
+    const validated = GetTableSchema.parse({
+      workspaceId: searchParams.get('workspaceId'),
+    })
+
+    const result = await checkAccess(tableId, authResult.userId, 'read')
+    if (!result.ok) return accessError(result, requestId, tableId)
+
+    const { table } = result
+
+    const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
+    if (!isValidWorkspace) {
+      logger.warn(
+        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
+      )
+      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
+    }
+
+    logger.info(`[${requestId}] Retrieved table ${tableId} for user ${authResult.userId}`)
+
+    const schemaData = table.schema as TableSchema
+
+    return NextResponse.json({
+      success: true,
+      data: {
+        table: {
+          id: table.id,
+          name: table.name,
+          description: table.description,
+          schema: {
+            columns: schemaData.columns.map(normalizeColumn),
+          },
+          rowCount: table.rowCount,
+          maxRows: table.maxRows,
+          createdAt:
+            table.createdAt instanceof Date
+              ? table.createdAt.toISOString()
+              : String(table.createdAt),
+          updatedAt:
+            table.updatedAt instanceof Date
+              ? table.updatedAt.toISOString()
+              : String(table.updatedAt),
+        },
+      },
+    })
+  } catch (error) {
+    if (error instanceof z.ZodError) {
+      return NextResponse.json(
+        { error: 'Validation error', details: error.errors },
+        { status: 400 }
+      )
+    }
+
+    logger.error(`[${requestId}] Error getting table:`, error)
+    return NextResponse.json({ error: 'Failed to get table' }, { status: 500 })
+  }
+}
+
+/** DELETE /api/table/[tableId] - Deletes a table and all its rows. */
+export async function DELETE(request: NextRequest, { params }: TableRouteParams) {
+  const requestId = generateRequestId()
+  const { tableId } = await params
+
+  try {
+    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
+    if (!authResult.success || !authResult.userId) {
+      logger.warn(`[${requestId}] Unauthorized table delete attempt`)
+      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
+    }
+
+    const { searchParams } = new URL(request.url)
+    const validated = GetTableSchema.parse({
+      workspaceId: searchParams.get('workspaceId'),
+    })
+
+    const result = await checkAccess(tableId, authResult.userId, 'write')
+    if (!result.ok) return accessError(result, requestId, tableId)
+
+    const { table } = result
+
+    const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
+    if (!isValidWorkspace) {
+      logger.warn(
+        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
+      )
+      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
+    }
+
+    await deleteTable(tableId, requestId)
+
+    return NextResponse.json({
+      success: true,
+      data: {
+        message: 'Table deleted successfully',
+      },
+    })
+  } catch (error) {
+    if (error instanceof z.ZodError) {
+      return NextResponse.json(
+        { error: 'Validation error', details: error.errors },
+        { status: 400 }
+      )
+    }
+
+    logger.error(`[${requestId}] Error deleting table:`, error)
+    return NextResponse.json({ error: 'Failed to delete table' }, { status: 500 })
+  }
+}
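Both handlers require a `workspaceId` query parameter and cross-check it against the table's actual workspace on top of the per-user access check, so a valid table ID from another workspace is rejected with a 400. A hedged sketch of a caller, with placeholder IDs and response comments mirroring the shapes above:

```typescript
// Illustrative client for the new routes; IDs are placeholders and error
// handling is intentionally minimal.
async function getTable(tableId: string, workspaceId: string) {
  const url = `/api/table/${tableId}?workspaceId=${encodeURIComponent(workspaceId)}`
  const res = await fetch(url)
  if (!res.ok) throw new Error(`GET table failed: ${res.status}`)
  return res.json() // { success: true, data: { table: { id, name, schema, ... } } }
}

async function removeTable(tableId: string, workspaceId: string) {
  const url = `/api/table/${tableId}?workspaceId=${encodeURIComponent(workspaceId)}`
  const res = await fetch(url, { method: 'DELETE' })
  if (!res.ok) throw new Error(`DELETE table failed: ${res.status}`)
}
```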
Some files were not shown because too many files have changed in this diff.