Compare commits


29 Commits

Author SHA1 Message Date
waleed
3c1c366202 ops 2026-02-18 23:29:53 -08:00
waleed
a42a62005e more 2026-02-18 23:20:39 -08:00
waleed
2466d7ea7e ack comment 2026-02-18 22:58:29 -08:00
waleed
5d5e6f8028 add more tools 2026-02-18 22:44:09 -08:00
waleed
5a3cda5e0c added desc for dataverse 2026-02-18 20:00:52 -08:00
waleed
c987f25dc4 added advanced fields for vercel and youtube, added cloudflare and dataverse block 2026-02-18 19:59:56 -08:00
waleed
bdcbfd739a style(youtube): mark optional params as advanced mode
Hide pagination, sort order, and filter fields behind the advanced
toggle for a cleaner default UX across all YouTube operations.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-18 19:14:32 -08:00
waleed
0335ffd892 refactor(vercel): mark optional fields as advanced mode
Move optional/power-user fields behind the advanced toggle:
- List Deployments: project filter, target, state
- Create Deployment: project ID override, redeploy from, target
- List Projects: search
- Create/Update Project: framework, build/output/install commands
- Env Vars: variable type
- Webhooks: project IDs filter
- Checks: path, details URL
- Team Members: role filter
- All operations: team ID scope

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-18 19:10:41 -08:00
Waleed
ab48787422 chore(deps): upgrade next.js from 16.1.0-canary.21 to 16.1.6 (#3254) 2026-02-18 16:25:28 -08:00
Waleed
91aa1f9a52 feat(tools): added vercel block & tools (#3252)
* feat(vercel): add complete Vercel integration with 42 API tools

Add Vercel platform management integration covering deployments, projects,
environment variables, domains, DNS records, aliases, edge configs, and
team/user management. All tools use API key authentication with Bearer tokens.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* feat(vercel): add webhook and deployment check tools

Add 8 new Vercel API tools:
- Webhooks: list, create, delete
- Deployment Checks: create, get, list, update, rerequest

Brings total Vercel tools to 50.

* fix(vercel): expand all object and array output definitions

Expand unexpanded output types:
- get_deployment: meta and gitSource objects now have properties
- list_deployment_files: children array now has items definition
- get_team: teamRoles and teamPermissions arrays now have items

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* update icon size, update docs

---------

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-18 15:54:49 -08:00
Waleed
2979269ac3 fix(sidebar): unify workflow and folder insertion ordering (#3250)
* fix(sidebar): unify workflow and folder insertion ordering

* ack comments

* ack comments

* ack

* ack comment

* upgrade turbo

* fix build
2026-02-18 14:41:55 -08:00
Waleed
cf28822a1c fix(shortlink): remove isHosted guard from redirects, not available at build time on ECS (#3251)
* fix(shortlink): remove isHosted guard from redirects, not available at build time on ECS

* fix(shortlink): use rewrite instead of redirect for Beluga tracking
2026-02-18 14:00:25 -08:00
Waleed
86ca984926 fix(normalization): update allowed integrations checks to be fully lowercase (#3248) 2026-02-18 12:08:03 -08:00
Emir Karabeg
e3964624ac feat(sub): hide usage limits and seats info from enterprise members (non-admin) (#3243)
- Add isEnterpriseMember and canViewUsageInfo flags to subscription permissions
- Hide UsageHeader, CreditBalance, billing date, and usage notifications from enterprise members
- Show only plan name in subscription tab for enterprise members (non-admin)
- Hide usage indicator details (amount, progress pills) from enterprise members
- Team tab already hidden via requiresTeam check in settings modal

Closes #6882

Co-authored-by: Cursor Agent <cursoragent@cursor.com>
Co-authored-by: Emir Karabeg <emir-karabeg@users.noreply.github.com>
2026-02-18 12:01:47 -08:00
Waleed
7c7c0fd955 feat(audit-log): add audit events for templates, billing, credentials, env, deployments, passwords (#3246)
* feat(audit-log): add audit events for templates, billing, credentials, env, deployments, passwords

* improvement(audit-log): add actorName/actorEmail to all recordAudit calls

* fix(audit-log): resolve user for password reset, add CREDENTIAL_SET_INVITATION_RESENT action

* fix(audit-log): add workspaceId to deployment activation audit

* improvement(audit-log): use better-auth callback for password reset audit, remove cast

- Move password reset audit to onPasswordReset callback in auth config
  instead of coupling to better-auth's verification table internals
- Remove ugly double-cast on workflowData.workspaceId in deployment activation

* fix(audit-log): add missing actorName/actorEmail to workflow duplicate

* improvement(audit-log): add resourceName to credential set invitation accept
2026-02-18 11:53:08 -08:00
Waleed
e37b4a926d feat(audit-log): add persistent audit log system with comprehensive route instrumentation (#3242)
* feat(audit-log): add persistent audit log system with comprehensive route instrumentation

* fix(audit-log): address PR review — nullable workspaceId, enum usage, remove redundant queries

- Make audit_log.workspace_id nullable with ON DELETE SET NULL (logs survive workspace/user deletion)
- Make audit_log.actor_id nullable with ON DELETE SET NULL
- Replace all 53 routes' string literal action/resourceType with AuditAction.X and AuditResourceType.X enums
- Fix empty workspaceId ('') → null for OAuth, form, and org routes to avoid FK violations
- Remove redundant DB queries in chat manage route (use checkChatAccess return data)
- Fix organization routes to pass workspaceId: null instead of organizationId

* fix(audit-log): replace remaining workspaceId '' fallbacks with null

* fix(audit-log): credential-set org IDs, workspace deletion FK, actorId fallback, string literal action

* reran migrations

* fix(mcp,audit): tighten env var domain bypass, add post-resolution check, form workspaceId

- Only bypass MCP domain check when env var is in hostname/authority, not path/query
- Add post-resolution validateMcpDomain call in test-connection endpoint
- Match client-side isDomainAllowed to same hostname-only bypass logic
- Return workspaceId from checkFormAccess, use in form audit logs
- Add 49 comprehensive domain-check tests covering all edge cases

* fix(mcp): stateful regex lastIndex bug, RFC 3986 authority parsing

- Remove /g flag from module-level ENV_VAR_PATTERN to avoid lastIndex state
- Create fresh regex instances per call in server-side hasEnvVarInHostname
- Fix authority extraction to terminate at /, ?, or # per RFC 3986
- Prevents bypass via https://evil.com?token={{SECRET}} (no path)
- Add test cases for query-only and fragment-only env var URLs (53 total)

* fix(audit-log): try/catch for never-throw contract, accept null actorName/Email, fix misleading action

- Wrap recordAudit body in try/catch so nanoid() or header extraction can't throw
- Accept string | null for actorName and actorEmail (session.user.name can be null)
- Normalize null -> undefined before insert to match DB column types
- Fix org members route: ORG_MEMBER_ADDED -> ORG_INVITATION_CREATED (sends invite, not adds member)

* improvement(audit-log): add resource names and specific invitation actions

* fix(audit-log): use validated chat record, add mock sync tests
2026-02-18 00:54:52 -08:00
Waleed
11f3a14c02 fix(lock): prevent socket crash when locking agent blocks (#3245) 2026-02-18 00:32:09 -08:00
Emir Karabeg
eab01e0272 fix(copilot): copilot shortcut conflict (#3219)
* fix: prevent copilot keyboard shortcuts from triggering when panel is inactive

The OptionsSelector component was capturing keyboard events (1-9 number keys and Enter)
globally on the document, causing accidental option selections when users were
interacting with other parts of the application.

This fix adds a check to only handle keyboard shortcuts when the copilot panel
is the active tab, preventing the shortcuts from interfering with other workflows.

Co-authored-by: Emir Karabeg <emir-karabeg@users.noreply.github.com>

* lint

---------

Co-authored-by: Cursor Agent <cursoragent@cursor.com>
Co-authored-by: Emir Karabeg <emir-karabeg@users.noreply.github.com>
Co-authored-by: Waleed Latif <walif6@gmail.com>
2026-02-17 18:47:07 -08:00
Waleed
bbcef7ce5c feat(access-control): add ALLOWED_INTEGRATIONS env var for self-hosted block restrictions (#3238)
* feat(access-control): add ALLOWED_INTEGRATIONS env var for self-hosted block restrictions

* fix(tests): add getAllowedIntegrationsFromEnv mock to agent-handler tests

* fix(access-control): add auth to allowlist endpoint, fix loading state race, use accurate error message

* fix(access-control): remove auth from allowed-integrations endpoint to match models endpoint pattern

* fix(access-control): normalize blockType to lowercase before env allowlist check

* fix(access-control): expose merged allowedIntegrations on config to prevent bypass via direct access

* consolidate merging of allowed blocks so all callers have it by default

* normalize to lower case

* added tests

* added tests, normalize to lower case

* added safety in case userId is missing

* fix failing tests
2026-02-17 18:46:24 -08:00
Emir Karabeg
0ee52df5a7 feat(canvas): allow locked block outbound connections (#3229)
* Allow outbound connections from locked blocks to be modified

- Modified isEdgeProtected to only check target block protection
- Outbound connections (from locked blocks) can now be added/removed
- Inbound connections (to locked blocks) remain protected
- Updated notification messages and comments to reflect the change

Co-authored-by: Emir Karabeg <emir-karabeg@users.noreply.github.com>

* update notif msg

---------

Co-authored-by: Cursor Agent <cursoragent@cursor.com>
Co-authored-by: Emir Karabeg <emir-karabeg@users.noreply.github.com>
Co-authored-by: waleed <walif6@gmail.com>
2026-02-17 18:16:17 -08:00
Waleed
6421b1a0ca feat(mcp): add ALLOWED_MCP_DOMAINS env var for domain allowlist (#3240)
* feat(mcp): add ALLOWED_MCP_DOMAINS env var for domain allowlist

* ack PR comments

* cleanup
2026-02-17 18:01:52 -08:00
Waleed
61a5c98717 fix(shortlink): use redirect instead of rewrite for Beluga tracking (#3239) 2026-02-17 16:27:20 -08:00
Waleed
a0afb5d03e feat(pipedrive): added sort order to endpoints that support it, upgraded turborepo (#3237)
* feat(pipedrive): added sort order to endpoints that support it

* upgraded turborepo

* fix
2026-02-17 14:58:54 -08:00
Waleed
cdacb796a8 improvement(providers): replace @ts-ignore with typed ProviderError class (#3235) 2026-02-17 14:20:31 -08:00
Waleed
3ce54147e6 fix(pagination): add missing next_page to response interfaces and operator comments (#3236) 2026-02-17 14:13:45 -08:00
Waleed
08690b2906 feat(pagination): update pagination for remaining integrations that support it (#3233)
* feat(pagination): update pagination for remaining integrations that support it

* fixed remaining

* ack comments
2026-02-17 13:34:46 -08:00
Waleed
299cc26694 improvement(lint): fix react-doctor errors and warnings (#3232)
* improvement(lint): fix react-doctor errors and warnings

* remove separators
2026-02-17 11:40:47 -08:00
Emir Karabeg
48715ff013 improvement(copilot): scrolling stickiness (#3218)
- Changed default stickinessThreshold from 100 to 30 in use-scroll-management.ts
- Removed explicit stickinessThreshold override (40) from copilot.tsx
- Both copilot and chat now use the same default value of 30
- This makes scrolling less sticky across all copilot message interactions

Co-authored-by: Cursor Agent <cursoragent@cursor.com>
Co-authored-by: Emir Karabeg <emir-karabeg@users.noreply.github.com>
2026-02-17 10:33:10 -08:00
Waleed
ad0d0ed1f1 feat(shortlink): add Beluga short link rewrite for hosted campaigns (#3231) 2026-02-17 10:32:32 -08:00
404 changed files with 22927 additions and 12135 deletions

View File

@@ -1,261 +0,0 @@
---
description: Add a knowledge base connector for syncing documents from an external source
argument-hint: <service-name> [api-docs-url]
---
# Add Connector Skill
You are an expert at adding knowledge base connectors to Sim. A connector syncs documents from an external source (Confluence, Google Drive, Notion, etc.) into a knowledge base.
## Your Task
When the user asks you to create a connector:
1. Use Context7 or WebFetch to read the service's API documentation
2. Create the connector directory and config
3. Register it in the connector registry
## Directory Structure
Create files in `apps/sim/connectors/{service}/`:
```
connectors/{service}/
├── index.ts # Barrel export
└── {service}.ts # ConnectorConfig definition
```
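For example, the `index.ts` barrel is typically a single re-export:
```typescript
// connectors/{service}/index.ts
export { {service}Connector } from './{service}'
```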
## ConnectorConfig Structure
```typescript
import { createLogger } from '@sim/logger'
import { {Service}Icon } from '@/components/icons'
import { fetchWithRetry } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'

const logger = createLogger('{Service}Connector')

export const {service}Connector: ConnectorConfig = {
  id: '{service}',
  name: '{Service}',
  description: 'Sync documents from {Service} into your knowledge base',
  version: '1.0.0',
  icon: {Service}Icon,
  oauth: {
    required: true,
    provider: '{service}', // Must match OAuthService in lib/oauth/types.ts
    requiredScopes: ['read:...'],
  },
  configFields: [
    // Rendered dynamically by the add-connector modal UI
    // Supports 'short-input' and 'dropdown' types
  ],
  listDocuments: async (accessToken, sourceConfig, cursor) => {
    // Paginate via cursor, extract text, compute SHA-256 hash
    // Return { documents: ExternalDocument[], nextCursor?, hasMore }
  },
  getDocument: async (accessToken, sourceConfig, externalId) => {
    // Return ExternalDocument or null
  },
  validateConfig: async (accessToken, sourceConfig) => {
    // Return { valid: true } or { valid: false, error: 'message' }
  },
  // Optional: map source metadata to semantic tag keys (translated to slots by sync engine)
  mapTags: (metadata) => {
    // Return Record<string, unknown> with keys matching tagDefinitions[].id
  },
}
```
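To make the shape concrete, here is a minimal `listDocuments` sketch against a hypothetical paginated REST endpoint. The endpoint path, the `{ items, nextCursor }` response shape, and the `domain` config field are illustrative assumptions, not a real service API; `computeContentHash` is the helper shown in the hashing section below.
```typescript
// Sketch only: endpoint, response shape, and sourceConfig.domain are hypothetical
listDocuments: async (accessToken, sourceConfig, cursor) => {
  const url = new URL(`https://${sourceConfig.domain}/api/documents`)
  if (cursor) url.searchParams.set('cursor', cursor)

  const response = await fetchWithRetry(url.toString(), {
    headers: { Authorization: `Bearer ${accessToken}` },
  })
  if (!response.ok) throw new Error(`{Service} list failed: ${response.status}`)

  const data = await response.json()
  const documents: ExternalDocument[] = await Promise.all(
    data.items.map(async (item: { id: string; title: string; body: string; url: string }) => ({
      externalId: item.id,
      title: item.title,
      content: item.body,
      mimeType: 'text/plain' as const,
      contentHash: await computeContentHash(item.body), // hash the extracted text itself
      sourceUrl: item.url,
      metadata: { raw: item }, // fed to mapTags
    }))
  )

  return { documents, nextCursor: data.nextCursor, hasMore: Boolean(data.nextCursor) }
},
```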
## ConfigField Types
The add-connector modal renders these automatically — no custom UI needed.
```typescript
// Text input
{
  id: 'domain',
  title: 'Domain',
  type: 'short-input',
  placeholder: 'yoursite.example.com',
  required: true,
}

// Dropdown (static options)
{
  id: 'contentType',
  title: 'Content Type',
  type: 'dropdown',
  required: false,
  options: [
    { label: 'Pages only', id: 'page' },
    { label: 'Blog posts only', id: 'blogpost' },
    { label: 'All content', id: 'all' },
  ],
}
```
## ExternalDocument Shape
Every document returned from `listDocuments`/`getDocument` must include:
```typescript
{
  externalId: string                  // Source-specific unique ID
  title: string                       // Document title
  content: string                     // Extracted plain text
  mimeType: 'text/plain'              // Always text/plain (content is extracted)
  contentHash: string                 // SHA-256 of content (change detection)
  sourceUrl?: string                  // Link back to original (stored on document record)
  metadata?: Record<string, unknown>  // Source-specific data (fed to mapTags)
}
```
## Content Hashing (Required)
The sync engine uses content hashes for change detection:
```typescript
async function computeContentHash(content: string): Promise<string> {
  const data = new TextEncoder().encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}
```
## tagDefinitions — Declared Tag Definitions
Declare which tags the connector populates using semantic IDs. Shown in the add-connector modal as opt-out checkboxes.
On connector creation, slots are **dynamically assigned** via `getNextAvailableSlot` — connectors never hardcode slot names.
```typescript
tagDefinitions: [
  { id: 'labels', displayName: 'Labels', fieldType: 'text' },
  { id: 'version', displayName: 'Version', fieldType: 'number' },
  { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
],
```
Each entry has:
- `id`: Semantic key matching a key returned by `mapTags` (e.g. `'labels'`, `'version'`)
- `displayName`: Human-readable name shown in the UI (e.g. "Labels", "Last Modified")
- `fieldType`: `'text'` | `'number'` | `'date'` | `'boolean'` — determines which slot pool to draw from
Users can opt out of specific tags in the modal. Disabled IDs are stored in `sourceConfig.disabledTagIds`.
The assigned mapping (`semantic id → slot`) is stored in `sourceConfig.tagSlotMapping`.
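As a rough mental model of what happens on creation (a sketch under assumed names; the real pool logic lives in the sync engine, and the slot identifiers are hypothetical):
```typescript
// Illustrative sketch only: getNextAvailableSlot's exact signature and the slot names
// it returns are assumptions about the sync engine, not connector code you write.
const tagSlotMapping: Record<string, string> = {}
for (const def of connector.tagDefinitions ?? []) {
  if (sourceConfig.disabledTagIds?.includes(def.id)) continue // user opted out in the modal
  tagSlotMapping[def.id] = getNextAvailableSlot(def.fieldType) // e.g. 'labels' -> a free text slot
}
sourceConfig.tagSlotMapping = tagSlotMapping // persisted; used to translate mapTags keys to slots
```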
## mapTags — Metadata to Semantic Keys
Maps source metadata to semantic tag keys. Required if `tagDefinitions` is set.
The sync engine calls this automatically and translates semantic keys to actual DB slots
using the `tagSlotMapping` stored on the connector.
Return keys must match the `id` values declared in `tagDefinitions`.
```typescript
mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
  const result: Record<string, unknown> = {}

  // Validate arrays before casting — metadata may be malformed
  const labels = Array.isArray(metadata.labels) ? (metadata.labels as string[]) : []
  if (labels.length > 0) result.labels = labels.join(', ')

  // Validate numbers — guard against NaN
  if (metadata.version != null) {
    const num = Number(metadata.version)
    if (!Number.isNaN(num)) result.version = num
  }

  // Validate dates — guard against Invalid Date
  if (typeof metadata.lastModified === 'string') {
    const date = new Date(metadata.lastModified)
    if (!Number.isNaN(date.getTime())) result.lastModified = date
  }

  return result
}
```
## External API Calls — Use `fetchWithRetry`
All external API calls must use `fetchWithRetry` from `@/lib/knowledge/documents/utils` instead of raw `fetch()`. This provides exponential backoff with retries on 429/502/503/504 errors. It returns a standard `Response` — all `.ok`, `.json()`, `.text()` checks work unchanged.
For `validateConfig` (user-facing, called on save), pass `VALIDATE_RETRY_OPTIONS` to cap wait time at ~7s. Background operations (`listDocuments`, `getDocument`) use the built-in defaults (5 retries, ~31s max).
```typescript
import { VALIDATE_RETRY_OPTIONS, fetchWithRetry } from '@/lib/knowledge/documents/utils'

// Background sync — use defaults
const response = await fetchWithRetry(url, {
  method: 'GET',
  headers: { Authorization: `Bearer ${accessToken}` },
})

// validateConfig — tighter retry budget
const response = await fetchWithRetry(url, { ... }, VALIDATE_RETRY_OPTIONS)
```
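Putting it together, a `validateConfig` built on this might look like the following (the validation endpoint and `domain` field are hypothetical assumptions about the target service):
```typescript
// Sketch only: the /api/me endpoint is an assumed stand-in for a real validation call
validateConfig: async (accessToken, sourceConfig) => {
  const response = await fetchWithRetry(
    `https://${sourceConfig.domain}/api/me`,
    { headers: { Authorization: `Bearer ${accessToken}` } },
    VALIDATE_RETRY_OPTIONS // cap retries so the save UI isn't blocked for long
  )
  if (!response.ok) {
    return { valid: false, error: `Could not reach {Service} (HTTP ${response.status})` }
  }
  return { valid: true }
},
```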
## sourceUrl
If `ExternalDocument.sourceUrl` is set, the sync engine stores it on the document record. Always construct the full URL (not a relative path).
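For example, with a hypothetical API that returns relative links:
```typescript
// Resolve a relative link from the source API into an absolute URL (illustrative names)
const sourceUrl = new URL(item.relativeUrl, `https://${sourceConfig.domain}`).toString()
```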
## Sync Engine Behavior (Do Not Modify)
The sync engine (`lib/knowledge/connectors/sync-engine.ts`) is connector-agnostic. It:
1. Calls `listDocuments` with pagination until `hasMore` is false
2. Compares `contentHash` to detect new/changed/unchanged documents
3. Stores `sourceUrl` and calls `mapTags` on insert/update automatically
4. Handles soft-delete of removed documents
You never need to modify the sync engine when adding a connector.
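A simplified model of that loop, as pseudocode (`findByExternalId`, `insertDocument`, and `updateDocument` are hypothetical stand-ins for the engine's internals):
```typescript
// Sketch of the sync engine's ingest loop, not its actual code
let cursor: string | undefined
let hasMore = true
while (hasMore) {
  const page = await connector.listDocuments(accessToken, sourceConfig, cursor)
  for (const doc of page.documents) {
    const existing = await findByExternalId(doc.externalId)
    if (!existing) await insertDocument(doc) // new: stores sourceUrl, applies mapTags
    else if (existing.contentHash !== doc.contentHash) await updateDocument(doc) // changed
    // unchanged: skipped
  }
  cursor = page.nextCursor
  hasMore = page.hasMore
}
// afterwards: documents no longer returned by the source are soft-deleted
```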
## OAuth Credential Reuse
Connectors reuse the existing OAuth infrastructure. The `oauth.provider` must match an `OAuthService` from `apps/sim/lib/oauth/types.ts`. Check existing providers before adding a new one.
## Icon
The `icon` field on `ConnectorConfig` is used throughout the UI — in the connector list, the add-connector modal, and as the document icon in the knowledge base table (replacing the generic file type icon for connector-sourced documents). The icon is read from `CONNECTOR_REGISTRY[connectorType].icon` at runtime — no separate icon map to maintain.
If the service already has an icon in `apps/sim/components/icons.tsx` (from a tool integration), reuse it. Otherwise, ask the user to provide the SVG.
## Registering
Add one line to `apps/sim/connectors/registry.ts`:
```typescript
import { {service}Connector } from '@/connectors/{service}'

export const CONNECTOR_REGISTRY: ConnectorRegistry = {
  // ... existing connectors ...
  {service}: {service}Connector,
}
```
## Reference Implementation
See `apps/sim/connectors/confluence/confluence.ts` for a complete example with:
- Multiple config field types (text + dropdown)
- Label fetching and CQL search filtering
- Blogpost + page content types
- `mapTags` mapping labels, version, and dates to semantic keys
## Checklist
- [ ] Created `connectors/{service}/{service}.ts` with full ConnectorConfig
- [ ] Created `connectors/{service}/index.ts` barrel export
- [ ] `oauth.provider` matches an existing OAuthService in `lib/oauth/types.ts`
- [ ] `listDocuments` handles pagination and computes content hashes
- [ ] `sourceUrl` set on each ExternalDocument (full URL, not relative)
- [ ] `metadata` includes source-specific data for tag mapping
- [ ] `tagDefinitions` declared for each semantic key returned by `mapTags`
- [ ] `mapTags` implemented if source has useful metadata (labels, dates, versions)
- [ ] `validateConfig` verifies the source is accessible
- [ ] All external API calls use `fetchWithRetry` (not raw `fetch`)
- [ ] All optional config fields validated in `validateConfig`
- [ ] Icon exists in `components/icons.tsx` (or asked user to provide SVG)
- [ ] Registered in `connectors/registry.ts`

View File

@@ -59,12 +59,6 @@ body {
--content-gap: 1.75rem;
}
/* Remove custom layout variable overrides to fall back to fumadocs defaults */
/* ============================================
Navbar Light Mode Styling
============================================ */
/* Light mode navbar and search styling */
:root:not(.dark) nav {
background-color: hsla(0, 0%, 96%, 0.85) !important;
@@ -88,10 +82,6 @@ body {
-webkit-backdrop-filter: blur(25px) saturate(180%) brightness(0.6) !important;
}
/* ============================================
Custom Sidebar Styling (Turborepo-inspired)
============================================ */
/* Floating sidebar appearance - remove background */
[data-sidebar-container],
#nd-sidebar {
@@ -468,10 +458,6 @@ aside[data-sidebar],
writing-mode: horizontal-tb !important;
}
/* ============================================
Code Block Styling (Improved)
============================================ */
/* Apply Geist Mono to code elements */
code,
pre,
@@ -532,10 +518,6 @@ pre code .line {
color: var(--color-fd-primary);
}
/* ============================================
TOC (Table of Contents) Styling
============================================ */
/* Remove the thin border-left on nested TOC items (keeps main indicator only) */
#nd-toc a[style*="padding-inline-start"] {
border-left: none !important;
@@ -554,10 +536,6 @@ main article,
padding-bottom: 4rem;
}
/* ============================================
Center and Constrain Main Content Width
============================================ */
/* Main content area - center and constrain like turborepo/raindrop */
/* Note: --sidebar-offset and --toc-offset are now applied at #nd-docs-layout level */
main[data-main] {

View File

@@ -4407,6 +4407,161 @@ export function DatadogIcon(props: SVGProps<SVGSVGElement>) {
)
}
export function MicrosoftDataverseIcon(props: SVGProps<SVGSVGElement>) {
const id = useId()
const clip0 = `dataverse_clip0_${id}`
const clip1 = `dataverse_clip1_${id}`
const clip2 = `dataverse_clip2_${id}`
const paint0 = `dataverse_paint0_${id}`
const paint1 = `dataverse_paint1_${id}`
const paint2 = `dataverse_paint2_${id}`
const paint3 = `dataverse_paint3_${id}`
const paint4 = `dataverse_paint4_${id}`
const paint5 = `dataverse_paint5_${id}`
const paint6 = `dataverse_paint6_${id}`
return (
<svg
{...props}
width='96'
height='96'
viewBox='0 0 96 96'
fill='none'
xmlns='http://www.w3.org/2000/svg'
>
<g clipPath={`url(#${clip0})`}>
<g clipPath={`url(#${clip1})`}>
<g clipPath={`url(#${clip2})`}>
<path
d='M13.8776 21.8242C29.1033 8.13791 49.7501 8.1861 62.955 18.9134C74.9816 28.6836 77.4697 44.3159 70.851 55.7801C64.2321 67.2443 52.5277 70.1455 39.5011 62.6247L31.7286 76.087L31.7234 76.0862C27.4181 83.5324 17.8937 86.0828 10.4437 81.7817C7.45394 80.0556 5.25322 77.4879 3.96665 74.551L3.96096 74.5511C-4.07832 55.7804 0.200745 34.1184 13.8776 21.8242Z'
fill={`url(#${paint0})`}
/>
<path
d='M13.8776 21.8242C29.1033 8.13791 49.7501 8.1861 62.955 18.9134C74.9816 28.6836 77.4697 44.3159 70.851 55.7801C64.2321 67.2443 52.5277 70.1455 39.5011 62.6247L31.7286 76.087L31.7234 76.0862C27.4181 83.5324 17.8937 86.0828 10.4437 81.7817C7.45394 80.0556 5.25322 77.4879 3.96665 74.551L3.96096 74.5511C-4.07832 55.7804 0.200745 34.1184 13.8776 21.8242Z'
fill={`url(#${paint1})`}
fillOpacity='0.8'
/>
<path
d='M85.4327 14.2231C88.4528 15.9668 90.6686 18.569 91.9494 21.5433L91.9533 21.5444C99.9406 40.2943 95.6533 61.9068 81.9983 74.1814C66.7726 87.8677 46.1257 87.8196 32.9209 77.0923C20.8945 67.3221 18.4062 51.6897 25.0249 40.2256C31.6438 28.7614 43.3482 25.8601 56.3748 33.381L64.1434 19.9255L64.1482 19.9249C68.4516 12.4736 77.9805 9.92084 85.4327 14.2231Z'
fill={`url(#${paint2})`}
/>
<path
d='M85.4327 14.2231C88.4528 15.9668 90.6686 18.569 91.9494 21.5433L91.9533 21.5444C99.9406 40.2943 95.6533 61.9068 81.9983 74.1814C66.7726 87.8677 46.1257 87.8196 32.9209 77.0923C20.8945 67.3221 18.4062 51.6897 25.0249 40.2256C31.6438 28.7614 43.3482 25.8601 56.3748 33.381L64.1434 19.9255L64.1482 19.9249C68.4516 12.4736 77.9805 9.92084 85.4327 14.2231Z'
fill={`url(#${paint3})`}
fillOpacity='0.9'
/>
<path
d='M39.5041 62.6261C52.5307 70.1469 64.2352 67.2456 70.8541 55.7814C77.2488 44.7055 75.1426 29.7389 64.147 19.9271L56.3791 33.3814L39.5041 62.6261Z'
fill={`url(#${paint4})`}
/>
<path
d='M56.3794 33.3815C43.3528 25.8607 31.6482 28.762 25.0294 40.2262C18.6347 51.3021 20.7409 66.2687 31.7364 76.0806L39.5043 62.6262L56.3794 33.3815Z'
fill={`url(#${paint5})`}
/>
<path
d='M33.3215 56.4453C37.9837 64.5204 48.3094 67.2872 56.3846 62.625C64.4598 57.9628 67.2266 47.6371 62.5643 39.5619C57.9021 31.4867 47.5764 28.72 39.5013 33.3822C31.4261 38.0444 28.6593 48.3701 33.3215 56.4453Z'
fill={`url(#${paint6})`}
/>
</g>
</g>
</g>
<defs>
<radialGradient
id={paint0}
cx='0'
cy='0'
r='1'
gradientUnits='userSpaceOnUse'
gradientTransform='translate(46.0001 49.4996) rotate(-148.717) scale(46.2195 47.5359)'
>
<stop offset='0.465088' stopColor='#09442A' />
<stop offset='0.70088' stopColor='#136C6C' />
<stop offset='1' stopColor='#22918B' />
</radialGradient>
<radialGradient
id={paint1}
cx='0'
cy='0'
r='1'
gradientUnits='userSpaceOnUse'
gradientTransform='translate(50.0001 32.4996) rotate(123.57) scale(66.0095 46.5498)'
>
<stop offset='0.718705' stopColor='#1A7F7C' stopOpacity='0' />
<stop offset='1' stopColor='#16BBDA' />
</radialGradient>
<radialGradient
id={paint2}
cx='0'
cy='0'
r='1'
gradientUnits='userSpaceOnUse'
gradientTransform='translate(50.4999 44.5001) rotate(30.75) scale(45.9618 44.5095)'
>
<stop offset='0.358097' stopColor='#136C6C' />
<stop offset='0.789474' stopColor='#42B870' />
<stop offset='1' stopColor='#76D45E' />
</radialGradient>
<radialGradient
id={paint3}
cx='0'
cy='0'
r='1'
gradientTransform='matrix(42.5 -36.0002 31.1824 36.8127 49.4998 55.5001)'
gradientUnits='userSpaceOnUse'
>
<stop offset='0.583166' stopColor='#76D45E' stopOpacity='0' />
<stop offset='1' stopColor='#C8F5B7' />
</radialGradient>
<radialGradient
id={paint4}
cx='0'
cy='0'
r='1'
gradientUnits='userSpaceOnUse'
gradientTransform='translate(47.5 48) rotate(-58.9042) scale(32.6898)'
>
<stop offset='0.486266' stopColor='#22918B' />
<stop offset='0.729599' stopColor='#42B870' />
<stop offset='1' stopColor='#43E5CA' />
</radialGradient>
<radialGradient
id={paint5}
cx='0'
cy='0'
r='1'
gradientUnits='userSpaceOnUse'
gradientTransform='translate(47.3833 49.0077) rotate(119.859) scale(31.1328 29.4032)'
>
<stop offset='0.459553' stopColor='#08494E' />
<stop offset='0.742242' stopColor='#1A7F7C' />
<stop offset='1' stopColor='#309C61' />
</radialGradient>
<radialGradient
id={paint6}
cx='0'
cy='0'
r='1'
gradientUnits='userSpaceOnUse'
gradientTransform='translate(52.5 40) rotate(120.784) scale(27.3542)'
>
<stop stopColor='#C8F5B7' />
<stop offset='0.24583' stopColor='#98F0B0' />
<stop offset='0.643961' stopColor='#52D17C' />
<stop offset='1' stopColor='#119FC5' />
</radialGradient>
<clipPath id={clip0}>
<rect width='96' height='96' fill='white' />
</clipPath>
<clipPath id={clip1}>
<rect width='96' height='96' fill='white' />
</clipPath>
<clipPath id={clip2}>
<rect width='95.9998' height='96' fill='white' />
</clipPath>
</defs>
</svg>
)
}
export function KalshiIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} viewBox='0 0 78 20' fill='currentColor' xmlns='http://www.w3.org/2000/svg'>
@@ -5532,3 +5687,33 @@ export function OnePasswordIcon(props: SVGProps<SVGSVGElement>) {
</svg>
)
}
export function VercelIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      {...props}
      viewBox='0 0 256 222'
      xmlns='http://www.w3.org/2000/svg'
      preserveAspectRatio='xMidYMid'
    >
      <g transform='translate(19.2 16.63) scale(0.85)'>
        <polygon fill='#fafafa' points='128 0 256 221.705007 0 221.705007' />
      </g>
    </svg>
  )
}
export function CloudflareIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 512 512'>
      <path
        fill='#f38020'
        d='M331 326c11-26-4-38-19-38l-148-2c-4 0-4-6 1-7l150-2c17-1 37-15 43-33 0 0 10-21 9-24a97 97 0 0 0-187-11c-38-25-78 9-69 46-48 3-65 46-60 72 0 1 1 2 3 2h274c1 0 3-1 3-3z'
      />
      <path
        fill='#faae40'
        d='M381 224c-4 0-6-1-7 1l-5 21c-5 16 3 30 20 31l32 2c4 0 4 6-1 7l-33 1c-36 4-46 39-46 39 0 2 0 3 2 3h113l3-2a81 81 0 0 0-78-103'
      />
    </svg>
  )
}

View File

@@ -19,6 +19,7 @@ import {
CirclebackIcon,
ClayIcon,
ClerkIcon,
CloudflareIcon,
ConfluenceIcon,
CursorIcon,
DatadogIcon,
@@ -71,6 +72,7 @@ import {
MailgunIcon,
MailServerIcon,
Mem0Icon,
MicrosoftDataverseIcon,
MicrosoftExcelIcon,
MicrosoftOneDriveIcon,
MicrosoftPlannerIcon,
@@ -125,6 +127,7 @@ import {
TTSIcon,
TwilioIcon,
TypeformIcon,
VercelIcon,
VideoIcon,
WealthboxIcon,
WebflowIcon,
@@ -155,6 +158,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
circleback: CirclebackIcon,
clay: ClayIcon,
clerk: ClerkIcon,
cloudflare: CloudflareIcon,
confluence_v2: ConfluenceIcon,
cursor_v2: CursorIcon,
datadog: DatadogIcon,
@@ -208,6 +212,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
mailgun: MailgunIcon,
mem0: Mem0Icon,
memory: BrainIcon,
microsoft_dataverse: MicrosoftDataverseIcon,
microsoft_excel_v2: MicrosoftExcelIcon,
microsoft_planner: MicrosoftPlannerIcon,
microsoft_teams: MicrosoftTeamsIcon,
@@ -262,6 +267,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
twilio_sms: TwilioIcon,
twilio_voice: TwilioIcon,
typeform: TypeformIcon,
vercel: VercelIcon,
video_generator_v2: VideoIcon,
vision_v2: EyeIcon,
wealthbox: WealthboxIcon,

View File

@@ -130,37 +130,4 @@ Update multiple existing records in an Airtable table
| `records` | json | Array of updated Airtable records |
| `metadata` | json | Operation metadata including record count and updated record IDs |
### `airtable_list_bases`
List all bases the authenticated user has access to
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `bases` | json | Array of Airtable bases with id, name, and permissionLevel |
| `metadata` | json | Operation metadata including total bases count |
### `airtable_get_base_schema`
Get the schema of all tables, fields, and views in an Airtable base
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `baseId` | string | Yes | Airtable base ID \(starts with "app", e.g., "appXXXXXXXXXXXXXX"\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `tables` | json | Array of table schemas with fields and views |
| `metadata` | json | Operation metadata including total tables count |

View File

@@ -0,0 +1,569 @@
---
title: Cloudflare
description: Manage DNS, domains, certificates, and cache
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="cloudflare"
color="#F5F6FA"
/>
{/* MANUAL-CONTENT-START:intro */}
[Cloudflare](https://cloudflare.com/) is a global cloud platform that provides content delivery, domain management, cybersecurity, and performance services for websites and applications.
In Sim, the Cloudflare integration empowers your agents to automate the management of DNS records, SSL/TLS certificates, domains (zones), cache, zone settings, and more through easy-to-use API tools. Agents can securely list and edit domains, update DNS records, monitor analytics, and manage security and performance—all as part of your automated workflows.
With Cloudflare, you can:
- **Manage DNS and Domains**: List all your domains (zones), view zone details, and fully control DNS records from your automated agent workflows.
- **Handle SSL/TLS Certificates and Settings**: Issue, renew, or list certificates and adjust security and performance settings for your sites.
- **Purge Cache and Analyze Traffic**: Instantly purge edge cache and review real-time DNS analytics directly within your Sim agent processes.
- **Automate Security and Operations**: Use agents to programmatically manage zones, update settings, and streamline repetitive Cloudflare tasks.
This integration enables streamlined, secure management of your site's infrastructure from within Sim. Your agents can integrate Cloudflare operations directly into processes—keeping DNS records up-to-date, responding to security events, improving site performance, and automating large-scale site and account administration.
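Under the hood, these tools wrap the Cloudflare v4 REST API using Bearer-token authentication. As a rough sketch (illustrative only, not the integration's actual code), listing zones corresponds to a request like:
```typescript
// Illustrative sketch of the underlying Cloudflare v4 call
const res = await fetch('https://api.cloudflare.com/client/v4/zones?status=active&per_page=20', {
  headers: { Authorization: `Bearer ${apiToken}` }, // the tools' `apiKey` input
})
const { result: zones, result_info } = await res.json() // zones plus pagination info
```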
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Cloudflare into the workflow. Manage zones (domains), DNS records, SSL/TLS certificates, zone settings, DNS analytics, and cache purging via the Cloudflare API.
## Tools
### `cloudflare_list_zones`
Lists all zones (domains) in the Cloudflare account.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `name` | string | No | Filter zones by domain name \(e.g., "example.com"\) |
| `status` | string | No | Filter by zone status: "initializing", "pending", "active", or "moved" |
| `page` | number | No | Page number for pagination \(default: 1\) |
| `per_page` | number | No | Number of zones per page \(default: 20, max: 50\) |
| `accountId` | string | No | Filter zones by account ID |
| `order` | string | No | Sort field \(name, status, account.id, account.name\) |
| `direction` | string | No | Sort direction \(asc, desc\) |
| `match` | string | No | Match logic for filters \(any, all\). Default: all |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `zones` | array | List of zones/domains |
| ↳ `id` | string | Zone ID |
| ↳ `name` | string | Domain name |
| ↳ `status` | string | Zone status \(initializing, pending, active, moved\) |
| ↳ `paused` | boolean | Whether the zone is paused |
| ↳ `type` | string | Zone type \(full, partial, or secondary\) |
| ↳ `name_servers` | array | Assigned Cloudflare name servers |
| ↳ `original_name_servers` | array | Original name servers before moving to Cloudflare |
| ↳ `created_on` | string | ISO 8601 date when the zone was created |
| ↳ `modified_on` | string | ISO 8601 date when the zone was last modified |
| ↳ `activated_on` | string | ISO 8601 date when the zone was activated |
| ↳ `development_mode` | number | Seconds remaining in development mode \(0 = off\) |
| ↳ `plan` | object | Zone plan information |
| ↳ `id` | string | Plan identifier |
| ↳ `name` | string | Plan name |
| ↳ `price` | number | Plan price |
| ↳ `is_subscribed` | boolean | Whether the zone is subscribed to the plan |
| ↳ `frequency` | string | Plan billing frequency |
| ↳ `currency` | string | Plan currency |
| ↳ `legacy_id` | string | Legacy plan identifier |
| ↳ `account` | object | Account the zone belongs to |
| ↳ `id` | string | Account identifier |
| ↳ `name` | string | Account name |
| ↳ `owner` | object | Zone owner information |
| ↳ `id` | string | Owner identifier |
| ↳ `name` | string | Owner name |
| ↳ `type` | string | Owner type |
| ↳ `meta` | object | Zone metadata |
| ↳ `cdn_only` | boolean | Whether the zone is CDN only |
| ↳ `custom_certificate_quota` | number | Custom certificate quota |
| ↳ `dns_only` | boolean | Whether the zone is DNS only |
| ↳ `foundation_dns` | boolean | Whether foundation DNS is enabled |
| ↳ `page_rule_quota` | number | Page rule quota |
| ↳ `phishing_detected` | boolean | Whether phishing was detected |
| ↳ `step` | number | Current setup step |
| ↳ `vanity_name_servers` | array | Custom vanity name servers |
| ↳ `permissions` | array | User permissions for the zone |
| `total_count` | number | Total number of zones matching the query |
### `cloudflare_get_zone`
Gets details for a specific zone (domain) by its ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to retrieve details for |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Zone ID |
| `name` | string | Domain name |
| `status` | string | Zone status \(initializing, pending, active, moved\) |
| `paused` | boolean | Whether the zone is paused |
| `type` | string | Zone type \(full, partial, or secondary\) |
| `name_servers` | array | Assigned Cloudflare name servers |
| `original_name_servers` | array | Original name servers before moving to Cloudflare |
| `created_on` | string | ISO 8601 date when the zone was created |
| `modified_on` | string | ISO 8601 date when the zone was last modified |
| `activated_on` | string | ISO 8601 date when the zone was activated |
| `development_mode` | number | Seconds remaining in development mode \(0 = off\) |
| `plan` | object | Zone plan information |
| ↳ `id` | string | Plan identifier |
| ↳ `name` | string | Plan name |
| ↳ `price` | number | Plan price |
| ↳ `is_subscribed` | boolean | Whether the zone is subscribed to the plan |
| ↳ `frequency` | string | Plan billing frequency |
| ↳ `currency` | string | Plan currency |
| ↳ `legacy_id` | string | Legacy plan identifier |
| `account` | object | Account the zone belongs to |
| ↳ `id` | string | Account identifier |
| ↳ `name` | string | Account name |
| `owner` | object | Zone owner information |
| ↳ `id` | string | Owner identifier |
| ↳ `name` | string | Owner name |
| ↳ `type` | string | Owner type |
| `meta` | object | Zone metadata |
| ↳ `cdn_only` | boolean | Whether the zone is CDN only |
| ↳ `custom_certificate_quota` | number | Custom certificate quota |
| ↳ `dns_only` | boolean | Whether the zone is DNS only |
| ↳ `foundation_dns` | boolean | Whether foundation DNS is enabled |
| ↳ `page_rule_quota` | number | Page rule quota |
| ↳ `phishing_detected` | boolean | Whether phishing was detected |
| ↳ `step` | number | Current setup step |
| `vanity_name_servers` | array | Custom vanity name servers |
| `permissions` | array | User permissions for the zone |
### `cloudflare_create_zone`
Adds a new zone (domain) to the Cloudflare account.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `name` | string | Yes | The domain name to add \(e.g., "example.com"\) |
| `accountId` | string | Yes | The Cloudflare account ID |
| `type` | string | No | Zone type: "full" \(Cloudflare manages DNS\), "partial" \(CNAME setup\), or "secondary" \(secondary DNS\) |
| `jump_start` | boolean | No | Automatically attempt to fetch existing DNS records when creating the zone |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Created zone ID |
| `name` | string | Domain name |
| `status` | string | Zone status \(initializing, pending, active, moved\) |
| `paused` | boolean | Whether the zone is paused |
| `type` | string | Zone type \(full, partial, or secondary\) |
| `name_servers` | array | Assigned Cloudflare name servers |
| `original_name_servers` | array | Original name servers before moving to Cloudflare |
| `created_on` | string | ISO 8601 date when the zone was created |
| `modified_on` | string | ISO 8601 date when the zone was last modified |
| `activated_on` | string | ISO 8601 date when the zone was activated |
| `development_mode` | number | Seconds remaining in development mode \(0 = off\) |
| `plan` | object | Zone plan information |
| ↳ `id` | string | Plan identifier |
| ↳ `name` | string | Plan name |
| ↳ `price` | number | Plan price |
| ↳ `is_subscribed` | boolean | Whether the zone is subscribed to the plan |
| ↳ `frequency` | string | Plan billing frequency |
| ↳ `currency` | string | Plan currency |
| ↳ `legacy_id` | string | Legacy plan identifier |
| `account` | object | Account the zone belongs to |
| ↳ `id` | string | Account identifier |
| ↳ `name` | string | Account name |
| `owner` | object | Zone owner information |
| ↳ `id` | string | Owner identifier |
| ↳ `name` | string | Owner name |
| ↳ `type` | string | Owner type |
| `meta` | object | Zone metadata |
| ↳ `cdn_only` | boolean | Whether the zone is CDN only |
| ↳ `custom_certificate_quota` | number | Custom certificate quota |
| ↳ `dns_only` | boolean | Whether the zone is DNS only |
| ↳ `foundation_dns` | boolean | Whether foundation DNS is enabled |
| ↳ `page_rule_quota` | number | Page rule quota |
| ↳ `phishing_detected` | boolean | Whether phishing was detected |
| ↳ `step` | number | Current setup step |
| `vanity_name_servers` | array | Custom vanity name servers |
| `permissions` | array | User permissions for the zone |
### `cloudflare_delete_zone`
Deletes a zone (domain) from the Cloudflare account.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to delete |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Deleted zone ID |
### `cloudflare_list_dns_records`
Lists DNS records for a specific zone.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to list DNS records for |
| `type` | string | No | Filter by record type \(e.g., "A", "AAAA", "CNAME", "MX", "TXT"\) |
| `name` | string | No | Filter by record name \(exact match\) |
| `content` | string | No | Filter by record content \(exact match\) |
| `page` | number | No | Page number for pagination \(default: 1\) |
| `per_page` | number | No | Number of records per page \(default: 100, max: 5000000\) |
| `direction` | string | No | Sort direction \(asc or desc\) |
| `match` | string | No | Match logic for filters: any or all \(default: all\) |
| `order` | string | No | Sort field \(type, name, content, ttl, proxied\) |
| `proxied` | boolean | No | Filter by proxy status |
| `search` | string | No | Free-text search across record name, content, and value |
| `tag` | string | No | Filter by tags \(comma-separated\) |
| `tag_match` | string | No | Tag filter match logic: any or all |
| `commentFilter` | string | No | Filter records by comment content \(substring match\) |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `records` | array | List of DNS records |
| ↳ `id` | string | Unique identifier for the DNS record |
| ↳ `zone_id` | string | The ID of the zone the record belongs to |
| ↳ `zone_name` | string | The name of the zone |
| ↳ `type` | string | Record type \(A, AAAA, CNAME, MX, TXT, etc.\) |
| ↳ `name` | string | Record name \(e.g., example.com\) |
| ↳ `content` | string | Record content \(e.g., IP address\) |
| ↳ `proxiable` | boolean | Whether the record can be proxied |
| ↳ `proxied` | boolean | Whether Cloudflare proxy is enabled |
| ↳ `ttl` | number | TTL in seconds \(1 = automatic\) |
| ↳ `locked` | boolean | Whether the record is locked |
| ↳ `priority` | number | MX/SRV record priority |
| ↳ `comment` | string | Comment associated with the record |
| ↳ `tags` | array | Tags associated with the record |
| ↳ `comment_modified_on` | string | ISO 8601 timestamp when the comment was last modified |
| ↳ `tags_modified_on` | string | ISO 8601 timestamp when tags were last modified |
| ↳ `meta` | object | Record metadata |
| ↳ `source` | string | Source of the DNS record |
| ↳ `created_on` | string | ISO 8601 timestamp when the record was created |
| ↳ `modified_on` | string | ISO 8601 timestamp when the record was last modified |
| `total_count` | number | Total number of DNS records matching the query |
### `cloudflare_create_dns_record`
Creates a new DNS record for a zone.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to create the DNS record in |
| `type` | string | Yes | DNS record type \(e.g., "A", "AAAA", "CNAME", "MX", "TXT", "NS", "SRV"\) |
| `name` | string | Yes | DNS record name \(e.g., "example.com" or "subdomain.example.com"\) |
| `content` | string | Yes | DNS record content \(e.g., IP address for A records, target for CNAME\) |
| `ttl` | number | No | Time to live in seconds \(1 = automatic, default: 1\) |
| `proxied` | boolean | No | Whether to enable Cloudflare proxy \(default: false\) |
| `priority` | number | No | Priority for MX and SRV records |
| `comment` | string | No | Comment for the DNS record |
| `tags` | string | No | Comma-separated tags for the DNS record |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Unique identifier for the created DNS record |
| `zone_id` | string | The ID of the zone the record belongs to |
| `zone_name` | string | The name of the zone |
| `type` | string | DNS record type \(A, AAAA, CNAME, MX, TXT, etc.\) |
| `name` | string | DNS record hostname |
| `content` | string | DNS record value \(e.g., IP address, target hostname\) |
| `proxiable` | boolean | Whether the record can be proxied through Cloudflare |
| `proxied` | boolean | Whether Cloudflare proxy is enabled |
| `ttl` | number | Time to live in seconds \(1 = automatic\) |
| `locked` | boolean | Whether the record is locked |
| `priority` | number | Priority for MX and SRV records |
| `comment` | string | Comment associated with the record |
| `tags` | array | Tags associated with the record |
| `comment_modified_on` | string | ISO 8601 timestamp when the comment was last modified |
| `tags_modified_on` | string | ISO 8601 timestamp when tags were last modified |
| `meta` | object | Record metadata |
| ↳ `source` | string | Source of the DNS record |
| `created_on` | string | ISO 8601 timestamp when the record was created |
| `modified_on` | string | ISO 8601 timestamp when the record was last modified |
### `cloudflare_update_dns_record`
Updates an existing DNS record for a zone.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID containing the DNS record |
| `recordId` | string | Yes | The DNS record ID to update |
| `type` | string | No | DNS record type \(e.g., "A", "AAAA", "CNAME", "MX", "TXT"\) |
| `name` | string | No | DNS record name |
| `content` | string | No | DNS record content \(e.g., IP address\) |
| `ttl` | number | No | Time to live in seconds \(1 = automatic\) |
| `proxied` | boolean | No | Whether to enable Cloudflare proxy |
| `priority` | number | No | Priority for MX and SRV records |
| `comment` | string | No | Comment for the DNS record |
| `tags` | string | No | Comma-separated tags for the DNS record |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Unique identifier for the updated DNS record |
| `zone_id` | string | The ID of the zone the record belongs to |
| `zone_name` | string | The name of the zone |
| `type` | string | DNS record type \(A, AAAA, CNAME, MX, TXT, etc.\) |
| `name` | string | DNS record hostname |
| `content` | string | DNS record value \(e.g., IP address, target hostname\) |
| `proxiable` | boolean | Whether the record can be proxied through Cloudflare |
| `proxied` | boolean | Whether Cloudflare proxy is enabled |
| `ttl` | number | Time to live in seconds \(1 = automatic\) |
| `locked` | boolean | Whether the record is locked |
| `priority` | number | Priority for MX and SRV records |
| `comment` | string | Comment associated with the record |
| `tags` | array | Tags associated with the record |
| `comment_modified_on` | string | ISO 8601 timestamp when the comment was last modified |
| `tags_modified_on` | string | ISO 8601 timestamp when tags were last modified |
| `meta` | object | Record metadata |
| ↳ `source` | string | Source of the DNS record |
| `created_on` | string | ISO 8601 timestamp when the record was created |
| `modified_on` | string | ISO 8601 timestamp when the record was last modified |
### `cloudflare_delete_dns_record`
Deletes a DNS record from a zone.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID containing the DNS record |
| `recordId` | string | Yes | The DNS record ID to delete |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Deleted record ID |
### `cloudflare_list_certificates`
Lists SSL/TLS certificate packs for a zone.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to list certificates for |
| `status` | string | No | Filter certificate packs by status \(e.g., "all", "active", "pending"\) |
| `page` | number | No | Page number of paginated results \(default: 1\) |
| `per_page` | number | No | Number of certificate packs per page \(default: 20, min: 5, max: 50\) |
| `deploy` | string | No | Filter by deployment environment: "staging" or "production" |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `certificates` | array | List of SSL/TLS certificate packs |
| ↳ `id` | string | Certificate pack ID |
| ↳ `type` | string | Certificate type \(e.g., "universal", "advanced"\) |
| ↳ `hosts` | array | Hostnames covered by this certificate pack |
| ↳ `primary_certificate` | string | ID of the primary certificate in the pack |
| ↳ `status` | string | Certificate pack status \(e.g., "active", "pending"\) |
| ↳ `certificates` | array | Individual certificates within the pack |
| ↳ `id` | string | Certificate ID |
| ↳ `hosts` | array | Hostnames covered by this certificate |
| ↳ `issuer` | string | Certificate issuer |
| ↳ `signature` | string | Signature algorithm \(e.g., "ECDSAWithSHA256"\) |
| ↳ `status` | string | Certificate status |
| ↳ `bundle_method` | string | Bundle method \(e.g., "ubiquitous"\) |
| ↳ `zone_id` | string | Zone ID the certificate belongs to |
| ↳ `uploaded_on` | string | Upload date \(ISO 8601\) |
| ↳ `modified_on` | string | Last modified date \(ISO 8601\) |
| ↳ `expires_on` | string | Expiration date \(ISO 8601\) |
| ↳ `priority` | number | Certificate priority order |
| ↳ `geo_restrictions` | object | Geographic restrictions for the certificate |
| ↳ `label` | string | Geographic restriction label |
| ↳ `cloudflare_branding` | boolean | Whether Cloudflare branding is enabled on the certificate |
| ↳ `validation_method` | string | Validation method \(e.g., "txt", "http", "cname"\) |
| ↳ `validity_days` | number | Validity period in days |
| ↳ `certificate_authority` | string | Certificate authority \(e.g., "lets_encrypt", "google"\) |
| ↳ `validation_errors` | array | Validation issues for the certificate pack |
| ↳ `message` | string | Validation error message |
| ↳ `validation_records` | array | Validation records for the certificate pack |
| ↳ `cname` | string | CNAME record name |
| ↳ `cname_target` | string | CNAME record target |
| ↳ `emails` | array | Email addresses for validation |
| ↳ `http_body` | string | HTTP validation body content |
| ↳ `http_url` | string | HTTP validation URL |
| ↳ `status` | string | Validation record status |
| ↳ `txt_name` | string | TXT record name |
| ↳ `txt_value` | string | TXT record value |
| ↳ `dcv_delegation_records` | array | Domain control validation delegation records |
| ↳ `cname` | string | CNAME record name |
| ↳ `cname_target` | string | CNAME record target |
| ↳ `emails` | array | Email addresses for validation |
| ↳ `http_body` | string | HTTP validation body content |
| ↳ `http_url` | string | HTTP validation URL |
| ↳ `status` | string | Delegation record status |
| ↳ `txt_name` | string | TXT record name |
| ↳ `txt_value` | string | TXT record value |
| `total_count` | number | Total number of certificate packs |
### `cloudflare_get_zone_settings`
Gets all settings for a zone including SSL mode, minification, caching level, and security settings.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to get settings for |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `settings` | array | List of zone settings |
| ↳ `id` | string | Setting identifier \(e.g., ssl, minify, cache_level, security_level, always_use_https\) |
| ↳ `value` | string | Setting value as a string. Simple values returned as-is \(e.g., "full", "on"\). Complex values are JSON-stringified. |
| ↳ `editable` | boolean | Whether the setting can be modified for the current zone plan |
| ↳ `modified_on` | string | ISO 8601 timestamp when the setting was last modified |
| ↳ `time_remaining` | number | Seconds remaining until the setting can be modified again \(only present for rate-limited settings\) |
### `cloudflare_update_zone_setting`
Updates a specific zone setting such as SSL mode, security level, cache level, minification, or other configuration.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to update settings for |
| `settingId` | string | Yes | Setting to update \(e.g., "ssl", "security_level", "cache_level", "minify", "always_use_https", "browser_cache_ttl", "http3", "min_tls_version", "ciphers"\) |
| `value` | string | Yes | New value for the setting as a string or JSON string for complex values \(e.g., "full" for SSL, "medium" for security_level, "aggressive" for cache_level, \'\{"css":"on","html":"on","js":"on"\}\' for minify, \'\["ECDHE-RSA-AES128-GCM-SHA256"\]\' for ciphers\) |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Setting identifier \(e.g., ssl, minify, cache_level\) |
| `value` | string | Updated setting value as a string. Simple values returned as-is \(e.g., "full", "on"\). Complex values are JSON-stringified. |
| `editable` | boolean | Whether the setting can be modified for the current zone plan |
| `modified_on` | string | ISO 8601 timestamp when the setting was last modified |
| `time_remaining` | number | Seconds remaining until the setting can be modified again \(only present for rate-limited settings\) |
### `cloudflare_dns_analytics`
Gets DNS analytics report for a zone including query counts and trends.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to get DNS analytics for |
| `since` | string | No | Start date for analytics \(ISO 8601, e.g., "2024-01-01T00:00:00Z"\) or relative \(e.g., "-6h"\) |
| `until` | string | No | End date for analytics \(ISO 8601, e.g., "2024-01-31T23:59:59Z"\) or relative \(e.g., "now"\) |
| `metrics` | string | Yes | Comma-separated metrics to retrieve \(e.g., "queryCount,uncachedCount,staleCount,responseTimeAvg,responseTimeMedian,responseTime90th,responseTime99th"\) |
| `dimensions` | string | No | Comma-separated dimensions to group by \(e.g., "queryName,queryType,responseCode,responseCached,coloName,origin,dayOfWeek,tcp,ipVersion,querySizeBucket,responseSizeBucket"\) |
| `filters` | string | No | Filters to apply to the data \(e.g., "queryType==A"\) |
| `sort` | string | No | Sort order for the result set. Fields must be included in metrics or dimensions \(e.g., "+queryCount" or "-responseTimeAvg"\) |
| `limit` | number | No | Maximum number of results to return |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `totals` | object | Aggregate DNS analytics totals for the entire queried period |
| ↳ `queryCount` | number | Total number of DNS queries |
| ↳ `uncachedCount` | number | Number of uncached DNS queries |
| ↳ `staleCount` | number | Number of stale DNS queries |
| ↳ `responseTimeAvg` | number | Average response time in milliseconds |
| ↳ `responseTimeMedian` | number | Median response time in milliseconds |
| ↳ `responseTime90th` | number | 90th percentile response time in milliseconds |
| ↳ `responseTime99th` | number | 99th percentile response time in milliseconds |
| `min` | object | Minimum values across the analytics period |
| ↳ `queryCount` | number | Minimum number of DNS queries |
| ↳ `uncachedCount` | number | Minimum number of uncached DNS queries |
| ↳ `staleCount` | number | Minimum number of stale DNS queries |
| ↳ `responseTimeAvg` | number | Minimum average response time in milliseconds |
| ↳ `responseTimeMedian` | number | Minimum median response time in milliseconds |
| ↳ `responseTime90th` | number | Minimum 90th percentile response time in milliseconds |
| ↳ `responseTime99th` | number | Minimum 99th percentile response time in milliseconds |
| `max` | object | Maximum values across the analytics period |
| ↳ `queryCount` | number | Maximum number of DNS queries |
| ↳ `uncachedCount` | number | Maximum number of uncached DNS queries |
| ↳ `staleCount` | number | Maximum number of stale DNS queries |
| ↳ `responseTimeAvg` | number | Maximum average response time in milliseconds |
| ↳ `responseTimeMedian` | number | Maximum median response time in milliseconds |
| ↳ `responseTime90th` | number | Maximum 90th percentile response time in milliseconds |
| ↳ `responseTime99th` | number | Maximum 99th percentile response time in milliseconds |
| `data` | array | Raw analytics data rows returned by the Cloudflare DNS analytics report |
| ↳ `dimensions` | array | Dimension values for this data row, parallel to the requested dimensions list |
| ↳ `metrics` | array | Metric values for this data row, parallel to the requested metrics list |
| `data_lag` | number | Processing lag in seconds before analytics data becomes available |
| `rows` | number | Total number of rows in the result set |
| `query` | object | Echo of the query parameters sent to the API |
| ↳ `since` | string | Start date of the analytics query |
| ↳ `until` | string | End date of the analytics query |
| ↳ `metrics` | array | Metrics requested in the query |
| ↳ `dimensions` | array | Dimensions requested in the query |
| ↳ `filters` | string | Filters applied to the query |
| ↳ `sort` | array | Sort order applied to the query |
| ↳ `limit` | number | Maximum number of results requested |
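As an illustration, a sketch of the underlying report request, assuming the standard `/dns_analytics/report` endpoint; the comma-separated `metrics` and `dimensions` inputs map directly onto query parameters:

```typescript
const ZONE_ID = "023e105f4ecef8ad9ca31a8372d0c353"; // placeholder
const API_TOKEN = process.env.CLOUDFLARE_API_TOKEN;

const params = new URLSearchParams({
  metrics: "queryCount,responseTimeAvg",
  dimensions: "queryType,responseCached",
  since: "-6h",
  until: "now",
  sort: "-queryCount",
  limit: "10",
});

const report = await fetch(
  `https://api.cloudflare.com/client/v4/zones/${ZONE_ID}/dns_analytics/report?${params}`,
  { headers: { Authorization: `Bearer ${API_TOKEN}` } }
).then((r) => r.json());

// Each row's dimensions/metrics arrays are parallel to the requested lists,
// e.g. dimensions: ["A", "true"], metrics: [1200, 4.2].
for (const row of report.result?.data ?? []) {
  console.log(row.dimensions, row.metrics);
}
```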
### `cloudflare_purge_cache`
Purges cached content for a zone. Can purge everything or specific files/tags/hosts/prefixes.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to purge cache for |
| `purge_everything` | boolean | No | Set to true to purge all cached content. Mutually exclusive with files, tags, hosts, and prefixes |
| `files` | string | No | Comma-separated list of URLs to purge from cache |
| `tags` | string | No | Comma-separated list of cache tags to purge \(Enterprise only\) |
| `hosts` | string | No | Comma-separated list of hostnames to purge \(Enterprise only\) |
| `prefixes` | string | No | Comma-separated list of URL prefixes to purge \(Enterprise only\) |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Purge request identifier returned by Cloudflare |
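A sketch of the purge call, assuming the v4 `purge_cache` endpoint; the comma-separated tool inputs become JSON arrays, and `purge_everything` cannot be combined with them:

```typescript
const API_TOKEN = process.env.CLOUDFLARE_API_TOKEN;

// `body` is either { purge_everything: true } or exactly one of
// files/tags/hosts/prefixes as an array (tags/hosts/prefixes: Enterprise only).
async function purgeCache(zoneId: string, body: Record<string, unknown>) {
  const res = await fetch(
    `https://api.cloudflare.com/client/v4/zones/${zoneId}/purge_cache`,
    {
      method: "POST",
      headers: {
        Authorization: `Bearer ${API_TOKEN}`,
        "Content-Type": "application/json",
      },
      body: JSON.stringify(body),
    }
  );
  return res.json();
}

await purgeCache("023e105f...", { purge_everything: true }); // placeholder zone ID
await purgeCache("023e105f...", { files: ["https://example.com/styles.css"] });
```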

View File

@@ -234,7 +234,6 @@ List actions from incident.io. Optionally filter by incident ID.
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | incident.io API Key |
| `incident_id` | string | No | Filter actions by incident ID \(e.g., "01FCNDV6P870EA6S7TK1DSYDG0"\) |
| `page_size` | number | No | Number of actions to return per page \(e.g., 10, 25, 50\) |
#### Output
@@ -309,7 +308,6 @@ List follow-ups from incident.io. Optionally filter by incident ID.
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | incident.io API Key |
| `incident_id` | string | No | Filter follow-ups by incident ID \(e.g., "01FCNDV6P870EA6S7TK1DSYDG0"\) |
| `page_size` | number | No | Number of follow-ups to return per page \(e.g., 10, 25, 50\) |
#### Output
@@ -396,6 +394,7 @@ List all users in your Incident.io workspace. Returns user details including id,
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Incident.io API Key |
| `page_size` | number | No | Number of results to return per page \(e.g., 10, 25, 50\). Default: 25 |
| `after` | string | No | Pagination cursor to fetch the next page of results |
#### Output
@@ -406,6 +405,10 @@ List all users in your Incident.io workspace. Returns user details including id,
| ↳ `name` | string | Full name of the user |
| ↳ `email` | string | Email address of the user |
| ↳ `role` | string | Role of the user in the workspace |
| `pagination_meta` | object | Pagination metadata |
| ↳ `after` | string | Cursor for next page |
| ↳ `page_size` | number | Number of items per page |
| ↳ `total_record_count` | number | Total number of records |
### `incidentio_users_show`
@@ -644,7 +647,6 @@ List all escalation policies in incident.io
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | incident.io API Key |
| `page_size` | number | No | Number of results per page \(e.g., 10, 25, 50\). Default: 25 |
#### Output

View File

@@ -29,7 +29,7 @@ In Sim, the Knowledge Base block enables your agents to perform intelligent sema
## Usage Instructions
Integrate Knowledge into the workflow. Perform full CRUD operations on documents, chunks, and tags.
Integrate Knowledge into the workflow. Can search, upload chunks, and create documents.
@@ -126,161 +126,4 @@ Create a new document in a knowledge base
| `message` | string | Success or error message describing the operation result |
| `documentId` | string | ID of the created document |
### `knowledge_list_tags`
List all tag definitions for a knowledge base
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base to list tags for |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `knowledgeBaseId` | string | ID of the knowledge base |
| `tags` | array | Array of tag definitions for the knowledge base |
| ↳ `id` | string | Tag definition ID |
| ↳ `tagSlot` | string | Internal tag slot \(e.g. tag1, number1\) |
| ↳ `displayName` | string | Human-readable tag name |
| ↳ `fieldType` | string | Tag field type \(text, number, date, boolean\) |
| ↳ `createdAt` | string | Creation timestamp |
| ↳ `updatedAt` | string | Last update timestamp |
| `totalTags` | number | Total number of tag definitions |
### `knowledge_list_documents`
List documents in a knowledge base with optional filtering, search, and pagination
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base to list documents from |
| `search` | string | No | Search query to filter documents by filename |
| `enabledFilter` | string | No | Filter by enabled status: "all", "enabled", or "disabled" |
| `limit` | number | No | Maximum number of documents to return \(default: 50\) |
| `offset` | number | No | Number of documents to skip for pagination \(default: 0\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `knowledgeBaseId` | string | ID of the knowledge base |
| `documents` | array | Array of documents in the knowledge base |
| ↳ `id` | string | Document ID |
| ↳ `filename` | string | Document filename |
| ↳ `fileSize` | number | File size in bytes |
| ↳ `mimeType` | string | MIME type of the document |
| ↳ `enabled` | boolean | Whether the document is enabled |
| ↳ `processingStatus` | string | Processing status \(pending, processing, completed, failed\) |
| ↳ `chunkCount` | number | Number of chunks in the document |
| ↳ `tokenCount` | number | Total token count across chunks |
| ↳ `uploadedAt` | string | Upload timestamp |
| ↳ `updatedAt` | string | Last update timestamp |
| `totalDocuments` | number | Total number of documents matching the filter |
| `limit` | number | Page size used |
| `offset` | number | Offset used for pagination |
### `knowledge_delete_document`
Delete a document from a knowledge base
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base containing the document |
| `documentId` | string | Yes | ID of the document to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `documentId` | string | ID of the deleted document |
| `message` | string | Confirmation message |
### `knowledge_list_chunks`
List chunks for a document in a knowledge base with optional filtering and pagination
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base |
| `documentId` | string | Yes | ID of the document to list chunks from |
| `search` | string | No | Search query to filter chunks by content |
| `enabled` | string | No | Filter by enabled status: "true", "false", or "all" \(default: "all"\) |
| `limit` | number | No | Maximum number of chunks to return \(1-100, default: 50\) |
| `offset` | number | No | Number of chunks to skip for pagination \(default: 0\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `knowledgeBaseId` | string | ID of the knowledge base |
| `documentId` | string | ID of the document |
| `chunks` | array | Array of chunks in the document |
| ↳ `id` | string | Chunk ID |
| ↳ `chunkIndex` | number | Index of the chunk within the document |
| ↳ `content` | string | Chunk text content |
| ↳ `contentLength` | number | Content length in characters |
| ↳ `tokenCount` | number | Token count for the chunk |
| ↳ `enabled` | boolean | Whether the chunk is enabled |
| ↳ `createdAt` | string | Creation timestamp |
| ↳ `updatedAt` | string | Last update timestamp |
| `totalChunks` | number | Total number of chunks matching the filter |
| `limit` | number | Page size used |
| `offset` | number | Offset used for pagination |
### `knowledge_update_chunk`
Update the content or enabled status of a chunk in a knowledge base
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base |
| `documentId` | string | Yes | ID of the document containing the chunk |
| `chunkId` | string | Yes | ID of the chunk to update |
| `content` | string | No | New content for the chunk |
| `enabled` | boolean | No | Whether the chunk should be enabled or disabled |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `documentId` | string | ID of the parent document |
| `id` | string | Chunk ID |
| `chunkIndex` | number | Index of the chunk within the document |
| `content` | string | Updated chunk content |
| `contentLength` | number | Content length in characters |
| `tokenCount` | number | Token count for the chunk |
| `enabled` | boolean | Whether the chunk is enabled |
| `updatedAt` | string | Last update timestamp |
### `knowledge_delete_chunk`
Delete a chunk from a document in a knowledge base
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base |
| `documentId` | string | Yes | ID of the document containing the chunk |
| `chunkId` | string | Yes | ID of the chunk to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `chunkId` | string | ID of the deleted chunk |
| `documentId` | string | ID of the parent document |
| `message` | string | Confirmation message |

View File

@@ -15,6 +15,7 @@
"circleback",
"clay",
"clerk",
"cloudflare",
"confluence",
"cursor",
"datadog",
@@ -68,6 +69,7 @@
"mailgun",
"mem0",
"memory",
"microsoft_dataverse",
"microsoft_excel",
"microsoft_planner",
"microsoft_teams",
@@ -122,6 +124,7 @@
"twilio_sms",
"twilio_voice",
"typeform",
"vercel",
"video_generator",
"vision",
"wealthbox",

View File

@@ -0,0 +1,426 @@
---
title: Microsoft Dataverse
description: Manage records in Microsoft Dataverse tables
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="microsoft_dataverse"
color="#E0E0E0"
/>
{/* MANUAL-CONTENT-START:intro */}
[Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/maker/data-platform/data-platform-intro) is a powerful cloud data platform for securely storing, managing, and interacting with structured business data. The Microsoft Dataverse integration enables you to programmatically create, read, update, delete, and link records in Dataverse tables as part of your workflows and automations.
With Microsoft Dataverse integration, you can:
- **List and query records:** Access lists of records or query with advanced filters to find the data you need from any Dataverse table.
- **Create and update records:** Add new records or update existing ones in any table for use across Power Platform, Dynamics 365, and custom apps.
- **Delete and manage records:** Remove records as part of data lifecycle management directly from your automation flows.
- **Associate and disassociate records:** Link related items together or remove associations using entity relationships and navigation properties—essential for reflecting complex business processes.
- **Work with any Dataverse environment:** Connect to your organization's environments, including production, sandbox, or Dynamics 365 tenants, for maximum flexibility.
- **Integrate with Power Platform and Dynamics 365:** Automate tasks ranging from sales and marketing data updates to custom app workflows—all powered by Dataverse's security and governance.
The Dataverse integration empowers solution builders and business users to automate business processes, maintain accurate and up-to-date information, create system integrations, trigger actions, and drive insights—all with robust security and governance.
Connect Microsoft Dataverse to your automations to unlock sophisticated data management, orchestration, and business logic across your apps, teams, and cloud services.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Microsoft Dataverse into your workflow. Create, read, update, delete, upsert, associate, query, search, and execute actions and functions against Dataverse tables using the Web API. Supports bulk operations, FetchXML, file uploads, and relevance search. Works with Dynamics 365, Power Platform, and custom Dataverse environments.
## Tools
### `microsoft_dataverse_associate`
Associate two records in Microsoft Dataverse via a navigation property. Creates a relationship between a source record and a target record. Supports both collection-valued (POST) and single-valued (PUT) navigation properties.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Source entity set name \(e.g., accounts\) |
| `recordId` | string | Yes | Source record GUID |
| `navigationProperty` | string | Yes | Navigation property name \(e.g., contact_customer_accounts for collection-valued, or parentcustomerid_account for single-valued\) |
| `targetEntitySetName` | string | Yes | Target entity set name \(e.g., contacts\) |
| `targetRecordId` | string | Yes | Target record GUID to associate |
| `navigationType` | string | No | Type of navigation property: "collection" \(default, uses POST\) or "single" \(uses PUT for lookup fields\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the association was created successfully |
| `entitySetName` | string | Source entity set name used in the association |
| `recordId` | string | Source record GUID that was associated |
| `navigationProperty` | string | Navigation property used for the association |
| `targetEntitySetName` | string | Target entity set name used in the association |
| `targetRecordId` | string | Target record GUID that was associated |
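For illustration, a sketch of the Web API request behind a collection-valued association, with placeholder GUIDs and environment URL; single-valued (lookup) properties use PUT instead of POST:

```typescript
const ENV = "https://myorg.crm.dynamics.com/api/data/v9.2"; // placeholder org
const TOKEN = process.env.DATAVERSE_TOKEN;

// Link a contact into an account's contact_customer_accounts collection.
await fetch(
  `${ENV}/accounts(00000000-0000-0000-0000-000000000001)/contact_customer_accounts/$ref`,
  {
    method: "POST", // use PUT for single-valued navigation properties
    headers: {
      Authorization: `Bearer ${TOKEN}`,
      "Content-Type": "application/json",
    },
    // The body references the target record by its full Web API URL.
    body: JSON.stringify({
      "@odata.id": `${ENV}/contacts(00000000-0000-0000-0000-000000000002)`,
    }),
  }
);
```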
### `microsoft_dataverse_create_multiple`
Create multiple records of the same table type in a single request. Each record in the Targets array must include an @odata.type annotation. Recommended batch size: 100-1000 records for standard tables.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `entityLogicalName` | string | Yes | Table logical name for @odata.type annotation \(e.g., account, contact\). Used to set Microsoft.Dynamics.CRM.\{entityLogicalName\} on each record. |
| `records` | object | Yes | Array of record objects to create. Each record should contain column logical names as keys. The @odata.type annotation is added automatically. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ids` | array | Array of GUIDs for the created records |
| `count` | number | Number of records created |
| `success` | boolean | Whether all records were created successfully |
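A sketch of the `CreateMultiple` payload this tool assembles; the `@odata.type` annotation shown here is what the tool adds automatically from `entityLogicalName`:

```typescript
const ENV = "https://myorg.crm.dynamics.com/api/data/v9.2"; // placeholder org
const TOKEN = process.env.DATAVERSE_TOKEN;

const res = await fetch(`${ENV}/accounts/Microsoft.Dynamics.CRM.CreateMultiple`, {
  method: "POST",
  headers: { Authorization: `Bearer ${TOKEN}`, "Content-Type": "application/json" },
  body: JSON.stringify({
    Targets: [
      { "@odata.type": "Microsoft.Dynamics.CRM.account", name: "Contoso" },
      { "@odata.type": "Microsoft.Dynamics.CRM.account", name: "Fabrikam" },
    ],
  }),
});
const { Ids } = await res.json(); // GUIDs of the created records, in input order
```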
### `microsoft_dataverse_create_record`
Create a new record in a Microsoft Dataverse table. Requires the entity set name (plural table name) and record data as a JSON object.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `data` | object | Yes | Record data as a JSON object with column names as keys |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | The ID of the created record |
| `record` | object | Dataverse record object. Contains dynamic columns based on the queried table, plus OData metadata fields. |
| `success` | boolean | Whether the record was created successfully |
### `microsoft_dataverse_delete_record`
Delete a record from a Microsoft Dataverse table by its ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | The ID of the deleted record |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_disassociate`
Remove an association between two records in Microsoft Dataverse. For collection-valued navigation properties, provide the target record ID. For single-valued navigation properties, only the navigation property name is needed.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Source entity set name \(e.g., accounts\) |
| `recordId` | string | Yes | Source record GUID |
| `navigationProperty` | string | Yes | Navigation property name \(e.g., contact_customer_accounts for collection-valued, or parentcustomerid_account for single-valued\) |
| `targetRecordId` | string | No | Target record GUID \(required for collection-valued navigation properties, omit for single-valued\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the disassociation was completed successfully |
| `entitySetName` | string | Source entity set name used in the disassociation |
| `recordId` | string | Source record GUID that was disassociated |
| `navigationProperty` | string | Navigation property used for the disassociation |
| `targetRecordId` | string | Target record GUID that was disassociated |
### `microsoft_dataverse_download_file`
Download a file from a file or image column on a Dataverse record. Returns the file content as a base64-encoded string along with file metadata from response headers.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | Record GUID to download the file from |
| `fileColumn` | string | Yes | File or image column logical name \(e.g., entityimage, cr_document\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `fileContent` | string | Base64-encoded file content |
| `fileName` | string | Name of the downloaded file |
| `fileSize` | number | File size in bytes |
| `mimeType` | string | MIME type of the file |
| `success` | boolean | Whether the file was downloaded successfully |
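A sketch of the download request, assuming the standard `/$value` path for file columns; the metadata fields in the output table come from response headers such as `x-ms-file-name`:

```typescript
const ENV = "https://myorg.crm.dynamics.com/api/data/v9.2"; // placeholder org
const TOKEN = process.env.DATAVERSE_TOKEN;
const recordId = "00000000-0000-0000-0000-000000000001"; // placeholder GUID

const res = await fetch(`${ENV}/accounts(${recordId})/cr_document/$value`, {
  headers: { Authorization: `Bearer ${TOKEN}` },
});
const fileName = res.headers.get("x-ms-file-name"); // file metadata via headers
const fileContent = Buffer.from(await res.arrayBuffer()).toString("base64");
```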
### `microsoft_dataverse_execute_action`
Execute a bound or unbound Dataverse action. Actions perform operations with side effects (e.g., Merge, GrantAccess, SendEmail, QualifyLead). For bound actions, provide the entity set name and record ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `actionName` | string | Yes | Action name \(e.g., Merge, GrantAccess, SendEmail\). Do not include the Microsoft.Dynamics.CRM. namespace prefix for unbound actions. |
| `entitySetName` | string | No | Entity set name for bound actions \(e.g., accounts\). Leave empty for unbound actions. |
| `recordId` | string | No | Record GUID for bound actions. Leave empty for unbound or collection-bound actions. |
| `parameters` | object | No | Action parameters as a JSON object. For entity references, include @odata.type annotation \(e.g., \{"Target": \{"@odata.type": "Microsoft.Dynamics.CRM.account", "accountid": "..."\}\}\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `result` | object | Action response data. Structure varies by action. Null for actions that return 204 No Content. |
| `success` | boolean | Whether the action executed successfully |
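As an example of the entity-reference convention described above, a sketch of an unbound `GrantAccess` call with placeholder GUIDs; parameter shapes vary by action, so treat this as illustrative:

```typescript
const ENV = "https://myorg.crm.dynamics.com/api/data/v9.2"; // placeholder org
const TOKEN = process.env.DATAVERSE_TOKEN;

await fetch(`${ENV}/GrantAccess`, {
  method: "POST",
  headers: { Authorization: `Bearer ${TOKEN}`, "Content-Type": "application/json" },
  body: JSON.stringify({
    // Entity references carry an @odata.type annotation plus their primary key.
    Target: {
      "@odata.type": "Microsoft.Dynamics.CRM.account",
      accountid: "00000000-0000-0000-0000-000000000001",
    },
    PrincipalAccess: {
      Principal: {
        "@odata.type": "Microsoft.Dynamics.CRM.systemuser",
        systemuserid: "00000000-0000-0000-0000-000000000002",
      },
      AccessMask: "ReadAccess",
    },
  }),
});
```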
### `microsoft_dataverse_execute_function`
Execute a bound or unbound Dataverse function. Functions are read-only operations (e.g., RetrievePrincipalAccess, RetrieveTotalRecordCount, InitializeFrom). For bound functions, provide the entity set name and record ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `functionName` | string | Yes | Function name \(e.g., RetrievePrincipalAccess, RetrieveTotalRecordCount\). Do not include the Microsoft.Dynamics.CRM. namespace prefix for unbound functions. |
| `entitySetName` | string | No | Entity set name for bound functions \(e.g., systemusers\). Leave empty for unbound functions. |
| `recordId` | string | No | Record GUID for bound functions. Leave empty for unbound functions. |
| `parameters` | string | No | Function parameters as a comma-separated list of name=value pairs for the URL \(e.g., "LocalizedStandardName=\'Pacific Standard Time\'"\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `result` | object | Function response data. Structure varies by function. |
| `success` | boolean | Whether the function executed successfully |
### `microsoft_dataverse_fetchxml_query`
Execute a FetchXML query against a Microsoft Dataverse table. FetchXML supports aggregation, grouping, linked-entity joins, and complex filtering beyond OData capabilities.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `fetchXml` | string | Yes | FetchXML query string. Must include &lt;fetch&gt; root element and &lt;entity&gt; child element matching the table logical name. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `records` | array | Array of Dataverse records. Each record has dynamic columns based on the table schema. |
| `count` | number | Number of records returned in the current page |
| `fetchXmlPagingCookie` | string | Paging cookie for retrieving the next page of results |
| `moreRecords` | boolean | Whether more records are available beyond the current page |
| `success` | boolean | Operation success status |
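For illustration, a sketch of an aggregate FetchXML query of the kind this tool executes; the XML is URL-encoded onto the entity set endpoint (placeholder org URL):

```typescript
const ENV = "https://myorg.crm.dynamics.com/api/data/v9.2"; // placeholder org
const TOKEN = process.env.DATAVERSE_TOKEN;

// Count accounts per city: grouping/aggregation beyond plain OData queries.
const fetchXml = `
<fetch aggregate="true">
  <entity name="account">
    <attribute name="accountid" alias="total" aggregate="count" />
    <attribute name="address1_city" alias="city" groupby="true" />
  </entity>
</fetch>`;

const { value } = await fetch(
  `${ENV}/accounts?fetchXml=${encodeURIComponent(fetchXml)}`,
  { headers: { Authorization: `Bearer ${TOKEN}` } }
).then((r) => r.json());
// value: e.g. [{ total: 42, city: "Seattle" }, ...]
```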
### `microsoft_dataverse_get_record`
Retrieve a single record from a Microsoft Dataverse table by its ID. Supports $select and $expand OData query options.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to retrieve |
| `select` | string | No | Comma-separated list of columns to return \(OData $select\) |
| `expand` | string | No | Navigation properties to expand \(OData $expand\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `record` | object | Dataverse record object. Contains dynamic columns based on the queried table, plus OData metadata fields. |
| `recordId` | string | The record primary key ID \(auto-detected from response\) |
| `success` | boolean | Whether the record was retrieved successfully |
### `microsoft_dataverse_list_records`
Query and list records from a Microsoft Dataverse table. Supports OData query options for filtering, selecting columns, ordering, and pagination.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `select` | string | No | Comma-separated list of columns to return \(OData $select\) |
| `filter` | string | No | OData $filter expression \(e.g., statecode eq 0\) |
| `orderBy` | string | No | OData $orderby expression \(e.g., name asc, createdon desc\) |
| `top` | number | No | Maximum number of records to return \(OData $top\) |
| `expand` | string | No | Navigation properties to expand \(OData $expand\) |
| `count` | string | No | Set to "true" to include total record count in response \(OData $count\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `records` | array | Array of Dataverse records. Each record has dynamic columns based on the table schema. |
| `count` | number | Number of records returned in the current page |
| `totalCount` | number | Total number of matching records server-side \(requires $count=true\) |
| `nextLink` | string | URL for the next page of results |
| `success` | boolean | Operation success status |
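A sketch of the OData query string the list inputs translate into, with a placeholder org URL; note that `totalCount` is only populated when `count` is set to "true":

```typescript
const ENV = "https://myorg.crm.dynamics.com/api/data/v9.2"; // placeholder org
const TOKEN = process.env.DATAVERSE_TOKEN;

const query = new URLSearchParams({
  $select: "name,revenue",
  $filter: "statecode eq 0",
  $orderby: "name asc",
  $top: "50",
  $count: "true",
});

const page = await fetch(`${ENV}/accounts?${query}`, {
  headers: { Authorization: `Bearer ${TOKEN}` },
}).then((r) => r.json());

console.log(page["@odata.count"]);    // maps to totalCount
console.log(page["@odata.nextLink"]); // maps to nextLink when more pages exist
console.log(page.value.length);       // records in this page
```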
### `microsoft_dataverse_search`
Perform a full-text relevance search across Microsoft Dataverse tables. Requires Dataverse Search to be enabled on the environment. Supports simple and Lucene query syntax.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `searchTerm` | string | Yes | Search text \(1-100 chars\). Supports simple syntax: + \(AND\), \| \(OR\), - \(NOT\), * \(wildcard\), "exact phrase" |
| `entities` | string | No | JSON array of search entity configs. Each object: \{"Name":"account","SelectColumns":\["name"\],"SearchColumns":\["name"\],"Filter":"statecode eq 0"\} |
| `filter` | string | No | Global OData filter applied across all entities \(e.g., "createdon gt 2024-01-01"\) |
| `facets` | string | No | JSON array of facet specifications \(e.g., \["entityname,count:100","ownerid,count:100"\]\) |
| `top` | number | No | Maximum number of results \(default: 50, max: 100\) |
| `skip` | number | No | Number of results to skip for pagination |
| `orderBy` | string | No | JSON array of sort expressions \(e.g., \["createdon desc"\]\) |
| `searchMode` | string | No | Search mode: "any" \(default, match any term\) or "all" \(match all terms\) |
| `searchType` | string | No | Query type: "simple" \(default\) or "lucene" \(enables regex, fuzzy, proximity, boosting\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `results` | array | Array of search result objects |
| ↳ `Id` | string | Record GUID |
| ↳ `EntityName` | string | Table logical name \(e.g., account, contact\) |
| ↳ `ObjectTypeCode` | number | Entity type code |
| ↳ `Attributes` | object | Record attributes matching the search. Keys are column logical names. |
| ↳ `Highlights` | object | Highlighted search matches. Keys are column names, values are arrays of strings with \{crmhit\}/\{/crmhit\} markers. |
| ↳ `Score` | number | Relevance score for this result |
| `totalCount` | number | Total number of matching records across all tables |
| `count` | number | Number of results returned in this page |
| `facets` | object | Facet results when facets were requested. Keys are facet names, values are arrays of facet value objects with count and value properties. |
| `success` | boolean | Operation success status |
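A sketch of a relevance-search request shaped like this tool's inputs, assuming the Dataverse search query endpoint (the exact path and field casing may differ by API version); note that `entities`, `facets`, and `orderBy` are JSON strings rather than raw arrays:

```typescript
const TOKEN = process.env.DATAVERSE_TOKEN;

const body = {
  search: "contoso",
  entities: JSON.stringify([
    {
      Name: "account",
      SelectColumns: ["name"],
      SearchColumns: ["name"],
      Filter: "statecode eq 0",
    },
  ]),
  top: 50,
  searchmode: "any",
};

const results = await fetch(
  "https://myorg.crm.dynamics.com/api/search/v1.0/query", // placeholder org
  {
    method: "POST",
    headers: { Authorization: `Bearer ${TOKEN}`, "Content-Type": "application/json" },
    body: JSON.stringify(body),
  }
).then((r) => r.json());
// Matched terms come back wrapped in {crmhit}...{/crmhit} markers in Highlights.
```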
### `microsoft_dataverse_update_multiple`
Update multiple records of the same table type in a single request. Each record must include its primary key. Only include columns that need to be changed. Recommended batch size: 100-1000 records.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `entityLogicalName` | string | Yes | Table logical name for @odata.type annotation \(e.g., account, contact\). Used to set Microsoft.Dynamics.CRM.\{entityLogicalName\} on each record. |
| `records` | object | Yes | Array of record objects to update. Each record must include its primary key \(e.g., accountid\) and only the columns being changed. The @odata.type annotation is added automatically. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether all records were updated successfully |
### `microsoft_dataverse_update_record`
Update an existing record in a Microsoft Dataverse table. Only send the columns you want to change.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to update |
| `data` | object | Yes | Record data to update as a JSON object with column names as keys |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | The ID of the updated record |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_upload_file`
Upload a file to a file or image column on a Dataverse record. Supports single-request upload for files up to 128 MB. The file content must be provided as a base64-encoded string.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | Record GUID to upload the file to |
| `fileColumn` | string | Yes | File or image column logical name \(e.g., entityimage, cr_document\) |
| `fileName` | string | Yes | Name of the file being uploaded \(e.g., document.pdf\) |
| `file` | file | No | File to upload \(UserFile object\) |
| `fileContent` | string | No | Base64-encoded file content \(legacy\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | Record GUID the file was uploaded to |
| `fileColumn` | string | File column the file was uploaded to |
| `fileName` | string | Name of the uploaded file |
| `success` | boolean | Whether the file was uploaded successfully |
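A sketch of the single-request upload path, assuming the standard PATCH-to-column Web API with the `x-ms-file-name` header (placeholder org, GUID, and content):

```typescript
const ENV = "https://myorg.crm.dynamics.com/api/data/v9.2"; // placeholder org
const TOKEN = process.env.DATAVERSE_TOKEN;
const recordId = "00000000-0000-0000-0000-000000000001"; // placeholder GUID
const fileContentBase64 = "JVBERi0xLjQ="; // placeholder base64 payload

await fetch(`${ENV}/accounts(${recordId})/cr_document`, {
  method: "PATCH",
  headers: {
    Authorization: `Bearer ${TOKEN}`,
    "Content-Type": "application/octet-stream",
    "x-ms-file-name": "document.pdf",
  },
  body: Buffer.from(fileContentBase64, "base64"), // decode the tool's base64 input
});
```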
### `microsoft_dataverse_upsert_record`
Create or update a record in a Microsoft Dataverse table. If a record with the given ID exists, it is updated; otherwise, a new record is created.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to upsert |
| `data` | object | Yes | Record data as a JSON object with column names as keys |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | The ID of the upserted record |
| `created` | boolean | True if the record was created, false if updated |
| `record` | object | Dataverse record object. Contains dynamic columns based on the queried table, plus OData metadata fields. |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_whoami`
Retrieve the current authenticated user information from Microsoft Dataverse. Useful for testing connectivity and getting the user ID, business unit ID, and organization ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `userId` | string | The authenticated user ID |
| `businessUnitId` | string | The business unit ID |
| `organizationId` | string | The organization ID |
| `success` | boolean | Operation success status |
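As a connectivity check, the unbound `WhoAmI` function this tool calls can be sketched in a few lines (placeholder org URL):

```typescript
const ENV = "https://myorg.crm.dynamics.com/api/data/v9.2"; // placeholder org

const who = await fetch(`${ENV}/WhoAmI`, {
  headers: { Authorization: `Bearer ${process.env.DATAVERSE_TOKEN}` },
}).then((r) => r.json());

console.log(who.UserId, who.BusinessUnitId, who.OrganizationId);
```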

View File

@@ -49,6 +49,7 @@ Retrieve all deals from Pipedrive with optional filters
| `pipeline_id` | string | No | If supplied, only deals in the specified pipeline are returned \(e.g., "1"\) |
| `updated_since` | string | No | If set, only deals updated after this time are returned. Format: 2025-01-01T10:20:00Z |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |
#### Output
@@ -74,6 +75,8 @@ Retrieve all deals from Pipedrive with optional filters
| `metadata` | object | Pagination metadata for the response |
| ↳ `total_items` | number | Total number of items |
| ↳ `has_more` | boolean | Whether more items are available |
| ↳ `next_cursor` | string | Cursor for fetching the next page \(v2 endpoints\) |
| ↳ `next_start` | number | Offset for fetching the next page \(v1 endpoints\) |
| `success` | boolean | Operation success status |
### `pipedrive_get_deal`
@@ -148,10 +151,9 @@ Retrieve files from Pipedrive with optional filters
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `deal_id` | string | No | Filter files by deal ID \(e.g., "123"\) |
| `person_id` | string | No | Filter files by person ID \(e.g., "456"\) |
| `org_id` | string | No | Filter files by organization ID \(e.g., "789"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `sort` | string | No | Sort files by field \(supported: "id", "update_time"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 100\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
| `downloadFiles` | boolean | No | Download file contents into file outputs |
#### Output
@@ -171,6 +173,8 @@ Retrieve files from Pipedrive with optional filters
| ↳ `url` | string | File download URL |
| `downloadedFiles` | file[] | Downloaded files from Pipedrive |
| `total_items` | number | Total number of files returned |
| `has_more` | boolean | Whether more files are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |
### `pipedrive_get_mail_messages`
@@ -183,6 +187,7 @@ Retrieve mail threads from Pipedrive mailbox
| --------- | ---- | -------- | ----------- |
| `folder` | string | No | Filter by folder: inbox, drafts, sent, archive \(default: inbox\) |
| `limit` | string | No | Number of results to return \(e.g., "25", default: 50\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
#### Output
@@ -190,6 +195,8 @@ Retrieve mail threads from Pipedrive mailbox
| --------- | ---- | ----------- |
| `messages` | array | Array of mail thread objects from Pipedrive mailbox |
| `total_items` | number | Total number of mail threads returned |
| `has_more` | boolean | Whether more messages are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |
### `pipedrive_get_mail_thread`
@@ -221,7 +228,7 @@ Retrieve all pipelines from Pipedrive
| `sort_by` | string | No | Field to sort by: id, update_time, add_time \(default: id\) |
| `sort_direction` | string | No | Sorting direction: asc, desc \(default: asc\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
#### Output
@@ -237,6 +244,8 @@ Retrieve all pipelines from Pipedrive
| ↳ `add_time` | string | When the pipeline was created |
| ↳ `update_time` | string | When the pipeline was last updated |
| `total_items` | number | Total number of pipelines returned |
| `has_more` | boolean | Whether more pipelines are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |
### `pipedrive_get_pipeline_deals`
@@ -249,8 +258,8 @@ Retrieve all deals in a specific pipeline
| --------- | ---- | -------- | ----------- |
| `pipeline_id` | string | Yes | The ID of the pipeline \(e.g., "1"\) |
| `stage_id` | string | No | Filter by specific stage within the pipeline \(e.g., "2"\) |
| `status` | string | No | Filter by deal status: open, won, lost |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
#### Output
@@ -271,6 +280,7 @@ Retrieve all projects or a specific project from Pipedrive
| `project_id` | string | No | Optional: ID of a specific project to retrieve \(e.g., "123"\) |
| `status` | string | No | Filter by project status: open, completed, deleted \(only for listing all\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500, only for listing all\) |
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |
#### Output
@@ -279,6 +289,8 @@ Retrieve all projects or a specific project from Pipedrive
| `projects` | array | Array of project objects \(when listing all\) |
| `project` | object | Single project object \(when project_id is provided\) |
| `total_items` | number | Total number of projects returned |
| `has_more` | boolean | Whether more projects are available |
| `next_cursor` | string | Cursor for fetching the next page |
| `success` | boolean | Operation success status |
### `pipedrive_create_project`
@@ -309,12 +321,11 @@ Retrieve activities (tasks) from Pipedrive with optional filters
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `deal_id` | string | No | Filter activities by deal ID \(e.g., "123"\) |
| `person_id` | string | No | Filter activities by person ID \(e.g., "456"\) |
| `org_id` | string | No | Filter activities by organization ID \(e.g., "789"\) |
| `user_id` | string | No | Filter activities by user ID \(e.g., "123"\) |
| `type` | string | No | Filter by activity type \(call, meeting, task, deadline, email, lunch\) |
| `done` | string | No | Filter by completion status: 0 for not done, 1 for done |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
#### Output
@@ -335,6 +346,8 @@ Retrieve activities (tasks) from Pipedrive with optional filters
| ↳ `add_time` | string | When the activity was created |
| ↳ `update_time` | string | When the activity was last updated |
| `total_items` | number | Total number of activities returned |
| `has_more` | boolean | Whether more activities are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |
### `pipedrive_create_activity`
@@ -399,6 +412,7 @@ Retrieve all leads or a specific lead from Pipedrive
| `person_id` | string | No | Filter by person ID \(e.g., "456"\) |
| `organization_id` | string | No | Filter by organization ID \(e.g., "789"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
#### Output
@@ -433,6 +447,8 @@ Retrieve all leads or a specific lead from Pipedrive
| ↳ `add_time` | string | When the lead was created \(ISO 8601\) |
| ↳ `update_time` | string | When the lead was last updated \(ISO 8601\) |
| `total_items` | number | Total number of leads returned |
| `has_more` | boolean | Whether more leads are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |
### `pipedrive_create_lead`

View File

@@ -57,6 +57,7 @@ Query data from a Supabase table
| `filter` | string | No | PostgREST filter \(e.g., "id=eq.123"\) |
| `orderBy` | string | No | Column to order by \(add DESC for descending\) |
| `limit` | number | No | Maximum number of rows to return |
| `offset` | number | No | Number of rows to skip \(for pagination\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |
#### Output
@@ -211,6 +212,7 @@ Perform full-text search on a Supabase table
| `searchType` | string | No | Search type: plain, phrase, or websearch \(default: websearch\) |
| `language` | string | No | Language for text search configuration \(default: english\) |
| `limit` | number | No | Maximum number of rows to return |
| `offset` | number | No | Number of rows to skip \(for pagination\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |
#### Output

View File

@@ -43,6 +43,8 @@ Retrieve form responses from Typeform
| `formId` | string | Yes | Typeform form ID \(e.g., "abc123XYZ"\) |
| `apiKey` | string | Yes | Typeform Personal Access Token |
| `pageSize` | number | No | Number of responses to retrieve \(e.g., 10, 25, 50\) |
| `before` | string | No | Cursor token for fetching the next page of older responses |
| `after` | string | No | Cursor token for fetching the next page of newer responses |
| `since` | string | No | Retrieve responses submitted after this date \(e.g., "2024-01-01T00:00:00Z"\) |
| `until` | string | No | Retrieve responses submitted before this date \(e.g., "2024-12-31T23:59:59Z"\) |
| `completed` | string | No | Filter by completion status \(e.g., "true", "false", "all"\) |

File diff suppressed because it is too large

View File

@@ -67,10 +67,9 @@ Retrieve a list of tickets from Zendesk with optional filtering
| `type` | string | No | Filter by type: "problem", "incident", "question", or "task" |
| `assigneeId` | string | No | Filter by assignee user ID as a numeric string \(e.g., "12345"\) |
| `organizationId` | string | No | Filter by organization ID as a numeric string \(e.g., "67890"\) |
| `sortBy` | string | No | Sort field: "created_at", "updated_at", "priority", or "status" |
| `sortOrder` | string | No | Sort order: "asc" or "desc" |
| `sort` | string | No | Sort field for ticket listing \(only applies without filters\): "updated_at", "id", or "status". Prefix with "-" for descending \(e.g., "-updated_at"\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |
#### Output
@@ -129,10 +128,10 @@ Retrieve a list of tickets from Zendesk with optional filtering
| ↳ `from_messaging_channel` | boolean | Whether the ticket originated from a messaging channel |
| ↳ `ticket_form_id` | number | Ticket form ID |
| ↳ `generated_timestamp` | number | Unix timestamp of the ticket generation |
| `paging` | object | Pagination information |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |
@@ -515,7 +514,7 @@ Retrieve a list of users from Zendesk with optional filtering
| `role` | string | No | Filter by role: "end-user", "agent", or "admin" |
| `permissionSet` | string | No | Filter by permission set ID as a numeric string \(e.g., "12345"\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |
#### Output
@@ -563,10 +562,10 @@ Retrieve a list of users from Zendesk with optional filtering
| ↳ `shared` | boolean | Whether the user is shared from a different Zendesk |
| ↳ `shared_agent` | boolean | Whether the agent is shared from a different Zendesk |
| ↳ `remote_photo_url` | string | URL to a remote photo |
| `paging` | object | Pagination information |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |
@@ -706,7 +705,7 @@ Search for users in Zendesk using a query string
| `query` | string | No | Search query string \(e.g., user name or email\) |
| `externalId` | string | No | External ID to search by \(your system identifier\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `page` | string | No | Page number for pagination \(1-based\) |
#### Output
@@ -754,10 +753,10 @@ Search for users in Zendesk using a query string
| ↳ `shared` | boolean | Whether the user is shared from a different Zendesk |
| ↳ `shared_agent` | boolean | Whether the agent is shared from a different Zendesk |
| ↳ `remote_photo_url` | string | URL to a remote photo |
| `paging` | object | Pagination information |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |
@@ -999,7 +998,7 @@ Retrieve a list of organizations from Zendesk
| `apiToken` | string | Yes | Zendesk API token |
| `subdomain` | string | Yes | Your Zendesk subdomain \(e.g., "mycompany" for mycompany.zendesk.com\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |
#### Output
@@ -1020,10 +1019,10 @@ Retrieve a list of organizations from Zendesk
| ↳ `created_at` | string | When the organization was created \(ISO 8601 format\) |
| ↳ `updated_at` | string | When the organization was last updated \(ISO 8601 format\) |
| ↳ `external_id` | string | External ID for linking to external records |
| `paging` | object | Pagination information |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |
@@ -1075,7 +1074,7 @@ Autocomplete organizations in Zendesk by name prefix (for name matching/autocomp
| `subdomain` | string | Yes | Your Zendesk subdomain |
| `name` | string | Yes | Organization name prefix to search for \(e.g., "Acme"\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `page` | string | No | Page number for pagination \(1-based\) |
#### Output
@@ -1096,10 +1095,10 @@ Autocomplete organizations in Zendesk by name prefix (for name matching/autocomp
| ↳ `created_at` | string | When the organization was created \(ISO 8601 format\) |
| ↳ `updated_at` | string | When the organization was last updated \(ISO 8601 format\) |
| ↳ `external_id` | string | External ID for linking to external records |
| `paging` | object | Pagination information |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |
@@ -1249,19 +1248,18 @@ Unified search across tickets, users, and organizations in Zendesk
| `apiToken` | string | Yes | Zendesk API token |
| `subdomain` | string | Yes | Your Zendesk subdomain |
| `query` | string | Yes | Search query string using Zendesk search syntax \(e.g., "type:ticket status:open"\) |
| `sortBy` | string | No | Sort field: "relevance", "created_at", "updated_at", "priority", "status", or "ticket_type" |
| `sortOrder` | string | No | Sort order: "asc" or "desc" |
| `filterType` | string | Yes | Resource type to search for: "ticket", "user", "organization", or "group" |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `paging` | object | Pagination information |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

View File

@@ -21,7 +21,7 @@
"fumadocs-mdx": "14.1.0",
"fumadocs-ui": "16.2.3",
"lucide-react": "^0.511.0",
"next": "16.1.0-canary.21",
"next": "16.1.6",
"next-themes": "^0.4.6",
"postgres": "^3.4.5",
"react": "19.2.1",

View File

@@ -1,5 +1,3 @@
'use server'
import { env } from '@/lib/core/config/env'
import { isProd } from '@/lib/core/config/feature-flags'

View File

@@ -85,7 +85,7 @@ export const LandingNode = React.memo(function LandingNode({ data }: { data: Lan
transform: isAnimated ? 'translateY(0) scale(1)' : 'translateY(8px) scale(0.98)',
transition:
'opacity 0.6s cubic-bezier(0.22, 1, 0.36, 1), transform 0.6s cubic-bezier(0.22, 1, 0.36, 1)',
willChange: 'transform, opacity',
willChange: isAnimated ? 'auto' : 'transform, opacity',
}}
>
<LandingBlock icon={data.icon} color={data.color} name={data.name} tags={data.tags} />

View File

@@ -67,7 +67,6 @@ export const LandingEdge = React.memo(function LandingEdge(props: EdgeProps) {
strokeLinejoin: 'round',
pointerEvents: 'none',
animation: `landing-edge-dash-${id} 1s linear infinite`,
willChange: 'stroke-dashoffset',
...style,
}}
/>

View File

@@ -754,3 +754,100 @@ input[type="search"]::-ms-clear {
text-decoration: none !important;
color: inherit !important;
}
/**
* Respect user's prefers-reduced-motion setting (WCAG 2.3.3)
* Disables animations and transitions for users who prefer reduced motion.
*/
@media (prefers-reduced-motion: reduce) {
*,
*::before,
*::after {
animation-duration: 0.01ms !important;
animation-iteration-count: 1 !important;
transition-duration: 0.01ms !important;
scroll-behavior: auto !important;
}
}
/* WandPromptBar status indicator */
@keyframes smoke-pulse {
0%,
100% {
transform: scale(0.8);
opacity: 0.4;
}
50% {
transform: scale(1.1);
opacity: 0.8;
}
}
.status-indicator {
position: relative;
width: 12px;
height: 12px;
border-radius: 50%;
overflow: hidden;
background-color: hsl(var(--muted-foreground) / 0.5);
transition: background-color 0.3s ease;
}
.status-indicator.streaming {
background-color: transparent;
}
.status-indicator.streaming::before {
content: "";
position: absolute;
inset: 0;
border-radius: 50%;
background: radial-gradient(
circle,
hsl(var(--primary) / 0.9) 0%,
hsl(var(--primary) / 0.4) 60%,
transparent 80%
);
animation: smoke-pulse 1.8s ease-in-out infinite;
opacity: 0.9;
}
.dark .status-indicator.streaming::before {
background: #6b7280;
opacity: 0.9;
animation: smoke-pulse 1.8s ease-in-out infinite;
}
/* MessageContainer loading dot */
@keyframes growShrink {
0%,
100% {
transform: scale(0.9);
}
50% {
transform: scale(1.1);
}
}
.loading-dot {
animation: growShrink 1.5s infinite ease-in-out;
}
/* Subflow node z-index and drag-over styles */
.workflow-container .react-flow__node-subflowNode {
z-index: -1 !important;
}
.workflow-container .react-flow__node-subflowNode:has([data-subflow-selected="true"]) {
z-index: 10 !important;
}
.loop-node-drag-over,
.parallel-node-drag-over {
box-shadow: 0 0 0 1.75px var(--brand-secondary) !important;
border-radius: 8px !important;
}
.react-flow__node[data-parent-node-id] .react-flow__handle {
z-index: 30;
}

View File

@@ -3,7 +3,7 @@
*
* @vitest-environment node
*/
import { createMockLogger, createMockRequest } from '@sim/testing'
import { auditMock, createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
describe('OAuth Disconnect API Route', () => {
@@ -67,6 +67,8 @@ describe('OAuth Disconnect API Route', () => {
vi.doMock('@/lib/webhooks/utils.server', () => ({
syncAllWebhooksForCredentialSet: mockSyncAllWebhooksForCredentialSet,
}))
vi.doMock('@/lib/audit/log', () => auditMock)
})
afterEach(() => {

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq, like, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
@@ -118,6 +119,20 @@ export async function POST(request: NextRequest) {
}
}
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.OAUTH_DISCONNECTED,
resourceType: AuditResourceType.OAUTH,
resourceId: providerId ?? provider,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: provider,
description: `Disconnected OAuth provider: ${provider}`,
metadata: { provider, providerId },
request,
})
return NextResponse.json({ success: true }, { status: 200 })
} catch (error) {
logger.error(`[${requestId}] Error disconnecting OAuth provider`, error)

View File

@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { getCreditBalance } from '@/lib/billing/credits/balance'
import { purchaseCredits } from '@/lib/billing/credits/purchase'
@@ -57,6 +58,17 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ error: result.error }, { status: 400 })
}
recordAudit({
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.CREDIT_PURCHASED,
resourceType: AuditResourceType.BILLING,
description: `Purchased $${validation.data.amount} in credits`,
metadata: { amount: validation.data.amount, requestId: validation.data.requestId },
request,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Failed to purchase credits', { error, userId: session.user.id })

View File

@@ -3,10 +3,12 @@
*
* @vitest-environment node
*/
import { loggerMock } from '@sim/testing'
import { auditMock, loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('@/lib/audit/log', () => auditMock)
vi.mock('@/lib/core/config/feature-flags', () => ({
isDev: true,
isHosted: false,
@@ -216,8 +218,11 @@ describe('Chat Edit API Route', () => {
workflowId: 'workflow-123',
}
mockCheckChatAccess.mockResolvedValue({ hasAccess: true, chat: mockChat })
mockLimit.mockResolvedValueOnce([]) // No identifier conflict
mockCheckChatAccess.mockResolvedValue({
hasAccess: true,
chat: mockChat,
workspaceId: 'workspace-123',
})
const req = new NextRequest('http://localhost:3000/api/chat/manage/chat-123', {
method: 'PATCH',
@@ -311,8 +316,11 @@ describe('Chat Edit API Route', () => {
workflowId: 'workflow-123',
}
mockCheckChatAccess.mockResolvedValue({ hasAccess: true, chat: mockChat })
mockLimit.mockResolvedValueOnce([])
mockCheckChatAccess.mockResolvedValue({
hasAccess: true,
chat: mockChat,
workspaceId: 'workspace-123',
})
const req = new NextRequest('http://localhost:3000/api/chat/manage/chat-123', {
method: 'PATCH',
@@ -371,8 +379,11 @@ describe('Chat Edit API Route', () => {
}),
}))
mockCheckChatAccess.mockResolvedValue({ hasAccess: true })
mockWhere.mockResolvedValue(undefined)
mockCheckChatAccess.mockResolvedValue({
hasAccess: true,
chat: { title: 'Test Chat', workflowId: 'workflow-123' },
workspaceId: 'workspace-123',
})
const req = new NextRequest('http://localhost:3000/api/chat/manage/chat-123', {
method: 'DELETE',
@@ -393,8 +404,11 @@ describe('Chat Edit API Route', () => {
}),
}))
mockCheckChatAccess.mockResolvedValue({ hasAccess: true })
mockWhere.mockResolvedValue(undefined)
mockCheckChatAccess.mockResolvedValue({
hasAccess: true,
chat: { title: 'Test Chat', workflowId: 'workflow-123' },
workspaceId: 'workspace-123',
})
const req = new NextRequest('http://localhost:3000/api/chat/manage/chat-123', {
method: 'DELETE',

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { isDev } from '@/lib/core/config/feature-flags'
import { encryptSecret } from '@/lib/core/security/encryption'
@@ -103,7 +104,11 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
try {
const validatedData = chatUpdateSchema.parse(body)
const { hasAccess, chat: existingChatRecord } = await checkChatAccess(chatId, session.user.id)
const {
hasAccess,
chat: existingChatRecord,
workspaceId: chatWorkspaceId,
} = await checkChatAccess(chatId, session.user.id)
if (!hasAccess || !existingChatRecord) {
return createErrorResponse('Chat not found or access denied', 404)
@@ -217,6 +222,19 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
logger.info(`Chat "${chatId}" updated successfully`)
recordAudit({
workspaceId: chatWorkspaceId || null,
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.CHAT_UPDATED,
resourceType: AuditResourceType.CHAT,
resourceId: chatId,
resourceName: title || existingChatRecord.title,
description: `Updated chat deployment "${title || existingChatRecord.title}"`,
request,
})
return createSuccessResponse({
id: chatId,
chatUrl,
@@ -252,7 +270,11 @@ export async function DELETE(
return createErrorResponse('Unauthorized', 401)
}
const { hasAccess } = await checkChatAccess(chatId, session.user.id)
const {
hasAccess,
chat: chatRecord,
workspaceId: chatWorkspaceId,
} = await checkChatAccess(chatId, session.user.id)
if (!hasAccess) {
return createErrorResponse('Chat not found or access denied', 404)
@@ -262,6 +284,19 @@ export async function DELETE(
logger.info(`Chat "${chatId}" deleted successfully`)
recordAudit({
workspaceId: chatWorkspaceId || null,
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.CHAT_DELETED,
resourceType: AuditResourceType.CHAT,
resourceId: chatId,
resourceName: chatRecord?.title || chatId,
description: `Deleted chat deployment "${chatRecord?.title || chatId}"`,
request: _request,
})
return createSuccessResponse({
message: 'Chat deployment deleted successfully',
})

View File

@@ -1,9 +1,10 @@
import { NextRequest } from 'next/server'
/**
* Tests for chat API route
*
* @vitest-environment node
*/
import { auditMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
describe('Chat API Route', () => {
@@ -30,6 +31,8 @@ describe('Chat API Route', () => {
mockInsert.mockReturnValue({ values: mockValues })
mockValues.mockReturnValue({ returning: mockReturning })
vi.doMock('@/lib/audit/log', () => auditMock)
vi.doMock('@sim/db', () => ({
db: {
select: mockSelect,

View File

@@ -5,6 +5,7 @@ import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { isDev } from '@/lib/core/config/feature-flags'
import { encryptSecret } from '@/lib/core/security/encryption'
@@ -42,7 +43,7 @@ const chatSchema = z.object({
.default([]),
})
export async function GET(request: NextRequest) {
export async function GET(_request: NextRequest) {
try {
const session = await getSession()
@@ -174,7 +175,7 @@ export async function POST(request: NextRequest) {
userId: session.user.id,
identifier,
title,
description: description || '',
description: description || null,
customizations: mergedCustomizations,
isActive: true,
authType,
@@ -224,6 +225,20 @@ export async function POST(request: NextRequest) {
// Silently fail
}
recordAudit({
workspaceId: workflowRecord.workspaceId || null,
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.CHAT_DEPLOYED,
resourceType: AuditResourceType.CHAT,
resourceId: id,
resourceName: title,
description: `Deployed chat "${title}"`,
metadata: { workflowId, identifier, authType },
request,
})
return createSuccessResponse({
id,
chatUrl,

View File

@@ -52,7 +52,7 @@ export async function checkWorkflowAccessForChatCreation(
export async function checkChatAccess(
chatId: string,
userId: string
): Promise<{ hasAccess: boolean; chat?: any }> {
): Promise<{ hasAccess: boolean; chat?: any; workspaceId?: string }> {
const chatData = await db
.select({
chat: chat,
@@ -78,7 +78,9 @@ export async function checkChatAccess(
action: 'admin',
})
return authorization.allowed ? { hasAccess: true, chat: chatRecord } : { hasAccess: false }
return authorization.allowed
? { hasAccess: true, chat: chatRecord, workspaceId: workflowWorkspaceId }
: { hasAccess: false }
}
export async function validateChatAuth(

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getEmailSubject, renderPollingGroupInvitationEmail } from '@/components/emails'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -148,6 +149,19 @@ export async function POST(
userId: session.user.id,
})
recordAudit({
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.CREDENTIAL_SET_INVITATION_RESENT,
resourceType: AuditResourceType.CREDENTIAL_SET,
resourceId: id,
resourceName: result.set.name,
description: `Resent credential set invitation to ${invitation.email}`,
metadata: { invitationId, email: invitation.email },
request: req,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Error resending invitation', error)

View File

@@ -5,6 +5,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getEmailSubject, renderPollingGroupInvitationEmail } from '@/components/emails'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -175,6 +176,19 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
emailSent: !!email,
})
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.CREDENTIAL_SET_INVITATION_CREATED,
resourceType: AuditResourceType.CREDENTIAL_SET,
resourceId: id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: result.set.name,
description: `Created invitation for credential set "${result.set.name}"${email ? ` to ${email}` : ''}`,
request: req,
})
return NextResponse.json({
invitation: {
...invitation,
@@ -235,6 +249,19 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i
)
)
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.CREDENTIAL_SET_INVITATION_REVOKED,
resourceType: AuditResourceType.CREDENTIAL_SET,
resourceId: id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: result.set.name,
description: `Revoked invitation "${invitationId}" for credential set "${result.set.name}"`,
request: req,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Error cancelling invitation', error)

View File

@@ -3,6 +3,7 @@ import { account, credentialSet, credentialSetMember, member, user } from '@sim/
import { createLogger } from '@sim/logger'
import { and, eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
@@ -13,6 +14,7 @@ async function getCredentialSetWithAccess(credentialSetId: string, userId: strin
const [set] = await db
.select({
id: credentialSet.id,
name: credentialSet.name,
organizationId: credentialSet.organizationId,
providerId: credentialSet.providerId,
})
@@ -177,6 +179,19 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i
userId: session.user.id,
})
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.CREDENTIAL_SET_MEMBER_REMOVED,
resourceType: AuditResourceType.CREDENTIAL_SET,
resourceId: id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: result.set.name,
description: `Removed member from credential set "${result.set.name}"`,
request: req,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Error removing member from credential set', error)

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
@@ -131,6 +132,19 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
const [updated] = await db.select().from(credentialSet).where(eq(credentialSet.id, id)).limit(1)
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.CREDENTIAL_SET_UPDATED,
resourceType: AuditResourceType.CREDENTIAL_SET,
resourceId: id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: updated?.name ?? result.set.name,
description: `Updated credential set "${updated?.name ?? result.set.name}"`,
request: req,
})
return NextResponse.json({ credentialSet: updated })
} catch (error) {
if (error instanceof z.ZodError) {
@@ -175,6 +189,19 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i
logger.info('Deleted credential set', { credentialSetId: id, userId: session.user.id })
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.CREDENTIAL_SET_DELETED,
resourceType: AuditResourceType.CREDENTIAL_SET,
resourceId: id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: result.set.name,
description: `Deleted credential set "${result.set.name}"`,
request: req,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Error deleting credential set', error)

View File

@@ -8,6 +8,7 @@ import {
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
@@ -78,6 +79,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ tok
status: credentialSetInvitation.status,
expiresAt: credentialSetInvitation.expiresAt,
invitedBy: credentialSetInvitation.invitedBy,
credentialSetName: credentialSet.name,
providerId: credentialSet.providerId,
})
.from(credentialSetInvitation)
@@ -125,7 +127,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ tok
const now = new Date()
const requestId = crypto.randomUUID().slice(0, 8)
// Use transaction to ensure membership + invitation update + webhook sync are atomic
await db.transaction(async (tx) => {
await tx.insert(credentialSetMember).values({
id: crypto.randomUUID(),
@@ -147,8 +148,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ tok
})
.where(eq(credentialSetInvitation.id, invitation.id))
// Clean up all other pending invitations for the same credential set and email
// This prevents duplicate invites from showing up after accepting one
if (invitation.email) {
await tx
.update(credentialSetInvitation)
@@ -166,7 +165,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ tok
)
}
// Sync webhooks within the transaction
const syncResult = await syncAllWebhooksForCredentialSet(
invitation.credentialSetId,
requestId,
@@ -184,6 +182,19 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ tok
userId: session.user.id,
})
recordAudit({
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.CREDENTIAL_SET_INVITATION_ACCEPTED,
resourceType: AuditResourceType.CREDENTIAL_SET,
resourceId: invitation.credentialSetId,
resourceName: invitation.credentialSetName,
description: `Accepted credential set invitation`,
metadata: { invitationId: invitation.id },
request: req,
})
return NextResponse.json({
success: true,
credentialSetId: invitation.credentialSetId,

View File

@@ -3,6 +3,7 @@ import { credentialSet, credentialSetMember, organization } from '@sim/db/schema
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
@@ -106,6 +107,17 @@ export async function DELETE(req: NextRequest) {
userId: session.user.id,
})
recordAudit({
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.CREDENTIAL_SET_MEMBER_LEFT,
resourceType: AuditResourceType.CREDENTIAL_SET,
resourceId: credentialSetId,
description: `Left credential set`,
request: req,
})
return NextResponse.json({ success: true })
} catch (error) {
const message = error instanceof Error ? error.message : 'Failed to leave credential set'

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, count, desc, eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
@@ -165,6 +166,19 @@ export async function POST(req: Request) {
userId: session.user.id,
})
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.CREDENTIAL_SET_CREATED,
resourceType: AuditResourceType.CREDENTIAL_SET,
resourceId: newCredentialSet.id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: name,
description: `Created credential set "${name}"`,
request: req,
})
return NextResponse.json({ credentialSet: newCredentialSet }, { status: 201 })
} catch (error) {
if (error instanceof z.ZodError) {

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
import { generateRequestId } from '@/lib/core/utils/request'
@@ -53,6 +54,17 @@ export async function POST(req: NextRequest) {
},
})
recordAudit({
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.ENVIRONMENT_UPDATED,
resourceType: AuditResourceType.ENVIRONMENT,
description: 'Updated global environment variables',
metadata: { variableCount: Object.keys(variables).length },
request: req,
})
return NextResponse.json({ success: true })
} catch (validationError) {
if (validationError instanceof z.ZodError) {

View File

@@ -1,9 +1,10 @@
import { db } from '@sim/db'
import { workflow, workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { and, eq, isNull, min } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { duplicateWorkflow } from '@/lib/workflows/persistence/duplicate'
@@ -36,7 +37,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
logger.info(`[${requestId}] Duplicating folder ${sourceFolderId} for user ${session.user.id}`)
// Verify the source folder exists
const sourceFolder = await db
.select()
.from(workflowFolder)
@@ -47,7 +47,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
throw new Error('Source folder not found')
}
// Check if user has permission to access the source folder
const userPermission = await getUserEntityPermissions(
session.user.id,
'workspace',
@@ -60,26 +59,51 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const targetWorkspaceId = workspaceId || sourceFolder.workspaceId
// Step 1: Duplicate folder structure
const { newFolderId, folderMapping } = await db.transaction(async (tx) => {
const newFolderId = crypto.randomUUID()
const now = new Date()
const targetParentId = parentId ?? sourceFolder.parentId
const folderParentCondition = targetParentId
? eq(workflowFolder.parentId, targetParentId)
: isNull(workflowFolder.parentId)
const workflowParentCondition = targetParentId
? eq(workflow.folderId, targetParentId)
: isNull(workflow.folderId)
const [[folderResult], [workflowResult]] = await Promise.all([
tx
.select({ minSortOrder: min(workflowFolder.sortOrder) })
.from(workflowFolder)
.where(and(eq(workflowFolder.workspaceId, targetWorkspaceId), folderParentCondition)),
tx
.select({ minSortOrder: min(workflow.sortOrder) })
.from(workflow)
.where(and(eq(workflow.workspaceId, targetWorkspaceId), workflowParentCondition)),
])
const minSortOrder = [folderResult?.minSortOrder, workflowResult?.minSortOrder].reduce<
number | null
>((currentMin, candidate) => {
if (candidate == null) return currentMin
if (currentMin == null) return candidate
return Math.min(currentMin, candidate)
}, null)
const sortOrder = minSortOrder != null ? minSortOrder - 1 : 0
// Create the new root folder
await tx.insert(workflowFolder).values({
id: newFolderId,
userId: session.user.id,
workspaceId: targetWorkspaceId,
name,
color: color || sourceFolder.color,
parentId: parentId || sourceFolder.parentId,
sortOrder: sourceFolder.sortOrder,
parentId: targetParentId,
sortOrder,
isExpanded: false,
createdAt: now,
updatedAt: now,
})
// Recursively duplicate child folders
const folderMapping = new Map<string, string>([[sourceFolderId, newFolderId]])
await duplicateFolderStructure(
tx,
@@ -95,7 +119,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
return { newFolderId, folderMapping }
})
// Step 2: Duplicate workflows
const workflowStats = await duplicateWorkflowsInFolderTree(
sourceFolder.workspaceId,
targetWorkspaceId,
@@ -115,6 +138,19 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}
)
recordAudit({
workspaceId: targetWorkspaceId,
actorId: session.user.id,
action: AuditAction.FOLDER_DUPLICATED,
resourceType: AuditResourceType.FOLDER,
resourceId: newFolderId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: name,
description: `Duplicated folder "${sourceFolder.name}" as "${name}"`,
request: req,
})
return NextResponse.json(
{
id: newFolderId,
@@ -159,7 +195,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}
}
// Helper to recursively duplicate folder structure
async function duplicateFolderStructure(
tx: any,
sourceFolderId: string,
@@ -170,7 +205,6 @@ async function duplicateFolderStructure(
timestamp: Date,
folderMapping: Map<string, string>
): Promise<void> {
// Get all child folders
const childFolders = await tx
.select()
.from(workflowFolder)
@@ -181,7 +215,6 @@ async function duplicateFolderStructure(
)
)
// Create each child folder and recurse
for (const childFolder of childFolders) {
const newChildFolderId = crypto.randomUUID()
folderMapping.set(childFolder.id, newChildFolderId)
@@ -199,7 +232,6 @@ async function duplicateFolderStructure(
updatedAt: timestamp,
})
// Recurse for this child's children
await duplicateFolderStructure(
tx,
childFolder.id,
@@ -213,7 +245,6 @@ async function duplicateFolderStructure(
}
}
// Helper to duplicate all workflows in a folder tree
async function duplicateWorkflowsInFolderTree(
sourceWorkspaceId: string,
targetWorkspaceId: string,
@@ -223,9 +254,7 @@ async function duplicateWorkflowsInFolderTree(
): Promise<{ total: number; succeeded: number; failed: number }> {
const stats = { total: 0, succeeded: 0, failed: 0 }
// Process each folder in the mapping
for (const [oldFolderId, newFolderId] of folderMapping.entries()) {
// Get workflows in this folder
const workflowsInFolder = await db
.select()
.from(workflow)
@@ -233,7 +262,6 @@ async function duplicateWorkflowsInFolderTree(
stats.total += workflowsInFolder.length
// Duplicate each workflow
for (const sourceWorkflow of workflowsInFolder) {
try {
await duplicateWorkflow({

View File

@@ -4,6 +4,7 @@
* @vitest-environment node
*/
import {
auditMock,
createMockRequest,
type MockUser,
mockAuth,
@@ -12,6 +13,8 @@ import {
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('@/lib/audit/log', () => auditMock)
/** Type for captured folder values in tests */
interface CapturedFolderValues {
name?: string

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -167,6 +168,19 @@ export async function DELETE(
deletionStats,
})
recordAudit({
workspaceId: existingFolder.workspaceId,
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.FOLDER_DELETED,
resourceType: AuditResourceType.FOLDER,
resourceId: id,
resourceName: existingFolder.name,
description: `Deleted folder "${existingFolder.name}"`,
request,
})
return NextResponse.json({
success: true,
deletedItems: deletionStats,

View File

@@ -3,9 +3,22 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockConsoleLogger, setupCommonApiMocks } from '@sim/testing'
import {
auditMock,
createMockRequest,
mockAuth,
mockConsoleLogger,
setupCommonApiMocks,
} from '@sim/testing'
import { drizzleOrmMock } from '@sim/testing/mocks'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('@/lib/audit/log', () => auditMock)
vi.mock('drizzle-orm', () => ({
...drizzleOrmMock,
min: vi.fn((field) => ({ type: 'min', field })),
}))
interface CapturedFolderValues {
name?: string
color?: string
@@ -16,29 +29,35 @@ interface CapturedFolderValues {
}
function createMockTransaction(mockData: {
selectData?: Array<{ id: string; [key: string]: unknown }>
selectResults?: Array<Array<{ [key: string]: unknown }>>
insertResult?: Array<{ id: string; [key: string]: unknown }>
onInsertValues?: (values: CapturedFolderValues) => void
}) {
const { selectData = [], insertResult = [] } = mockData
return vi.fn().mockImplementation(async (callback: (tx: unknown) => Promise<unknown>) => {
const { selectResults = [[], []], insertResult = [], onInsertValues } = mockData
return async (callback: (tx: unknown) => Promise<unknown>) => {
const where = vi.fn()
for (const result of selectResults) {
where.mockReturnValueOnce(result)
}
where.mockReturnValue([])
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue(selectData),
}),
}),
where,
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockReturnValue({
returning: vi.fn().mockReturnValue(insertResult),
values: vi.fn().mockImplementation((values: CapturedFolderValues) => {
onInsertValues?.(values)
return {
returning: vi.fn().mockReturnValue(insertResult),
}
}),
}),
}
return await callback(tx)
})
}
}
describe('Folders API Route', () => {
@@ -249,25 +268,12 @@ describe('Folders API Route', () => {
it('should create a new folder successfully', async () => {
mockAuthenticatedUser()
mockTransaction.mockImplementationOnce(async (callback: any) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue([]), // No existing folders
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockReturnValue({
returning: vi.fn().mockReturnValue([mockFolders[0]]),
}),
}),
}
return await callback(tx)
})
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[], []],
insertResult: [mockFolders[0]],
})
)
const req = createMockRequest('POST', {
name: 'New Test Folder',
@@ -277,12 +283,11 @@ describe('Folders API Route', () => {
const { POST } = await import('@/app/api/folders/route')
const response = await POST(req)
const responseBody = await response.json()
expect(response.status).toBe(200)
const data = await response.json()
expect(data).toHaveProperty('folder')
expect(data.folder).toMatchObject({
expect(responseBody).toHaveProperty('folder')
expect(responseBody.folder).toMatchObject({
id: 'folder-1',
name: 'Test Folder 1',
workspaceId: 'workspace-123',
@@ -291,26 +296,17 @@ describe('Folders API Route', () => {
it('should create folder with correct sort order', async () => {
mockAuthenticatedUser()
let capturedValues: CapturedFolderValues | null = null
mockTransaction.mockImplementationOnce(async (callback: any) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue([{ sortOrder: 5 }]), // Existing folder with sort order 5
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockReturnValue({
returning: vi.fn().mockReturnValue([{ ...mockFolders[0], sortOrder: 6 }]),
}),
}),
}
return await callback(tx)
})
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[{ minSortOrder: 5 }], [{ minSortOrder: 2 }]],
insertResult: [{ ...mockFolders[0], sortOrder: 1 }],
onInsertValues: (values) => {
capturedValues = values
},
})
)
const req = createMockRequest('POST', {
name: 'New Test Folder',
@@ -324,8 +320,10 @@ describe('Folders API Route', () => {
const data = await response.json()
expect(data.folder).toMatchObject({
sortOrder: 6,
sortOrder: 1,
})
expect(capturedValues).not.toBeNull()
expect(capturedValues!.sortOrder).toBe(1)
})
it('should create subfolder with parent reference', async () => {
@@ -333,7 +331,7 @@ describe('Folders API Route', () => {
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectData: [], // No existing folders
selectResults: [[], []],
insertResult: [{ ...mockFolders[1] }],
})
)
@@ -394,25 +392,12 @@ describe('Folders API Route', () => {
mockAuthenticatedUser()
mockGetUserEntityPermissions.mockResolvedValue('write') // Write permissions
mockTransaction.mockImplementationOnce(async (callback: any) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue([]), // No existing folders
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockReturnValue({
returning: vi.fn().mockReturnValue([mockFolders[0]]),
}),
}),
}
return await callback(tx)
})
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[], []],
insertResult: [mockFolders[0]],
})
)
const req = createMockRequest('POST', {
name: 'Test Folder',
@@ -432,25 +417,12 @@ describe('Folders API Route', () => {
mockAuthenticatedUser()
mockGetUserEntityPermissions.mockResolvedValue('admin') // Admin permissions
mockTransaction.mockImplementationOnce(async (callback: any) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue([]), // No existing folders
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockReturnValue({
returning: vi.fn().mockReturnValue([mockFolders[0]]),
}),
}),
}
return await callback(tx)
})
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[], []],
insertResult: [mockFolders[0]],
})
)
const req = createMockRequest('POST', {
name: 'Test Folder',
@@ -519,28 +491,15 @@ describe('Folders API Route', () => {
let capturedValues: CapturedFolderValues | null = null
mockTransaction.mockImplementationOnce(async (callback: any) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue([]),
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockImplementation((values) => {
capturedValues = values
return {
returning: vi.fn().mockReturnValue([mockFolders[0]]),
}
}),
}),
}
return await callback(tx)
})
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[], []],
insertResult: [mockFolders[0]],
onInsertValues: (values) => {
capturedValues = values
},
})
)
const req = createMockRequest('POST', {
name: ' Test Folder With Spaces ',
@@ -559,28 +518,15 @@ describe('Folders API Route', () => {
let capturedValues: CapturedFolderValues | null = null
mockTransaction.mockImplementationOnce(async (callback: any) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue([]),
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockImplementation((values) => {
capturedValues = values
return {
returning: vi.fn().mockReturnValue([mockFolders[0]]),
}
}),
}),
}
return await callback(tx)
})
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[], []],
insertResult: [mockFolders[0]],
onInsertValues: (values) => {
capturedValues = values
},
})
)
const req = createMockRequest('POST', {
name: 'Test Folder',

View File

@@ -1,8 +1,9 @@
import { db } from '@sim/db'
import { workflowFolder } from '@sim/db/schema'
import { workflow, workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, asc, desc, eq, isNull } from 'drizzle-orm'
import { and, asc, eq, isNull, min } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -86,19 +87,33 @@ export async function POST(request: NextRequest) {
if (providedSortOrder !== undefined) {
sortOrder = providedSortOrder
} else {
const existingFolders = await tx
.select({ sortOrder: workflowFolder.sortOrder })
.from(workflowFolder)
.where(
and(
eq(workflowFolder.workspaceId, workspaceId),
parentId ? eq(workflowFolder.parentId, parentId) : isNull(workflowFolder.parentId)
)
)
.orderBy(desc(workflowFolder.sortOrder))
.limit(1)
const folderParentCondition = parentId
? eq(workflowFolder.parentId, parentId)
: isNull(workflowFolder.parentId)
const workflowParentCondition = parentId
? eq(workflow.folderId, parentId)
: isNull(workflow.folderId)
sortOrder = existingFolders.length > 0 ? existingFolders[0].sortOrder + 1 : 0
const [[folderResult], [workflowResult]] = await Promise.all([
tx
.select({ minSortOrder: min(workflowFolder.sortOrder) })
.from(workflowFolder)
.where(and(eq(workflowFolder.workspaceId, workspaceId), folderParentCondition)),
tx
.select({ minSortOrder: min(workflow.sortOrder) })
.from(workflow)
.where(and(eq(workflow.workspaceId, workspaceId), workflowParentCondition)),
])
const minSortOrder = [folderResult?.minSortOrder, workflowResult?.minSortOrder].reduce<
number | null
>((currentMin, candidate) => {
if (candidate == null) return currentMin
if (currentMin == null) return candidate
return Math.min(currentMin, candidate)
}, null)
sortOrder = minSortOrder != null ? minSortOrder - 1 : 0
}
const [folder] = await tx
@@ -119,6 +134,20 @@ export async function POST(request: NextRequest) {
logger.info('Created new folder:', { id, name, workspaceId, parentId })
recordAudit({
workspaceId,
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.FOLDER_CREATED,
resourceType: AuditResourceType.FOLDER,
resourceId: id,
resourceName: name.trim(),
description: `Created folder "${name.trim()}"`,
metadata: { name: name.trim() },
request,
})
return NextResponse.json({ folder: newFolder })
} catch (error) {
logger.error('Error creating folder:', { error })

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { encryptSecret } from '@/lib/core/security/encryption'
import { checkFormAccess, DEFAULT_FORM_CUSTOMIZATIONS } from '@/app/api/form/utils'
@@ -102,7 +103,11 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
const { id } = await params
const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id)
const {
hasAccess,
form: formRecord,
workspaceId: formWorkspaceId,
} = await checkFormAccess(id, session.user.id)
if (!hasAccess || !formRecord) {
return createErrorResponse('Form not found or access denied', 404)
@@ -184,6 +189,19 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
logger.info(`Form ${id} updated successfully`)
recordAudit({
workspaceId: formWorkspaceId ?? null,
actorId: session.user.id,
action: AuditAction.FORM_UPDATED,
resourceType: AuditResourceType.FORM,
resourceId: id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: formRecord.title ?? undefined,
description: `Updated form "${formRecord.title}"`,
request,
})
return createSuccessResponse({
message: 'Form updated successfully',
})
@@ -213,7 +231,11 @@ export async function DELETE(
const { id } = await params
const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id)
const {
hasAccess,
form: formRecord,
workspaceId: formWorkspaceId,
} = await checkFormAccess(id, session.user.id)
if (!hasAccess || !formRecord) {
return createErrorResponse('Form not found or access denied', 404)
@@ -223,6 +245,19 @@ export async function DELETE(
logger.info(`Form ${id} deleted (soft delete)`)
recordAudit({
workspaceId: formWorkspaceId ?? null,
actorId: session.user.id,
action: AuditAction.FORM_DELETED,
resourceType: AuditResourceType.FORM,
resourceId: id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: formRecord.title ?? undefined,
description: `Deleted form "${formRecord.title}"`,
request,
})
return createSuccessResponse({
message: 'Form deleted successfully',
})

View File

@@ -5,6 +5,7 @@ import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { isDev } from '@/lib/core/config/feature-flags'
import { encryptSecret } from '@/lib/core/security/encryption'
@@ -178,7 +179,7 @@ export async function POST(request: NextRequest) {
userId: session.user.id,
identifier,
title,
description: description || '',
description: description || null,
customizations: mergedCustomizations,
isActive: true,
authType,
@@ -195,6 +196,19 @@ export async function POST(request: NextRequest) {
logger.info(`Form "${title}" deployed successfully at ${formUrl}`)
recordAudit({
workspaceId: workflowRecord.workspaceId ?? null,
actorId: session.user.id,
action: AuditAction.FORM_CREATED,
resourceType: AuditResourceType.FORM,
resourceId: id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: title,
description: `Created form "${title}" for workflow ${workflowId}`,
request,
})
return createSuccessResponse({
id,
formUrl,

View File

@@ -52,7 +52,7 @@ export async function checkWorkflowAccessForFormCreation(
export async function checkFormAccess(
formId: string,
userId: string
): Promise<{ hasAccess: boolean; form?: any }> {
): Promise<{ hasAccess: boolean; form?: any; workspaceId?: string }> {
const formData = await db
.select({ form: form, workflowWorkspaceId: workflow.workspaceId })
.from(form)
@@ -75,7 +75,9 @@ export async function checkFormAccess(
action: 'admin',
})
return authorization.allowed ? { hasAccess: true, form: formRecord } : { hasAccess: false }
return authorization.allowed
? { hasAccess: true, form: formRecord, workspaceId: workflowWorkspaceId }
: { hasAccess: false }
}
export async function validateFormAuth(

View File

@@ -1,300 +0,0 @@
/**
* @vitest-environment node
*/
import { createMockRequest, mockConsoleLogger, mockDrizzleOrm } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('@sim/db/schema', () => ({
document: {
id: 'id',
connectorId: 'connectorId',
deletedAt: 'deletedAt',
filename: 'filename',
externalId: 'externalId',
sourceUrl: 'sourceUrl',
enabled: 'enabled',
userExcluded: 'userExcluded',
uploadedAt: 'uploadedAt',
processingStatus: 'processingStatus',
},
knowledgeConnector: {
id: 'id',
knowledgeBaseId: 'knowledgeBaseId',
deletedAt: 'deletedAt',
},
}))
vi.mock('@/app/api/knowledge/utils', () => ({
checkKnowledgeBaseAccess: vi.fn(),
checkKnowledgeBaseWriteAccess: vi.fn(),
}))
vi.mock('@/lib/auth/hybrid', () => ({
checkSessionOrInternalAuth: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: vi.fn().mockReturnValue('test-req-id'),
}))
mockDrizzleOrm()
mockConsoleLogger()
describe('Connector Documents API Route', () => {
/**
* The route chains db calls in sequence. We track call order
* to return different values for connector lookup vs document queries.
*/
let limitCallCount: number
let orderByCallCount: number
const mockDbChain = {
select: vi.fn().mockReturnThis(),
from: vi.fn().mockReturnThis(),
where: vi.fn().mockReturnThis(),
orderBy: vi.fn(() => {
orderByCallCount++
return Promise.resolve([])
}),
limit: vi.fn(() => {
limitCallCount++
return Promise.resolve([])
}),
update: vi.fn().mockReturnThis(),
set: vi.fn().mockReturnThis(),
returning: vi.fn().mockResolvedValue([]),
}
const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' })
beforeEach(() => {
vi.clearAllMocks()
limitCallCount = 0
orderByCallCount = 0
mockDbChain.select.mockReturnThis()
mockDbChain.from.mockReturnThis()
mockDbChain.where.mockReturnThis()
mockDbChain.orderBy.mockImplementation(() => {
orderByCallCount++
return Promise.resolve([])
})
mockDbChain.limit.mockImplementation(() => {
limitCallCount++
return Promise.resolve([])
})
mockDbChain.update.mockReturnThis()
mockDbChain.set.mockReturnThis()
mockDbChain.returning.mockResolvedValue([])
vi.doMock('@sim/db', () => ({ db: mockDbChain }))
})
afterEach(() => {
vi.clearAllMocks()
})
describe('GET', () => {
it('returns 401 when unauthenticated', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: false,
userId: null,
} as never)
const req = createMockRequest('GET')
const { GET } = await import(
'@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
)
const response = await GET(req as never, { params: mockParams })
expect(response.status).toBe(401)
})
it('returns 404 when connector not found', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: true,
userId: 'user-1',
} as never)
vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true } as never)
mockDbChain.limit.mockResolvedValueOnce([])
const req = createMockRequest('GET')
const { GET } = await import(
'@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
)
const response = await GET(req as never, { params: mockParams })
expect(response.status).toBe(404)
})
it('returns documents list on success', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: true,
userId: 'user-1',
} as never)
vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true } as never)
const doc = { id: 'doc-1', filename: 'test.txt', userExcluded: false }
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
mockDbChain.orderBy.mockResolvedValueOnce([doc])
const url = 'http://localhost/api/knowledge/kb-123/connectors/conn-456/documents'
const req = createMockRequest('GET', undefined, undefined, url)
Object.assign(req, { nextUrl: new URL(url) })
const { GET } = await import(
'@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
)
const response = await GET(req as never, { params: mockParams })
const data = await response.json()
expect(response.status).toBe(200)
expect(data.data.documents).toHaveLength(1)
expect(data.data.counts.active).toBe(1)
expect(data.data.counts.excluded).toBe(0)
})
it('includes excluded documents when includeExcluded=true', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: true,
userId: 'user-1',
} as never)
vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true } as never)
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
mockDbChain.orderBy
.mockResolvedValueOnce([{ id: 'doc-1', userExcluded: false }])
.mockResolvedValueOnce([{ id: 'doc-2', userExcluded: true }])
const url =
'http://localhost/api/knowledge/kb-123/connectors/conn-456/documents?includeExcluded=true'
const req = createMockRequest('GET', undefined, undefined, url)
Object.assign(req, { nextUrl: new URL(url) })
const { GET } = await import(
'@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
)
const response = await GET(req as never, { params: mockParams })
const data = await response.json()
expect(response.status).toBe(200)
expect(data.data.documents).toHaveLength(2)
expect(data.data.counts.active).toBe(1)
expect(data.data.counts.excluded).toBe(1)
})
})
describe('PATCH', () => {
it('returns 401 when unauthenticated', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: false,
userId: null,
} as never)
const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] })
const { PATCH } = await import(
'@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
)
const response = await PATCH(req as never, { params: mockParams })
expect(response.status).toBe(401)
})
it('returns 400 for invalid body', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: true,
userId: 'user-1',
} as never)
vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
const req = createMockRequest('PATCH', { documentIds: [] })
const { PATCH } = await import(
'@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
)
const response = await PATCH(req as never, { params: mockParams })
expect(response.status).toBe(400)
})
it('returns 404 when connector not found', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: true,
userId: 'user-1',
} as never)
vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
mockDbChain.limit.mockResolvedValueOnce([])
const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] })
const { PATCH } = await import(
'@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
)
const response = await PATCH(req as never, { params: mockParams })
expect(response.status).toBe(404)
})
it('returns success for restore operation', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: true,
userId: 'user-1',
} as never)
vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
mockDbChain.returning.mockResolvedValueOnce([{ id: 'doc-1' }])
const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] })
const { PATCH } = await import(
'@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
)
const response = await PATCH(req as never, { params: mockParams })
const data = await response.json()
expect(response.status).toBe(200)
expect(data.data.restoredCount).toBe(1)
})
it('returns success for exclude operation', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: true,
userId: 'user-1',
} as never)
vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
mockDbChain.returning.mockResolvedValueOnce([{ id: 'doc-2' }, { id: 'doc-3' }])
const req = createMockRequest('PATCH', {
operation: 'exclude',
documentIds: ['doc-2', 'doc-3'],
})
const { PATCH } = await import(
'@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
)
const response = await PATCH(req as never, { params: mockParams })
const data = await response.json()
expect(response.status).toBe(200)
expect(data.data.excludedCount).toBe(2)
expect(data.data.documentIds).toEqual(['doc-2', 'doc-3'])
})
})
})

View File

@@ -1,210 +0,0 @@
import { db } from '@sim/db'
import { document, knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
const logger = createLogger('ConnectorDocumentsAPI')
type RouteParams = { params: Promise<{ id: string; connectorId: string }> }
/**
* GET /api/knowledge/[id]/connectors/[connectorId]/documents
* Returns documents for a connector, optionally including user-excluded ones.
*/
export async function GET(request: NextRequest, { params }: RouteParams) {
const requestId = generateRequestId()
const { id: knowledgeBaseId, connectorId } = await params
try {
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
if (!accessCheck.hasAccess) {
const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401
return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
}
const connectorRows = await db
.select({ id: knowledgeConnector.id })
.from(knowledgeConnector)
.where(
and(
eq(knowledgeConnector.id, connectorId),
eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
isNull(knowledgeConnector.deletedAt)
)
)
.limit(1)
if (connectorRows.length === 0) {
return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
}
const includeExcluded = request.nextUrl.searchParams.get('includeExcluded') === 'true'
const activeDocs = await db
.select({
id: document.id,
filename: document.filename,
externalId: document.externalId,
sourceUrl: document.sourceUrl,
enabled: document.enabled,
userExcluded: document.userExcluded,
uploadedAt: document.uploadedAt,
processingStatus: document.processingStatus,
})
.from(document)
.where(
and(
eq(document.connectorId, connectorId),
isNull(document.deletedAt),
eq(document.userExcluded, false)
)
)
.orderBy(document.filename)
const excludedDocs = includeExcluded
? await db
.select({
id: document.id,
filename: document.filename,
externalId: document.externalId,
sourceUrl: document.sourceUrl,
enabled: document.enabled,
userExcluded: document.userExcluded,
uploadedAt: document.uploadedAt,
processingStatus: document.processingStatus,
})
.from(document)
.where(
and(
eq(document.connectorId, connectorId),
eq(document.userExcluded, true),
isNull(document.deletedAt)
)
)
.orderBy(document.filename)
: []
const docs = [...activeDocs, ...excludedDocs]
const activeCount = activeDocs.length
const excludedCount = excludedDocs.length
return NextResponse.json({
success: true,
data: {
documents: docs,
counts: { active: activeCount, excluded: excludedCount },
},
})
} catch (error) {
logger.error(`[${requestId}] Error fetching connector documents`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
const PatchSchema = z.object({
operation: z.enum(['restore', 'exclude']),
documentIds: z.array(z.string()).min(1),
})
/**
* PATCH /api/knowledge/[id]/connectors/[connectorId]/documents
* Restore or exclude connector documents.
*/
export async function PATCH(request: NextRequest, { params }: RouteParams) {
const requestId = generateRequestId()
const { id: knowledgeBaseId, connectorId } = await params
try {
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
if (!writeCheck.hasAccess) {
const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
}
const connectorRows = await db
.select({ id: knowledgeConnector.id })
.from(knowledgeConnector)
.where(
and(
eq(knowledgeConnector.id, connectorId),
eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
isNull(knowledgeConnector.deletedAt)
)
)
.limit(1)
if (connectorRows.length === 0) {
return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
}
const body = await request.json()
const parsed = PatchSchema.safeParse(body)
if (!parsed.success) {
return NextResponse.json(
{ error: 'Invalid request', details: parsed.error.flatten() },
{ status: 400 }
)
}
const { operation, documentIds } = parsed.data
if (operation === 'restore') {
const updated = await db
.update(document)
.set({ userExcluded: false, deletedAt: null, enabled: true })
.where(
and(
eq(document.connectorId, connectorId),
inArray(document.id, documentIds),
eq(document.userExcluded, true)
)
)
.returning({ id: document.id })
logger.info(`[${requestId}] Restored ${updated.length} excluded documents`, { connectorId })
return NextResponse.json({
success: true,
data: { restoredCount: updated.length, documentIds: updated.map((d) => d.id) },
})
}
const updated = await db
.update(document)
.set({ userExcluded: true })
.where(
and(
eq(document.connectorId, connectorId),
inArray(document.id, documentIds),
eq(document.userExcluded, false),
isNull(document.deletedAt)
)
)
.returning({ id: document.id })
logger.info(`[${requestId}] Excluded ${updated.length} documents`, { connectorId })
return NextResponse.json({
success: true,
data: { excludedCount: updated.length, documentIds: updated.map((d) => d.id) },
})
} catch (error) {
logger.error(`[${requestId}] Error updating connector documents`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
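
For reference, a minimal client sketch against the two endpoints above. The paths and payload shapes come from the route itself; the fetch wrappers and same-origin session auth are assumptions:

// Hypothetical callers for the connector documents endpoints above.
async function listConnectorDocuments(kbId: string, connectorId: string, includeExcluded = false) {
  // GET returns { success, data: { documents, counts: { active, excluded } } }
  const res = await fetch(
    `/api/knowledge/${kbId}/connectors/${connectorId}/documents?includeExcluded=${includeExcluded}`
  )
  if (!res.ok) throw new Error(`List failed: ${res.status}`)
  return res.json()
}

async function excludeConnectorDocuments(kbId: string, connectorId: string, documentIds: string[]) {
  // PATCH with operation 'exclude' (or 'restore'), matching PatchSchema above
  const res = await fetch(`/api/knowledge/${kbId}/connectors/${connectorId}/documents`, {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ operation: 'exclude', documentIds }),
  })
  if (!res.ok) throw new Error(`Exclude failed: ${res.status}`)
  return res.json()
}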


@@ -1,231 +0,0 @@
/**
* @vitest-environment node
*/
import { createMockRequest, mockConsoleLogger, mockDrizzleOrm } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('@/app/api/knowledge/utils', () => ({
checkKnowledgeBaseAccess: vi.fn(),
checkKnowledgeBaseWriteAccess: vi.fn(),
}))
vi.mock('@/lib/auth/hybrid', () => ({
checkSessionOrInternalAuth: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: vi.fn().mockReturnValue('test-req-id'),
}))
vi.mock('@/app/api/auth/oauth/utils', () => ({
refreshAccessTokenIfNeeded: vi.fn(),
}))
vi.mock('@/connectors/registry', () => ({
CONNECTOR_REGISTRY: {
jira: { validateConfig: vi.fn() },
},
}))
vi.mock('@sim/db/schema', () => ({
knowledgeBase: { id: 'id', userId: 'userId' },
knowledgeConnector: {
id: 'id',
knowledgeBaseId: 'knowledgeBaseId',
deletedAt: 'deletedAt',
connectorType: 'connectorType',
credentialId: 'credentialId',
},
knowledgeConnectorSyncLog: { connectorId: 'connectorId', startedAt: 'startedAt' },
}))
mockDrizzleOrm()
mockConsoleLogger()
describe('Knowledge Connector By ID API Route', () => {
const mockDbChain = {
select: vi.fn().mockReturnThis(),
from: vi.fn().mockReturnThis(),
where: vi.fn().mockReturnThis(),
orderBy: vi.fn().mockReturnThis(),
limit: vi.fn().mockResolvedValue([]),
insert: vi.fn().mockReturnThis(),
values: vi.fn().mockResolvedValue(undefined),
update: vi.fn().mockReturnThis(),
set: vi.fn().mockReturnThis(),
returning: vi.fn().mockResolvedValue([]),
}
const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' })
beforeEach(() => {
vi.clearAllMocks()
vi.resetModules()
mockDbChain.select.mockReturnThis()
mockDbChain.from.mockReturnThis()
mockDbChain.where.mockReturnThis()
mockDbChain.orderBy.mockReturnThis()
mockDbChain.limit.mockResolvedValue([])
mockDbChain.update.mockReturnThis()
mockDbChain.set.mockReturnThis()
vi.doMock('@sim/db', () => ({ db: mockDbChain }))
})
afterEach(() => {
vi.clearAllMocks()
})
describe('GET', () => {
it('returns 401 when unauthenticated', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: false, userId: null })
const req = createMockRequest('GET')
const { GET } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
const response = await GET(req, { params: mockParams })
expect(response.status).toBe(401)
})
it('returns 404 when KB not found', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: false, notFound: true })
const req = createMockRequest('GET')
const { GET } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
const response = await GET(req, { params: mockParams })
expect(response.status).toBe(404)
})
it('returns 404 when connector not found', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true })
mockDbChain.limit.mockResolvedValueOnce([])
const req = createMockRequest('GET')
const { GET } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
const response = await GET(req, { params: mockParams })
expect(response.status).toBe(404)
})
it('returns connector with sync logs on success', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true })
const mockConnector = { id: 'conn-456', connectorType: 'jira', status: 'active' }
const mockLogs = [{ id: 'log-1', status: 'completed' }]
mockDbChain.limit.mockResolvedValueOnce([mockConnector]).mockResolvedValueOnce(mockLogs)
const req = createMockRequest('GET')
const { GET } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
const response = await GET(req, { params: mockParams })
const data = await response.json()
expect(response.status).toBe(200)
expect(data.success).toBe(true)
expect(data.data.id).toBe('conn-456')
expect(data.data.syncLogs).toHaveLength(1)
})
})
describe('PATCH', () => {
it('returns 401 when unauthenticated', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: false, userId: null })
const req = createMockRequest('PATCH', { status: 'paused' })
const { PATCH } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
const response = await PATCH(req, { params: mockParams })
expect(response.status).toBe(401)
})
it('returns 400 for invalid body', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true })
const req = createMockRequest('PATCH', { syncIntervalMinutes: 'not a number' })
const { PATCH } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
const response = await PATCH(req, { params: mockParams })
const data = await response.json()
expect(response.status).toBe(400)
expect(data.error).toBe('Invalid request')
})
it('returns 404 when connector not found during sourceConfig validation', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true })
mockDbChain.limit.mockResolvedValueOnce([])
const req = createMockRequest('PATCH', { sourceConfig: { project: 'NEW' } })
const { PATCH } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
const response = await PATCH(req, { params: mockParams })
expect(response.status).toBe(404)
})
it('returns 200 and updates status', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true })
const updatedConnector = { id: 'conn-456', status: 'paused', syncIntervalMinutes: 120 }
mockDbChain.limit.mockResolvedValueOnce([updatedConnector])
const req = createMockRequest('PATCH', { status: 'paused', syncIntervalMinutes: 120 })
const { PATCH } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
const response = await PATCH(req, { params: mockParams })
const data = await response.json()
expect(response.status).toBe(200)
expect(data.success).toBe(true)
expect(data.data.status).toBe('paused')
})
})
describe('DELETE', () => {
it('returns 401 when unauthenticated', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: false, userId: null })
const req = createMockRequest('DELETE')
const { DELETE } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
const response = await DELETE(req, { params: mockParams })
expect(response.status).toBe(401)
})
it('returns 200 on successful soft-delete', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true })
const req = createMockRequest('DELETE')
const { DELETE } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
const response = await DELETE(req, { params: mockParams })
const data = await response.json()
expect(response.status).toBe(200)
expect(data.success).toBe(true)
})
})
})


@@ -1,248 +0,0 @@
import { db } from '@sim/db'
import { knowledgeBase, knowledgeConnector, knowledgeConnectorSyncLog } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
const logger = createLogger('KnowledgeConnectorByIdAPI')
type RouteParams = { params: Promise<{ id: string; connectorId: string }> }
const UpdateConnectorSchema = z.object({
sourceConfig: z.record(z.unknown()).optional(),
syncIntervalMinutes: z.number().int().min(0).optional(),
status: z.enum(['active', 'paused']).optional(),
})
/**
* GET /api/knowledge/[id]/connectors/[connectorId] - Get connector details with recent sync logs
*/
export async function GET(request: NextRequest, { params }: RouteParams) {
const requestId = generateRequestId()
const { id: knowledgeBaseId, connectorId } = await params
try {
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
if (!accessCheck.hasAccess) {
const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401
return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
}
const connectorRows = await db
.select()
.from(knowledgeConnector)
.where(
and(
eq(knowledgeConnector.id, connectorId),
eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
isNull(knowledgeConnector.deletedAt)
)
)
.limit(1)
if (connectorRows.length === 0) {
return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
}
const syncLogs = await db
.select()
.from(knowledgeConnectorSyncLog)
.where(eq(knowledgeConnectorSyncLog.connectorId, connectorId))
.orderBy(desc(knowledgeConnectorSyncLog.startedAt))
.limit(10)
return NextResponse.json({
success: true,
data: {
...connectorRows[0],
syncLogs,
},
})
} catch (error) {
logger.error(`[${requestId}] Error fetching connector`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
* PATCH /api/knowledge/[id]/connectors/[connectorId] - Update a connector
*/
export async function PATCH(request: NextRequest, { params }: RouteParams) {
const requestId = generateRequestId()
const { id: knowledgeBaseId, connectorId } = await params
try {
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
if (!writeCheck.hasAccess) {
const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
}
const body = await request.json()
const parsed = UpdateConnectorSchema.safeParse(body)
if (!parsed.success) {
return NextResponse.json(
{ error: 'Invalid request', details: parsed.error.flatten() },
{ status: 400 }
)
}
if (parsed.data.sourceConfig !== undefined) {
const existingRows = await db
.select()
.from(knowledgeConnector)
.where(
and(
eq(knowledgeConnector.id, connectorId),
eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
isNull(knowledgeConnector.deletedAt)
)
)
.limit(1)
if (existingRows.length === 0) {
return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
}
const existing = existingRows[0]
const connectorConfig = CONNECTOR_REGISTRY[existing.connectorType]
if (!connectorConfig) {
return NextResponse.json(
{ error: `Unknown connector type: ${existing.connectorType}` },
{ status: 400 }
)
}
const kbRows = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)
if (kbRows.length === 0) {
return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
}
const accessToken = await refreshAccessTokenIfNeeded(
existing.credentialId,
kbRows[0].userId,
`patch-${connectorId}`
)
if (!accessToken) {
return NextResponse.json(
{ error: 'Failed to refresh access token. Please reconnect your account.' },
{ status: 401 }
)
}
const validation = await connectorConfig.validateConfig(accessToken, parsed.data.sourceConfig)
if (!validation.valid) {
return NextResponse.json(
{ error: validation.error || 'Invalid source configuration' },
{ status: 400 }
)
}
}
const updates: Record<string, unknown> = { updatedAt: new Date() }
if (parsed.data.sourceConfig !== undefined) {
updates.sourceConfig = parsed.data.sourceConfig
}
if (parsed.data.syncIntervalMinutes !== undefined) {
updates.syncIntervalMinutes = parsed.data.syncIntervalMinutes
if (parsed.data.syncIntervalMinutes > 0) {
updates.nextSyncAt = new Date(Date.now() + parsed.data.syncIntervalMinutes * 60 * 1000)
} else {
updates.nextSyncAt = null
}
}
if (parsed.data.status !== undefined) {
updates.status = parsed.data.status
}
await db
.update(knowledgeConnector)
.set(updates)
.where(
and(
eq(knowledgeConnector.id, connectorId),
eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
isNull(knowledgeConnector.deletedAt)
)
)
const updated = await db
.select()
.from(knowledgeConnector)
.where(
and(
eq(knowledgeConnector.id, connectorId),
eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
isNull(knowledgeConnector.deletedAt)
)
)
.limit(1)
return NextResponse.json({ success: true, data: updated[0] })
} catch (error) {
logger.error(`[${requestId}] Error updating connector`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
* DELETE /api/knowledge/[id]/connectors/[connectorId] - Soft-delete a connector
*/
export async function DELETE(request: NextRequest, { params }: RouteParams) {
const requestId = generateRequestId()
const { id: knowledgeBaseId, connectorId } = await params
try {
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
if (!writeCheck.hasAccess) {
const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
}
await db
.update(knowledgeConnector)
.set({ deletedAt: new Date(), status: 'paused', updatedAt: new Date() })
.where(
and(
eq(knowledgeConnector.id, connectorId),
eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
isNull(knowledgeConnector.deletedAt)
)
)
logger.info(`[${requestId}] Soft-deleted connector ${connectorId}`)
return NextResponse.json({ success: true })
} catch (error) {
logger.error(`[${requestId}] Error deleting connector`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
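
A hedged usage sketch for the PATCH handler above; the body fields mirror UpdateConnectorSchema, while the helper itself is illustrative:

// Pause a connector and move it to a 2-hour sync interval.
// Fields outside UpdateConnectorSchema fail validation with a 400.
async function pauseConnector(kbId: string, connectorId: string) {
  const res = await fetch(`/api/knowledge/${kbId}/connectors/${connectorId}`, {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ status: 'paused', syncIntervalMinutes: 120 }),
  })
  if (!res.ok) throw new Error(`Update failed: ${res.status}`)
  return res.json() // { success: true, data: <updated connector row> }
}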


@@ -1,133 +0,0 @@
/**
* @vitest-environment node
*/
import { createMockRequest, mockConsoleLogger, mockDrizzleOrm } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('@sim/db/schema', () => ({
knowledgeConnector: {
id: 'id',
knowledgeBaseId: 'knowledgeBaseId',
deletedAt: 'deletedAt',
status: 'status',
},
}))
vi.mock('@/app/api/knowledge/utils', () => ({
checkKnowledgeBaseWriteAccess: vi.fn(),
}))
vi.mock('@/lib/auth/hybrid', () => ({
checkSessionOrInternalAuth: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: vi.fn().mockReturnValue('test-req-id'),
}))
vi.mock('@/lib/knowledge/connectors/sync-engine', () => ({
dispatchSync: vi.fn().mockResolvedValue(undefined),
}))
mockDrizzleOrm()
mockConsoleLogger()
describe('Connector Manual Sync API Route', () => {
const mockDbChain = {
select: vi.fn().mockReturnThis(),
from: vi.fn().mockReturnThis(),
where: vi.fn().mockReturnThis(),
orderBy: vi.fn().mockResolvedValue([]),
limit: vi.fn().mockResolvedValue([]),
update: vi.fn().mockReturnThis(),
set: vi.fn().mockReturnThis(),
}
const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' })
beforeEach(() => {
vi.clearAllMocks()
mockDbChain.select.mockReturnThis()
mockDbChain.from.mockReturnThis()
mockDbChain.where.mockReturnThis()
mockDbChain.orderBy.mockResolvedValue([])
mockDbChain.limit.mockResolvedValue([])
mockDbChain.update.mockReturnThis()
mockDbChain.set.mockReturnThis()
vi.doMock('@sim/db', () => ({ db: mockDbChain }))
})
afterEach(() => {
vi.clearAllMocks()
})
it('returns 401 when unauthenticated', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: false,
userId: null,
} as never)
const req = createMockRequest('POST')
const { POST } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route')
const response = await POST(req as never, { params: mockParams })
expect(response.status).toBe(401)
})
it('returns 404 when connector not found', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: true,
userId: 'user-1',
} as never)
vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
mockDbChain.limit.mockResolvedValueOnce([])
const req = createMockRequest('POST')
const { POST } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route')
const response = await POST(req as never, { params: mockParams })
expect(response.status).toBe(404)
})
it('returns 409 when connector is syncing', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: true,
userId: 'user-1',
} as never)
vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456', status: 'syncing' }])
const req = createMockRequest('POST')
const { POST } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route')
const response = await POST(req as never, { params: mockParams })
expect(response.status).toBe(409)
})
it('dispatches sync on valid request', async () => {
const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
const { dispatchSync } = await import('@/lib/knowledge/connectors/sync-engine')
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
success: true,
userId: 'user-1',
} as never)
vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456', status: 'active' }])
const req = createMockRequest('POST')
const { POST } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route')
const response = await POST(req as never, { params: mockParams })
const data = await response.json()
expect(response.status).toBe(200)
expect(data.success).toBe(true)
expect(vi.mocked(dispatchSync)).toHaveBeenCalledWith('conn-456', { requestId: 'test-req-id' })
})
})


@@ -1,71 +0,0 @@
import { db } from '@sim/db'
import { knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
import { checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
const logger = createLogger('ConnectorManualSyncAPI')
type RouteParams = { params: Promise<{ id: string; connectorId: string }> }
/**
* POST /api/knowledge/[id]/connectors/[connectorId]/sync - Trigger a manual sync
*/
export async function POST(request: NextRequest, { params }: RouteParams) {
const requestId = generateRequestId()
const { id: knowledgeBaseId, connectorId } = await params
try {
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
if (!writeCheck.hasAccess) {
const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
}
const connectorRows = await db
.select()
.from(knowledgeConnector)
.where(
and(
eq(knowledgeConnector.id, connectorId),
eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
isNull(knowledgeConnector.deletedAt)
)
)
.limit(1)
if (connectorRows.length === 0) {
return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
}
if (connectorRows[0].status === 'syncing') {
return NextResponse.json({ error: 'Sync already in progress' }, { status: 409 })
}
logger.info(`[${requestId}] Manual sync triggered for connector ${connectorId}`)
dispatchSync(connectorId, { requestId }).catch((error) => {
logger.error(
`[${requestId}] Failed to dispatch manual sync for connector ${connectorId}`,
error
)
})
return NextResponse.json({
success: true,
message: 'Sync triggered',
})
} catch (error) {
logger.error(`[${requestId}] Error triggering manual sync`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
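
An illustrative caller for the manual-sync endpoint; the 409 handling follows the route's "Sync already in progress" branch:

// Trigger a manual sync; the route dispatches asynchronously and returns 200
// immediately, or 409 if the connector is already syncing.
async function triggerManualSync(kbId: string, connectorId: string) {
  const res = await fetch(`/api/knowledge/${kbId}/connectors/${connectorId}/sync`, {
    method: 'POST',
  })
  if (res.status === 409) return { alreadyRunning: true }
  if (!res.ok) throw new Error(`Sync trigger failed: ${res.status}`)
  return res.json() // { success: true, message: 'Sync triggered' }
}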


@@ -1,204 +0,0 @@
import { db } from '@sim/db'
import { knowledgeBaseTagDefinitions, knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
import { allocateTagSlots } from '@/lib/knowledge/constants'
import { createTagDefinition } from '@/lib/knowledge/tags/service'
import { getCredential } from '@/app/api/auth/oauth/utils'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
const logger = createLogger('KnowledgeConnectorsAPI')
const CreateConnectorSchema = z.object({
connectorType: z.string().min(1),
credentialId: z.string().min(1),
sourceConfig: z.record(z.unknown()),
syncIntervalMinutes: z.number().int().min(0).default(1440),
})
/**
* GET /api/knowledge/[id]/connectors - List connectors for a knowledge base
*/
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id: knowledgeBaseId } = await params
try {
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
if (!accessCheck.hasAccess) {
const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401
return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
}
const connectors = await db
.select()
.from(knowledgeConnector)
.where(
and(
eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
isNull(knowledgeConnector.deletedAt)
)
)
.orderBy(desc(knowledgeConnector.createdAt))
return NextResponse.json({ success: true, data: connectors })
} catch (error) {
logger.error(`[${requestId}] Error listing connectors`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
* POST /api/knowledge/[id]/connectors - Create a new connector
*/
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id: knowledgeBaseId } = await params
try {
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
if (!writeCheck.hasAccess) {
const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
}
const body = await request.json()
const parsed = CreateConnectorSchema.safeParse(body)
if (!parsed.success) {
return NextResponse.json(
{ error: 'Invalid request', details: parsed.error.flatten() },
{ status: 400 }
)
}
const { connectorType, credentialId, sourceConfig, syncIntervalMinutes } = parsed.data
const connectorConfig = CONNECTOR_REGISTRY[connectorType]
if (!connectorConfig) {
return NextResponse.json(
{ error: `Unknown connector type: ${connectorType}` },
{ status: 400 }
)
}
const credential = await getCredential(requestId, credentialId, auth.userId)
if (!credential) {
return NextResponse.json({ error: 'Credential not found' }, { status: 400 })
}
if (!credential.accessToken) {
return NextResponse.json(
{ error: 'Credential has no access token. Please reconnect your account.' },
{ status: 400 }
)
}
const validation = await connectorConfig.validateConfig(credential.accessToken, sourceConfig)
if (!validation.valid) {
return NextResponse.json(
{ error: validation.error || 'Invalid source configuration' },
{ status: 400 }
)
}
let finalSourceConfig: Record<string, unknown> = sourceConfig
const tagSlotMapping: Record<string, string> = {}
if (connectorConfig.tagDefinitions?.length) {
const disabledIds = new Set((sourceConfig.disabledTagIds as string[] | undefined) ?? [])
const enabledDefs = connectorConfig.tagDefinitions.filter((td) => !disabledIds.has(td.id))
const existingDefs = await db
.select({ tagSlot: knowledgeBaseTagDefinitions.tagSlot })
.from(knowledgeBaseTagDefinitions)
.where(eq(knowledgeBaseTagDefinitions.knowledgeBaseId, knowledgeBaseId))
const usedSlots = new Set<string>(existingDefs.map((d) => d.tagSlot))
const { mapping, skipped: skippedTags } = allocateTagSlots(enabledDefs, usedSlots)
Object.assign(tagSlotMapping, mapping)
for (const name of skippedTags) {
logger.warn(`[${requestId}] No available slots for "${name}"`)
}
if (skippedTags.length > 0 && Object.keys(tagSlotMapping).length === 0) {
return NextResponse.json(
{ error: `No available tag slots. Could not assign: ${skippedTags.join(', ')}` },
{ status: 422 }
)
}
finalSourceConfig = { ...sourceConfig, tagSlotMapping }
}
const now = new Date()
const connectorId = crypto.randomUUID()
const nextSyncAt =
syncIntervalMinutes > 0 ? new Date(now.getTime() + syncIntervalMinutes * 60 * 1000) : null
await db.transaction(async (tx) => {
for (const [semanticId, slot] of Object.entries(tagSlotMapping)) {
const td = connectorConfig.tagDefinitions!.find((d) => d.id === semanticId)!
await createTagDefinition(
{
knowledgeBaseId,
tagSlot: slot,
displayName: td.displayName,
fieldType: td.fieldType,
},
requestId,
tx
)
}
await tx.insert(knowledgeConnector).values({
id: connectorId,
knowledgeBaseId,
connectorType,
credentialId,
sourceConfig: finalSourceConfig,
syncIntervalMinutes,
status: 'active',
nextSyncAt,
createdAt: now,
updatedAt: now,
})
})
logger.info(`[${requestId}] Created connector ${connectorId} for KB ${knowledgeBaseId}`)
dispatchSync(connectorId, { requestId }).catch((error) => {
logger.error(
`[${requestId}] Failed to dispatch initial sync for connector ${connectorId}`,
error
)
})
const created = await db
.select()
.from(knowledgeConnector)
.where(eq(knowledgeConnector.id, connectorId))
.limit(1)
return NextResponse.json({ success: true, data: created[0] }, { status: 201 })
} catch (error) {
logger.error(`[${requestId}] Error creating connector`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
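
A sketch of a create request against the POST handler above. The 'jira' type and the sourceConfig keys are placeholders; valid keys depend on each connector's validateConfig:

// Create a connector that syncs daily (1440 minutes). The route validates the
// credential and sourceConfig, allocates tag slots, inserts the row in a
// transaction, then fires an initial dispatchSync in the background.
async function createJiraConnector(kbId: string) {
  const res = await fetch(`/api/knowledge/${kbId}/connectors`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      connectorType: 'jira', // placeholder; must exist in CONNECTOR_REGISTRY
      credentialId: 'cred-123', // placeholder credential ID
      sourceConfig: { project: 'ENG' }, // shape depends on the connector
      syncIntervalMinutes: 1440,
    }),
  })
  if (!res.ok) throw new Error(`Create failed: ${res.status}`)
  return res.json() // 201 with { success: true, data: <connector row> }
}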


@@ -4,6 +4,7 @@
* @vitest-environment node
*/
import {
auditMock,
createMockRequest,
mockAuth,
mockConsoleLogger,
@@ -35,6 +36,8 @@ vi.mock('@/lib/knowledge/documents/service', () => ({
mockDrizzleOrm()
mockConsoleLogger()
vi.mock('@/lib/audit/log', () => auditMock)
describe('Document By ID API Route', () => {
const mockAuth$ = mockAuth()


@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import {
@@ -197,6 +198,19 @@ export async function PUT(
`[${requestId}] Document updated: ${documentId} in knowledge base ${knowledgeBaseId}`
)
recordAudit({
workspaceId: accessCheck.knowledgeBase?.workspaceId ?? null,
actorId: userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.DOCUMENT_UPDATED,
resourceType: AuditResourceType.DOCUMENT,
resourceId: documentId,
resourceName: validatedData.filename ?? accessCheck.document?.filename,
description: `Updated document "${documentId}" in knowledge base "${knowledgeBaseId}"`,
request: req,
})
return NextResponse.json({
success: true,
data: updatedDocument,
@@ -257,6 +271,19 @@ export async function DELETE(
`[${requestId}] Document deleted: ${documentId} from knowledge base ${knowledgeBaseId}`
)
recordAudit({
workspaceId: accessCheck.knowledgeBase?.workspaceId ?? null,
actorId: userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.DOCUMENT_DELETED,
resourceType: AuditResourceType.DOCUMENT,
resourceId: documentId,
resourceName: accessCheck.document?.filename,
description: `Deleted document "${documentId}" from knowledge base "${knowledgeBaseId}"`,
request: req,
})
return NextResponse.json({
success: true,
data: result,


@@ -4,6 +4,7 @@
* @vitest-environment node
*/
import {
auditMock,
createMockRequest,
mockAuth,
mockConsoleLogger,
@@ -40,6 +41,8 @@ vi.mock('@/lib/knowledge/documents/service', () => ({
mockDrizzleOrm()
mockConsoleLogger()
vi.mock('@/lib/audit/log', () => auditMock)
describe('Knowledge Base Documents API Route', () => {
const mockAuth$ = mockAuth()


@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import {
@@ -12,7 +13,6 @@ import {
getDocuments,
getProcessingConfig,
processDocumentsWithQueue,
type TagFilterCondition,
} from '@/lib/knowledge/documents/service'
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
@@ -131,21 +131,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
? (sortOrderParam as SortOrder)
: undefined
let tagFilters: TagFilterCondition[] | undefined
const tagFiltersParam = url.searchParams.get('tagFilters')
if (tagFiltersParam) {
try {
const parsed = JSON.parse(tagFiltersParam)
if (Array.isArray(parsed)) {
tagFilters = parsed.filter(
(f: TagFilterCondition) => f.tagSlot && f.operator && f.value !== undefined
)
}
} catch {
logger.warn(`[${requestId}] Invalid tagFilters param`)
}
}
const result = await getDocuments(
knowledgeBaseId,
{
@@ -155,7 +140,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
offset,
...(sortBy && { sortBy }),
...(sortOrder && { sortOrder }),
tagFilters,
},
requestId
)
@@ -261,6 +245,19 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
logger.error(`[${requestId}] Critical error in document processing pipeline:`, error)
})
recordAudit({
workspaceId: accessCheck.knowledgeBase?.workspaceId ?? null,
actorId: userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.DOCUMENT_UPLOADED,
resourceType: AuditResourceType.DOCUMENT,
resourceId: knowledgeBaseId,
resourceName: `${createdDocuments.length} document(s)`,
description: `Uploaded ${createdDocuments.length} document(s) to knowledge base "${knowledgeBaseId}"`,
request: req,
})
return NextResponse.json({
success: true,
data: {
@@ -309,6 +306,19 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
// Silently fail
}
recordAudit({
workspaceId: accessCheck.knowledgeBase?.workspaceId ?? null,
actorId: userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.DOCUMENT_UPLOADED,
resourceType: AuditResourceType.DOCUMENT,
resourceId: knowledgeBaseId,
resourceName: validatedData.filename,
description: `Uploaded document "${validatedData.filename}" to knowledge base "${knowledgeBaseId}"`,
request: req,
})
return NextResponse.json({
success: true,
data: newDocument,


@@ -4,6 +4,7 @@
* @vitest-environment node
*/
import {
auditMock,
createMockRequest,
mockAuth,
mockConsoleLogger,
@@ -16,6 +17,8 @@ mockKnowledgeSchemas()
mockDrizzleOrm()
mockConsoleLogger()
vi.mock('@/lib/audit/log', () => auditMock)
vi.mock('@/lib/knowledge/service', () => ({
getKnowledgeBaseById: vi.fn(),
updateKnowledgeBase: vi.fn(),


@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
@@ -135,6 +136,19 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
logger.info(`[${requestId}] Knowledge base updated: ${id} for user ${userId}`)
recordAudit({
workspaceId: accessCheck.knowledgeBase.workspaceId ?? null,
actorId: userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.KNOWLEDGE_BASE_UPDATED,
resourceType: AuditResourceType.KNOWLEDGE_BASE,
resourceId: id,
resourceName: validatedData.name ?? updatedKnowledgeBase.name,
description: `Updated knowledge base "${validatedData.name ?? updatedKnowledgeBase.name}"`,
request: req,
})
return NextResponse.json({
success: true,
data: updatedKnowledgeBase,
@@ -197,6 +211,19 @@ export async function DELETE(
logger.info(`[${requestId}] Knowledge base deleted: ${id} for user ${userId}`)
recordAudit({
workspaceId: accessCheck.knowledgeBase.workspaceId ?? null,
actorId: userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.KNOWLEDGE_BASE_DELETED,
resourceType: AuditResourceType.KNOWLEDGE_BASE,
resourceId: id,
resourceName: accessCheck.knowledgeBase.name,
description: `Deleted knowledge base "${accessCheck.knowledgeBase.name || id}"`,
request: _request,
})
return NextResponse.json({
success: true,
data: { message: 'Knowledge base deleted successfully' },
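
Taken together, the recordAudit call sites in these hunks suggest the payload shape below. This interface is inferred for readability only; the authoritative type lives in '@/lib/audit/log':

// Inferred shape of a recordAudit() argument, reconstructed from the call
// sites in these diffs; not the actual exported type.
interface AuditEntrySketch {
  workspaceId: string | null
  actorId: string
  actorName?: string
  actorEmail?: string
  action: string // an AuditAction member
  resourceType: string // an AuditResourceType member
  resourceId: string
  resourceName?: string
  description: string
  metadata?: Record<string, unknown>
  request: Request // a NextRequest at the call sites, presumably for IP/UA context
}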


@@ -1,68 +0,0 @@
import { db } from '@sim/db'
import { knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, lte } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
export const dynamic = 'force-dynamic'
const logger = createLogger('ConnectorSyncSchedulerAPI')
/**
* Cron endpoint that checks for connectors due for sync and dispatches sync jobs.
* Should be called every 5 minutes by an external cron service.
*/
export async function GET(request: NextRequest) {
const requestId = generateRequestId()
logger.info(`[${requestId}] Connector sync scheduler triggered`)
const authError = verifyCronAuth(request, 'Connector sync scheduler')
if (authError) {
return authError
}
try {
const now = new Date()
const dueConnectors = await db
.select({
id: knowledgeConnector.id,
})
.from(knowledgeConnector)
.where(
and(
eq(knowledgeConnector.status, 'active'),
lte(knowledgeConnector.nextSyncAt, now),
isNull(knowledgeConnector.deletedAt)
)
)
logger.info(`[${requestId}] Found ${dueConnectors.length} connectors due for sync`)
if (dueConnectors.length === 0) {
return NextResponse.json({
success: true,
message: 'No connectors due for sync',
count: 0,
})
}
for (const connector of dueConnectors) {
dispatchSync(connector.id, { requestId }).catch((error) => {
logger.error(`[${requestId}] Failed to dispatch sync for connector ${connector.id}`, error)
})
}
return NextResponse.json({
success: true,
message: `Dispatched ${dueConnectors.length} connector sync(s)`,
count: dueConnectors.length,
})
} catch (error) {
logger.error(`[${requestId}] Connector sync scheduler error`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
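
A sketch of the external caller the comment above assumes. The endpoint URL and the bearer-token header are assumptions; verifyCronAuth defines the real auth contract:

// Hypothetical poller standing in for an external cron service.
// CRON_ENDPOINT and CRON_SECRET are illustrative environment variables.
const CRON_ENDPOINT = process.env.CRON_ENDPOINT ?? 'https://<host>/api/<scheduler-route>'
setInterval(async () => {
  const res = await fetch(CRON_ENDPOINT, {
    headers: { Authorization: `Bearer ${process.env.CRON_SECRET}` }, // assumed scheme
  })
  const body = await res.json()
  console.log(`scheduler tick: ${body.count ?? 0} sync(s) dispatched`)
}, 5 * 60 * 1000) // every 5 minutes, matching the comment above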


@@ -4,6 +4,7 @@
* @vitest-environment node
*/
import {
auditMock,
createMockRequest,
mockAuth,
mockConsoleLogger,
@@ -16,6 +17,8 @@ mockKnowledgeSchemas()
mockDrizzleOrm()
mockConsoleLogger()
vi.mock('@/lib/audit/log', () => auditMock)
vi.mock('@/lib/workspaces/permissions/utils', () => ({
getUserEntityPermissions: vi.fn().mockResolvedValue('admin'),
}))


@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
@@ -109,6 +110,20 @@ export async function POST(req: NextRequest) {
`[${requestId}] Knowledge base created: ${newKnowledgeBase.id} for user ${session.user.id}`
)
recordAudit({
workspaceId: validatedData.workspaceId,
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.KNOWLEDGE_BASE_CREATED,
resourceType: AuditResourceType.KNOWLEDGE_BASE,
resourceId: newKnowledgeBase.id,
resourceName: validatedData.name,
description: `Created knowledge base "${validatedData.name}"`,
metadata: { name: validatedData.name },
request: req,
})
return NextResponse.json({
success: true,
data: newKnowledgeBase,


@@ -99,7 +99,7 @@ export interface EmbeddingData {
export interface KnowledgeBaseAccessResult {
hasAccess: true
knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId' | 'workspaceId'>
knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId' | 'workspaceId' | 'name'>
}
export interface KnowledgeBaseAccessDenied {
@@ -113,7 +113,7 @@ export type KnowledgeBaseAccessCheck = KnowledgeBaseAccessResult | KnowledgeBase
export interface DocumentAccessResult {
hasAccess: true
document: DocumentData
knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId' | 'workspaceId'>
knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId' | 'workspaceId' | 'name'>
}
export interface DocumentAccessDenied {
@@ -128,7 +128,7 @@ export interface ChunkAccessResult {
hasAccess: true
chunk: EmbeddingData
document: DocumentData
knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId' | 'workspaceId'>
knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId' | 'workspaceId' | 'name'>
}
export interface ChunkAccessDenied {
@@ -151,6 +151,7 @@ export async function checkKnowledgeBaseAccess(
id: knowledgeBase.id,
userId: knowledgeBase.userId,
workspaceId: knowledgeBase.workspaceId,
name: knowledgeBase.name,
})
.from(knowledgeBase)
.where(and(eq(knowledgeBase.id, knowledgeBaseId), isNull(knowledgeBase.deletedAt)))
@@ -193,6 +194,7 @@ export async function checkKnowledgeBaseWriteAccess(
id: knowledgeBase.id,
userId: knowledgeBase.userId,
workspaceId: knowledgeBase.workspaceId,
name: knowledgeBase.name,
})
.from(knowledgeBase)
.where(and(eq(knowledgeBase.id, knowledgeBaseId), isNull(knowledgeBase.deletedAt)))


@@ -3,6 +3,8 @@ import { mcpServers } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { McpDomainNotAllowedError, validateMcpDomain } from '@/lib/mcp/domain-check'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpService } from '@/lib/mcp/service'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
@@ -15,7 +17,11 @@ export const dynamic = 'force-dynamic'
* PATCH - Update an MCP server in the workspace (requires write or admin permission)
*/
export const PATCH = withMcpAuth<{ id: string }>('write')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
async (
request: NextRequest,
{ userId, userName, userEmail, workspaceId, requestId },
{ params }
) => {
const { id: serverId } = await params
try {
@@ -29,6 +35,17 @@ export const PATCH = withMcpAuth<{ id: string }>('write')(
// Remove workspaceId from body to prevent it from being updated
const { workspaceId: _, ...updateData } = body
if (updateData.url) {
try {
validateMcpDomain(updateData.url)
} catch (e) {
if (e instanceof McpDomainNotAllowedError) {
return createMcpErrorResponse(e, e.message, 403)
}
throw e
}
}
// Get the current server to check if URL is changing
const [currentServer] = await db
.select({ url: mcpServers.url })
@@ -73,6 +90,20 @@ export const PATCH = withMcpAuth<{ id: string }>('write')(
}
logger.info(`[${requestId}] Successfully updated MCP server: ${serverId}`)
recordAudit({
workspaceId,
actorId: userId,
actorName: userName,
actorEmail: userEmail,
action: AuditAction.MCP_SERVER_UPDATED,
resourceType: AuditResourceType.MCP_SERVER,
resourceId: serverId,
resourceName: updatedServer.name || serverId,
description: `Updated MCP server "${updatedServer.name || serverId}"`,
request,
})
return createMcpSuccessResponse({ server: updatedServer })
} catch (error) {
logger.error(`[${requestId}] Error updating MCP server:`, error)
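
This and the following MCP hunks widen the withMcpAuth handler context with the actor's name and email so audit entries can attribute changes. An inferred sketch of the context shape, for reading the diffs below (the real type lives in '@/lib/mcp/middleware'):

// Inferred, not the actual exported type.
interface McpAuthContextSketch {
  userId: string
  userName?: string
  userEmail?: string
  workspaceId: string
  requestId: string
}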


@@ -3,6 +3,8 @@ import { mcpServers } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { McpDomainNotAllowedError, validateMcpDomain } from '@/lib/mcp/domain-check'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpService } from '@/lib/mcp/service'
import {
@@ -54,7 +56,7 @@ export const GET = withMcpAuth('read')(
* it will be updated instead of creating a duplicate.
*/
export const POST = withMcpAuth('write')(
async (request: NextRequest, { userId, workspaceId, requestId }) => {
async (request: NextRequest, { userId, userName, userEmail, workspaceId, requestId }) => {
try {
const body = getParsedBody(request) || (await request.json())
@@ -72,6 +74,15 @@ export const POST = withMcpAuth('write')(
)
}
try {
validateMcpDomain(body.url)
} catch (e) {
if (e instanceof McpDomainNotAllowedError) {
return createMcpErrorResponse(e, e.message, 403)
}
throw e
}
const serverId = body.url ? generateMcpServerId(workspaceId, body.url) : crypto.randomUUID()
const [existingServer] = await db
@@ -151,6 +162,20 @@ export const POST = withMcpAuth('write')(
// Silently fail
}
recordAudit({
workspaceId,
actorId: userId,
actorName: userName,
actorEmail: userEmail,
action: AuditAction.MCP_SERVER_ADDED,
resourceType: AuditResourceType.MCP_SERVER,
resourceId: serverId,
resourceName: body.name,
description: `Added MCP server "${body.name}"`,
metadata: { serverName: body.name, transport: body.transport },
request,
})
return createMcpSuccessResponse({ serverId }, 201)
} catch (error) {
logger.error(`[${requestId}] Error registering MCP server:`, error)
@@ -167,7 +192,7 @@ export const POST = withMcpAuth('write')(
* DELETE - Delete an MCP server from the workspace (requires admin permission)
*/
export const DELETE = withMcpAuth('admin')(
async (request: NextRequest, { userId, workspaceId, requestId }) => {
async (request: NextRequest, { userId, userName, userEmail, workspaceId, requestId }) => {
try {
const { searchParams } = new URL(request.url)
const serverId = searchParams.get('serverId')
@@ -198,6 +223,20 @@ export const DELETE = withMcpAuth('admin')(
await mcpService.clearCache(workspaceId)
logger.info(`[${requestId}] Successfully deleted MCP server: ${serverId}`)
recordAudit({
workspaceId,
actorId: userId,
actorName: userName,
actorEmail: userEmail,
action: AuditAction.MCP_SERVER_REMOVED,
resourceType: AuditResourceType.MCP_SERVER,
resourceId: serverId!,
resourceName: deletedServer.name,
description: `Removed MCP server "${deletedServer.name}"`,
request,
})
return createMcpSuccessResponse({ message: `Server ${serverId} deleted successfully` })
} catch (error) {
logger.error(`[${requestId}] Error deleting MCP server:`, error)


@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import type { NextRequest } from 'next/server'
import { McpClient } from '@/lib/mcp/client'
import { McpDomainNotAllowedError, validateMcpDomain } from '@/lib/mcp/domain-check'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { resolveMcpConfigEnvVars } from '@/lib/mcp/resolve-config'
import type { McpTransport } from '@/lib/mcp/types'
@@ -71,6 +72,15 @@ export const POST = withMcpAuth('write')(
)
}
try {
validateMcpDomain(body.url)
} catch (e) {
if (e instanceof McpDomainNotAllowedError) {
return createMcpErrorResponse(e, e.message, 403)
}
throw e
}
// Build initial config for resolution
const initialConfig = {
id: `test-${requestId}`,
@@ -95,6 +105,16 @@ export const POST = withMcpAuth('write')(
logger.warn(`[${requestId}] Some environment variables not found:`, { missingVars })
}
// Re-validate domain after env var resolution
try {
validateMcpDomain(testConfig.url)
} catch (e) {
if (e instanceof McpDomainNotAllowedError) {
return createMcpErrorResponse(e, e.message, 403)
}
throw e
}
const testSecurityPolicy = {
requireConsent: false,
auditLevel: 'none' as const,
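
The validate/catch/403 pattern now repeats across these MCP routes; a small guard like the sketch below (illustrative only, not part of the diff) would centralize it:

// Hypothetical consolidation of the repeated domain-allowlist guard.
// validateMcpDomain, McpDomainNotAllowedError, and createMcpErrorResponse are
// the same imports the routes above already use; only this wrapper is new.
function guardMcpDomain(url: string | undefined) {
  if (!url) return null
  try {
    validateMcpDomain(url)
    return null
  } catch (e) {
    if (e instanceof McpDomainNotAllowedError) {
      return createMcpErrorResponse(e, e.message, 403)
    }
    throw e // unexpected errors still reach the route's catch block
  }
}
// Usage inside a handler:
//   const denied = guardMcpDomain(body.url)
//   if (denied) return denied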


@@ -3,6 +3,7 @@ import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpPubSub } from '@/lib/mcp/pubsub'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
@@ -71,7 +72,11 @@ export const GET = withMcpAuth<RouteParams>('read')(
* PATCH - Update a workflow MCP server
*/
export const PATCH = withMcpAuth<RouteParams>('write')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
async (
request: NextRequest,
{ userId, userName, userEmail, workspaceId, requestId },
{ params }
) => {
try {
const { id: serverId } = await params
const body = getParsedBody(request) || (await request.json())
@@ -112,6 +117,19 @@ export const PATCH = withMcpAuth<RouteParams>('write')(
logger.info(`[${requestId}] Successfully updated workflow MCP server: ${serverId}`)
recordAudit({
workspaceId,
actorId: userId,
actorName: userName,
actorEmail: userEmail,
action: AuditAction.MCP_SERVER_UPDATED,
resourceType: AuditResourceType.MCP_SERVER,
resourceId: serverId,
resourceName: updatedServer.name,
description: `Updated workflow MCP server "${updatedServer.name}"`,
request,
})
return createMcpSuccessResponse({ server: updatedServer })
} catch (error) {
logger.error(`[${requestId}] Error updating workflow MCP server:`, error)
@@ -128,7 +146,11 @@ export const PATCH = withMcpAuth<RouteParams>('write')(
* DELETE - Delete a workflow MCP server and all its tools
*/
export const DELETE = withMcpAuth<RouteParams>('admin')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
async (
request: NextRequest,
{ userId, userName, userEmail, workspaceId, requestId },
{ params }
) => {
try {
const { id: serverId } = await params
@@ -149,6 +171,19 @@ export const DELETE = withMcpAuth<RouteParams>('admin')(
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
recordAudit({
workspaceId,
actorId: userId,
actorName: userName,
actorEmail: userEmail,
action: AuditAction.MCP_SERVER_REMOVED,
resourceType: AuditResourceType.MCP_SERVER,
resourceId: serverId,
resourceName: deletedServer.name,
description: `Unpublished workflow MCP server "${deletedServer.name}"`,
request,
})
return createMcpSuccessResponse({ message: `Server ${serverId} deleted successfully` })
} catch (error) {
logger.error(`[${requestId}] Error deleting workflow MCP server:`, error)


@@ -3,6 +3,7 @@ import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpPubSub } from '@/lib/mcp/pubsub'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
@@ -65,7 +66,11 @@ export const GET = withMcpAuth<RouteParams>('read')(
* PATCH - Update a tool's configuration
*/
export const PATCH = withMcpAuth<RouteParams>('write')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
async (
request: NextRequest,
{ userId, userName, userEmail, workspaceId, requestId },
{ params }
) => {
try {
const { id: serverId, toolId } = await params
const body = getParsedBody(request) || (await request.json())
@@ -118,6 +123,19 @@ export const PATCH = withMcpAuth<RouteParams>('write')(
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
recordAudit({
workspaceId,
actorId: userId,
actorName: userName,
actorEmail: userEmail,
action: AuditAction.MCP_SERVER_UPDATED,
resourceType: AuditResourceType.MCP_SERVER,
resourceId: serverId,
description: `Updated tool "${updatedTool.toolName}" in MCP server`,
metadata: { toolId, toolName: updatedTool.toolName },
request,
})
return createMcpSuccessResponse({ tool: updatedTool })
} catch (error) {
logger.error(`[${requestId}] Error updating tool:`, error)
@@ -134,7 +152,11 @@ export const PATCH = withMcpAuth<RouteParams>('write')(
* DELETE - Remove a tool from an MCP server
*/
export const DELETE = withMcpAuth<RouteParams>('write')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
async (
request: NextRequest,
{ userId, userName, userEmail, workspaceId, requestId },
{ params }
) => {
try {
const { id: serverId, toolId } = await params
@@ -165,6 +187,19 @@ export const DELETE = withMcpAuth<RouteParams>('write')(
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
recordAudit({
workspaceId,
actorId: userId,
actorName: userName,
actorEmail: userEmail,
action: AuditAction.MCP_SERVER_UPDATED,
resourceType: AuditResourceType.MCP_SERVER,
resourceId: serverId,
description: `Removed tool "${deletedTool.toolName}" from MCP server`,
metadata: { toolId, toolName: deletedTool.toolName },
request,
})
return createMcpSuccessResponse({ message: `Tool ${toolId} deleted successfully` })
} catch (error) {
logger.error(`[${requestId}] Error deleting tool:`, error)


@@ -3,6 +3,7 @@ import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpPubSub } from '@/lib/mcp/pubsub'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
@@ -76,7 +77,11 @@ export const GET = withMcpAuth<RouteParams>('read')(
* POST - Add a workflow as a tool to an MCP server
*/
export const POST = withMcpAuth<RouteParams>('write')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
async (
request: NextRequest,
{ userId, userName, userEmail, workspaceId, requestId },
{ params }
) => {
try {
const { id: serverId } = await params
const body = getParsedBody(request) || (await request.json())
@@ -197,6 +202,19 @@ export const POST = withMcpAuth<RouteParams>('write')(
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
recordAudit({
workspaceId,
actorId: userId,
actorName: userName,
actorEmail: userEmail,
action: AuditAction.MCP_SERVER_UPDATED,
resourceType: AuditResourceType.MCP_SERVER,
resourceId: serverId,
description: `Added tool "${toolName}" to MCP server`,
metadata: { toolId, toolName, workflowId: body.workflowId },
request,
})
return createMcpSuccessResponse({ tool }, 201)
} catch (error) {
logger.error(`[${requestId}] Error adding tool:`, error)


@@ -3,6 +3,7 @@ import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, inArray, sql } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpPubSub } from '@/lib/mcp/pubsub'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
@@ -85,7 +86,7 @@ export const GET = withMcpAuth('read')(
* POST - Create a new workflow MCP server
*/
export const POST = withMcpAuth('write')(
- async (request: NextRequest, { userId, workspaceId, requestId }) => {
+ async (request: NextRequest, { userId, userName, userEmail, workspaceId, requestId }) => {
try {
const body = getParsedBody(request) || (await request.json())
@@ -188,6 +189,19 @@ export const POST = withMcpAuth('write')(
`[${requestId}] Successfully created workflow MCP server: ${body.name} (ID: ${serverId})`
)
recordAudit({
workspaceId,
actorId: userId,
actorName: userName,
actorEmail: userEmail,
action: AuditAction.MCP_SERVER_ADDED,
resourceType: AuditResourceType.MCP_SERVER,
resourceId: serverId,
resourceName: body.name.trim(),
description: `Published workflow MCP server "${body.name.trim()}" with ${addedTools.length} tool(s)`,
request,
})
return createMcpSuccessResponse({ server, addedTools }, 201)
} catch (error) {
logger.error(`[${requestId}] Error creating workflow MCP server:`, error)

View File

@@ -18,6 +18,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
@@ -552,6 +553,25 @@ export async function PUT(
email: orgInvitation.email,
})
const auditActionMap = {
accepted: AuditAction.ORG_INVITATION_ACCEPTED,
rejected: AuditAction.ORG_INVITATION_REJECTED,
cancelled: AuditAction.ORG_INVITATION_CANCELLED,
} as const
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: auditActionMap[status],
resourceType: AuditResourceType.ORGANIZATION,
resourceId: organizationId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
description: `Organization invitation ${status} for ${orgInvitation.email}`,
metadata: { invitationId, email: orgInvitation.email, status },
request: req,
})
return NextResponse.json({
success: true,
message: `Invitation ${status} successfully`,

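The auditActionMap above, declared with as const, maps each invitation status to its audit action using literal types, so an unhandled status fails at compile time rather than at runtime. A standalone illustration (the status union and action strings here are illustrative):

type InvitationStatus = 'accepted' | 'rejected' | 'cancelled'

// `as const` keeps the values as literal types; `satisfies` checks that
// every status is covered without widening the value types.
const actionMap = {
  accepted: 'ORG_INVITATION_ACCEPTED',
  rejected: 'ORG_INVITATION_REJECTED',
  cancelled: 'ORG_INVITATION_CANCELLED',
} as const satisfies Record<InvitationStatus, string>

function actionFor(status: InvitationStatus) {
  return actionMap[status] // inferred as the union of the three literals
}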
View File

@@ -17,6 +17,7 @@ import {
renderBatchInvitationEmail,
renderInvitationEmail,
} from '@/components/emails'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import {
validateBulkInvitations,
@@ -411,6 +412,22 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
workspaceInvitationCount: workspaceInvitationIds.length,
})
for (const inv of invitationsToCreate) {
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.ORG_INVITATION_CREATED,
resourceType: AuditResourceType.ORGANIZATION,
resourceId: organizationId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: organizationEntry[0]?.name,
description: `Invited ${inv.email} to organization as ${role}`,
metadata: { invitationId: inv.id, email: inv.email, role },
request,
})
}
return NextResponse.json({
success: true,
message: `${invitationsToCreate.length} invitation(s) sent successfully`,
@@ -532,6 +549,19 @@ export async function DELETE(
email: result[0].email,
})
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.ORG_INVITATION_REVOKED,
resourceType: AuditResourceType.ORGANIZATION,
resourceId: organizationId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
description: `Revoked organization invitation for ${result[0].email}`,
metadata: { invitationId, email: result[0].email },
request,
})
return NextResponse.json({
success: true,
message: 'Invitation cancelled successfully',

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { getUserUsageData } from '@/lib/billing/core/usage'
import { removeUserFromOrganization } from '@/lib/billing/organizations/membership'
@@ -213,6 +214,19 @@ export async function PUT(
updatedBy: session.user.id,
})
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.ORG_MEMBER_ROLE_CHANGED,
resourceType: AuditResourceType.ORGANIZATION,
resourceId: organizationId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
description: `Changed role for member ${memberId} to ${role}`,
metadata: { targetUserId: memberId, newRole: role },
request,
})
return NextResponse.json({
success: true,
message: 'Member role updated successfully',
@@ -305,6 +319,22 @@ export async function DELETE(
billingActions: result.billingActions,
})
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.ORG_MEMBER_REMOVED,
resourceType: AuditResourceType.ORGANIZATION,
resourceId: organizationId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
description:
session.user.id === targetUserId
? 'Left the organization'
: `Removed member ${targetUserId} from organization`,
metadata: { targetUserId, wasSelfRemoval: session.user.id === targetUserId },
request,
})
return NextResponse.json({
success: true,
message:

View File

@@ -5,6 +5,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { getUserUsageData } from '@/lib/billing/core/usage'
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
@@ -285,6 +286,19 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
// Don't fail the request if email fails
}
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.ORG_INVITATION_CREATED,
resourceType: AuditResourceType.ORGANIZATION,
resourceId: organizationId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
description: `Invited ${normalizedEmail} to organization as ${role}`,
metadata: { invitationId, email: normalizedEmail, role },
request,
})
return NextResponse.json({
success: true,
message: `Invitation sent to ${normalizedEmail}`,

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq, ne } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import {
getOrganizationSeatAnalytics,
@@ -192,6 +193,20 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
changes: { name, slug, logo },
})
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.ORGANIZATION_UPDATED,
resourceType: AuditResourceType.ORGANIZATION,
resourceId: organizationId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: updatedOrg[0].name,
description: `Updated organization settings`,
metadata: { changes: { name, slug, logo } },
request,
})
return NextResponse.json({
success: true,
message: 'Organization updated successfully',

View File

@@ -3,6 +3,7 @@ import { member, organization } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, or } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { createOrganizationForTeamPlan } from '@/lib/billing/organization'
@@ -115,6 +116,19 @@ export async function POST(request: Request) {
organizationId,
})
recordAudit({
workspaceId: null,
actorId: user.id,
action: AuditAction.ORGANIZATION_CREATED,
resourceType: AuditResourceType.ORGANIZATION,
resourceId: organizationId,
actorName: user.name ?? undefined,
actorEmail: user.email ?? undefined,
resourceName: organizationName ?? undefined,
description: `Created organization "${organizationName}"`,
request,
})
return NextResponse.json({
success: true,
organizationId,

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
@@ -13,6 +14,7 @@ async function getPermissionGroupWithAccess(groupId: string, userId: string) {
const [group] = await db
.select({
id: permissionGroup.id,
name: permissionGroup.name,
organizationId: permissionGroup.organizationId,
})
.from(permissionGroup)
@@ -151,6 +153,20 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
assignedBy: session.user.id,
})
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.PERMISSION_GROUP_MEMBER_ADDED,
resourceType: AuditResourceType.PERMISSION_GROUP,
resourceId: id,
resourceName: result.group.name,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
description: `Added member ${userId} to permission group "${result.group.name}"`,
metadata: { targetUserId: userId, permissionGroupId: id },
request: req,
})
return NextResponse.json({ member: newMember }, { status: 201 })
} catch (error) {
if (error instanceof z.ZodError) {
@@ -221,6 +237,20 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i
userId: session.user.id,
})
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.PERMISSION_GROUP_MEMBER_REMOVED,
resourceType: AuditResourceType.PERMISSION_GROUP,
resourceId: id,
resourceName: result.group.name,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
description: `Removed member ${memberToRemove.userId} from permission group "${result.group.name}"`,
metadata: { targetUserId: memberToRemove.userId, memberId, permissionGroupId: id },
request: req,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Error removing member from permission group', error)

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
import {
@@ -181,6 +182,19 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
.where(eq(permissionGroup.id, id))
.limit(1)
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.PERMISSION_GROUP_UPDATED,
resourceType: AuditResourceType.PERMISSION_GROUP,
resourceId: id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: updated.name,
description: `Updated permission group "${updated.name}"`,
request: req,
})
return NextResponse.json({
permissionGroup: {
...updated,
@@ -229,6 +243,19 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i
logger.info('Deleted permission group', { permissionGroupId: id, userId: session.user.id })
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.PERMISSION_GROUP_DELETED,
resourceType: AuditResourceType.PERMISSION_GROUP,
resourceId: id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: result.group.name,
description: `Deleted permission group "${result.group.name}"`,
request: req,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Error deleting permission group', error)

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, count, desc, eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
import {
@@ -198,6 +199,19 @@ export async function POST(req: Request) {
userId: session.user.id,
})
recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.PERMISSION_GROUP_CREATED,
resourceType: AuditResourceType.PERMISSION_GROUP,
resourceId: newGroup.id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: name,
description: `Created permission group "${name}"`,
request: req,
})
return NextResponse.json({ permissionGroup: newGroup }, { status: 201 })
} catch (error) {
if (error instanceof z.ZodError) {

View File

@@ -3,7 +3,7 @@
*
* @vitest-environment node
*/
- import { databaseMock, loggerMock } from '@sim/testing'
+ import { auditMock, databaseMock, loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
@@ -37,6 +37,8 @@ vi.mock('@/lib/core/utils/request', () => ({
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/audit/log', () => auditMock)
import { PUT } from './route'
function createRequest(body: Record<string, unknown>): NextRequest {

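The test swaps @/lib/audit/log for auditMock from @sim/testing, whose contents are not shown in this diff. A plausible stub, assuming it only needs to satisfy the route's three imports without touching storage:

import { vi } from 'vitest'

// Assumed shape only: recordAudit becomes a spy, and the two enums are
// proxied so any member access returns its own name as a string.
export const auditMock = {
  recordAudit: vi.fn(),
  AuditAction: new Proxy({} as Record<string, string>, {
    get: (_target, key) => String(key),
  }),
  AuditResourceType: new Proxy({} as Record<string, string>, {
    get: (_target, key) => String(key),
  }),
}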
View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { validateCronExpression } from '@/lib/workflows/schedules/utils'
@@ -106,6 +107,18 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
logger.info(`[${requestId}] Reactivated schedule: ${scheduleId}`)
recordAudit({
workspaceId: authorization.workflow.workspaceId ?? null,
actorId: session.user.id,
action: AuditAction.SCHEDULE_UPDATED,
resourceType: AuditResourceType.SCHEDULE,
resourceId: scheduleId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
description: `Reactivated schedule for workflow ${schedule.workflowId}`,
request,
})
return NextResponse.json({
message: 'Schedule activated successfully',
nextRunAt,

View File

@@ -0,0 +1,14 @@
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getAllowedIntegrationsFromEnv } from '@/lib/core/config/feature-flags'
export async function GET() {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
return NextResponse.json({
allowedIntegrations: getAllowedIntegrationsFromEnv(),
})
}

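A sketch of consuming the new endpoint from the client; the route path is an assumption inferred from the handler's purpose, while the response shape comes from the handler above:

// Hypothetical path; wherever this route file is mounted under /api.
async function fetchAllowedIntegrations(): Promise<string[] | null> {
  const res = await fetch('/api/config/allowed-integrations')
  if (!res.ok) throw new Error(`Failed to load allowed integrations: ${res.status}`)
  const { allowedIntegrations } = await res.json()
  // Presumably mirrors getAllowedIntegrationsFromEnv: a list when the env
  // restricts integrations, null/undefined when it does not.
  return allowedIntegrations ?? null
}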
View File

@@ -0,0 +1,27 @@
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getAllowedMcpDomainsFromEnv } from '@/lib/core/config/feature-flags'
import { getBaseUrl } from '@/lib/core/utils/urls'
export async function GET() {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const configuredDomains = getAllowedMcpDomainsFromEnv()
if (configuredDomains === null) {
return NextResponse.json({ allowedMcpDomains: null })
}
try {
const platformHostname = new URL(getBaseUrl()).hostname.toLowerCase()
if (!configuredDomains.includes(platformHostname)) {
return NextResponse.json({
allowedMcpDomains: [...configuredDomains, platformHostname],
})
}
} catch {}
return NextResponse.json({ allowedMcpDomains: configuredDomains })
}

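The handler above guarantees the platform's own hostname stays allowed even when an MCP domain allowlist is configured. The merge step, distilled into a pure function with illustrative values:

// Pure version of the merge performed by the route.
function withPlatformHost(configured: string[] | null, baseUrl: string): string[] | null {
  if (configured === null) return null // null means no restriction configured
  try {
    const host = new URL(baseUrl).hostname.toLowerCase()
    return configured.includes(host) ? configured : [...configured, host]
  } catch {
    return configured // unparseable base URL: keep the configured list as-is
  }
}

withPlatformHost(['mcp.example.com'], 'https://app.example.com')
// => ['mcp.example.com', 'app.example.com']
withPlatformHost(null, 'https://app.example.com')
// => null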
View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import {
@@ -247,6 +248,18 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
logger.info(`[${requestId}] Successfully updated template: ${id}`)
recordAudit({
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.TEMPLATE_UPDATED,
resourceType: AuditResourceType.TEMPLATE,
resourceId: id,
resourceName: name ?? template.name,
description: `Updated template "${name ?? template.name}"`,
request,
})
return NextResponse.json({
data: updatedTemplate[0],
message: 'Template updated successfully',
@@ -300,6 +313,19 @@ export async function DELETE(
await db.delete(templates).where(eq(templates.id, id))
logger.info(`[${requestId}] Deleted template: ${id}`)
recordAudit({
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.TEMPLATE_DELETED,
resourceType: AuditResourceType.TEMPLATE,
resourceId: id,
resourceName: template.name,
description: `Deleted template "${template.name}"`,
request,
})
return NextResponse.json({ success: true })
} catch (error: any) {
logger.error(`[${requestId}] Error deleting template: ${id}`, error)

View File

@@ -11,6 +11,7 @@ import { and, desc, eq, ilike, or, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
@@ -285,6 +286,18 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Successfully created template: ${templateId}`)
recordAudit({
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.TEMPLATE_CREATED,
resourceType: AuditResourceType.TEMPLATE,
resourceId: templateId,
resourceName: data.name,
description: `Created template "${data.name}"`,
request,
})
return NextResponse.json(
{
id: templateId,

View File

@@ -0,0 +1,145 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
export const dynamic = 'force-dynamic'
const logger = createLogger('DataverseUploadFileAPI')
const DataverseUploadFileSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
environmentUrl: z.string().min(1, 'Environment URL is required'),
entitySetName: z.string().min(1, 'Entity set name is required'),
recordId: z.string().min(1, 'Record ID is required'),
fileColumn: z.string().min(1, 'File column is required'),
fileName: z.string().min(1, 'File name is required'),
file: RawFileInputSchema.optional().nullable(),
fileContent: z.string().optional().nullable(),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Dataverse upload attempt: ${authResult.error}`)
return NextResponse.json(
{ success: false, error: authResult.error || 'Authentication required' },
{ status: 401 }
)
}
logger.info(
`[${requestId}] Authenticated Dataverse upload request via ${authResult.authType}`,
{
userId: authResult.userId,
}
)
const body = await request.json()
const validatedData = DataverseUploadFileSchema.parse(body)
logger.info(`[${requestId}] Uploading file to Dataverse`, {
entitySetName: validatedData.entitySetName,
recordId: validatedData.recordId,
fileColumn: validatedData.fileColumn,
fileName: validatedData.fileName,
hasFile: !!validatedData.file,
hasFileContent: !!validatedData.fileContent,
})
let fileBuffer: Buffer
if (validatedData.file) {
const rawFile = validatedData.file
logger.info(`[${requestId}] Processing UserFile upload: ${rawFile.name}`)
let userFile
try {
userFile = processSingleFileToUserFile(rawFile, requestId, logger)
} catch (error) {
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Failed to process file',
},
{ status: 400 }
)
}
fileBuffer = await downloadFileFromStorage(userFile, requestId, logger)
} else if (validatedData.fileContent) {
fileBuffer = Buffer.from(validatedData.fileContent, 'base64')
} else {
return NextResponse.json(
{ success: false, error: 'Either file or fileContent must be provided' },
{ status: 400 }
)
}
const baseUrl = validatedData.environmentUrl.replace(/\/$/, '')
const uploadUrl = `${baseUrl}/api/data/v9.2/${validatedData.entitySetName}(${validatedData.recordId})/${validatedData.fileColumn}`
const response = await fetch(uploadUrl, {
method: 'PATCH',
headers: {
Authorization: `Bearer ${validatedData.accessToken}`,
'Content-Type': 'application/octet-stream',
'OData-MaxVersion': '4.0',
'OData-Version': '4.0',
'x-ms-file-name': validatedData.fileName,
},
body: new Uint8Array(fileBuffer),
})
if (!response.ok) {
const errorData = await response.json().catch(() => ({}))
const errorMessage =
errorData?.error?.message ??
`Dataverse API error: ${response.status} ${response.statusText}`
logger.error(`[${requestId}] Dataverse upload file failed`, {
errorData,
status: response.status,
})
return NextResponse.json({ success: false, error: errorMessage }, { status: response.status })
}
logger.info(`[${requestId}] File uploaded to Dataverse successfully`, {
entitySetName: validatedData.entitySetName,
recordId: validatedData.recordId,
fileColumn: validatedData.fileColumn,
})
return NextResponse.json({
success: true,
output: {
recordId: validatedData.recordId,
fileColumn: validatedData.fileColumn,
fileName: validatedData.fileName,
success: true,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ success: false, error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error uploading file to Dataverse:`, error)
return NextResponse.json(
{ success: false, error: error instanceof Error ? error.message : 'Internal server error' },
{ status: 500 }
)
}
}

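A sketch of invoking the new route via its base64 fileContent path. The endpoint URL and Authorization header are assumptions (real authentication goes through checkInternalAuth); the body fields match DataverseUploadFileSchema above:

// Hypothetical client call; path and token handling are placeholders.
async function uploadToDataverse(pdfBytes: Uint8Array) {
  const res = await fetch('/api/tools/dataverse/upload-file', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: 'Bearer <internal-token>', // placeholder
    },
    body: JSON.stringify({
      accessToken: '<dataverse-oauth-token>',
      environmentUrl: 'https://org.crm.dynamics.com',
      entitySetName: 'accounts',
      recordId: '00000000-0000-0000-0000-000000000000',
      fileColumn: 'sample_filecolumn',
      fileName: 'report.pdf',
      fileContent: Buffer.from(pdfBytes).toString('base64'),
    }),
  })
  if (!res.ok) throw new Error(`Upload failed: ${res.status}`)
  return res.json() // { success: true, output: { recordId, fileColumn, fileName, success } }
}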
View File

@@ -22,15 +22,20 @@ interface PipedriveFile {
interface PipedriveApiResponse {
success: boolean
data?: PipedriveFile[]
additional_data?: {
pagination?: {
more_items_in_collection: boolean
next_start: number
}
}
error?: string
}
const PipedriveGetFilesSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
- deal_id: z.string().optional().nullable(),
- person_id: z.string().optional().nullable(),
- org_id: z.string().optional().nullable(),
+ sort: z.enum(['id', 'update_time']).optional().nullable(),
limit: z.string().optional().nullable(),
+ start: z.string().optional().nullable(),
downloadFiles: z.boolean().optional().default(false),
})
@@ -54,20 +59,19 @@ export async function POST(request: NextRequest) {
const body = await request.json()
const validatedData = PipedriveGetFilesSchema.parse(body)
- const { accessToken, deal_id, person_id, org_id, limit, downloadFiles } = validatedData
+ const { accessToken, sort, limit, start, downloadFiles } = validatedData
const baseUrl = 'https://api.pipedrive.com/v1/files'
const queryParams = new URLSearchParams()
- if (deal_id) queryParams.append('deal_id', deal_id)
- if (person_id) queryParams.append('person_id', person_id)
- if (org_id) queryParams.append('org_id', org_id)
+ if (sort) queryParams.append('sort', sort)
if (limit) queryParams.append('limit', limit)
+ if (start) queryParams.append('start', start)
const queryString = queryParams.toString()
const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
- logger.info(`[${requestId}] Fetching files from Pipedrive`, { deal_id, person_id, org_id })
+ logger.info(`[${requestId}] Fetching files from Pipedrive`)
const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
if (!urlValidation.isValid) {
@@ -93,6 +97,8 @@ export async function POST(request: NextRequest) {
}
const files = data.data || []
const hasMore = data.additional_data?.pagination?.more_items_in_collection || false
const nextStart = data.additional_data?.pagination?.next_start ?? null
const downloadedFiles: Array<{
name: string
mimeType: string
@@ -149,6 +155,8 @@ export async function POST(request: NextRequest) {
files,
downloadedFiles: downloadedFiles.length > 0 ? downloadedFiles : undefined,
total_items: files.length,
has_more: hasMore,
next_start: nextStart,
success: true,
},
})

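With has_more and next_start now surfaced in the output, callers can page through all files by feeding next_start back in as start. A sketch against the route above (the path and page size are assumptions; the offset-style start/limit parameters are Pipedrive v1 pagination):

// Collect every file by following next_start until has_more is false.
async function fetchAllPipedriveFiles(accessToken: string): Promise<unknown[]> {
  const all: unknown[] = []
  let start: number | null = 0
  while (start !== null) {
    const res = await fetch('/api/tools/pipedrive/get-files', { // assumed path
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ accessToken, limit: '100', start: String(start) }),
    })
    if (!res.ok) throw new Error(`Pipedrive fetch failed: ${res.status}`)
    const { output } = await res.json()
    all.push(...output.files)
    start = output.has_more ? output.next_start : null
  }
  return all
}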
View File

@@ -3,6 +3,7 @@ import { apiKey } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
@@ -34,12 +35,27 @@ export async function DELETE(
const result = await db
.delete(apiKey)
.where(and(eq(apiKey.id, keyId), eq(apiKey.userId, userId)))
- .returning({ id: apiKey.id })
+ .returning({ id: apiKey.id, name: apiKey.name })
if (!result.length) {
return NextResponse.json({ error: 'API key not found' }, { status: 404 })
}
const deletedKey = result[0]
recordAudit({
workspaceId: null,
actorId: userId,
action: AuditAction.PERSONAL_API_KEY_REVOKED,
resourceType: AuditResourceType.API_KEY,
resourceId: keyId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: deletedKey.name,
description: `Revoked personal API key: ${deletedKey.name}`,
request,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Failed to delete API key', { error })

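The delete now projects the key's name through Drizzle's .returning(), so the audit description is built in the same round trip as the deletion. A standalone illustration (the table definition here is illustrative; the real apiKey schema lives in @sim/db/schema):

import { eq } from 'drizzle-orm'
import type { NodePgDatabase } from 'drizzle-orm/node-postgres'
import { pgTable, text } from 'drizzle-orm/pg-core'

// Illustrative stand-in for the real apiKey table.
const apiKeyTable = pgTable('api_key', {
  id: text('id').primaryKey(),
  userId: text('user_id').notNull(),
  name: text('name').notNull(),
})

async function revokeKey(db: NodePgDatabase, keyId: string) {
  // .returning() projects columns from the deleted rows, making the name
  // available for the audit entry without a separate SELECT.
  const result = await db
    .delete(apiKeyTable)
    .where(eq(apiKeyTable.id, keyId))
    .returning({ id: apiKeyTable.id, name: apiKeyTable.name })
  return result[0] ?? null
}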
View File

@@ -5,6 +5,7 @@ import { and, eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { type NextRequest, NextResponse } from 'next/server'
import { createApiKey, getApiKeyDisplayFormat } from '@/lib/api-key/auth'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
const logger = createLogger('ApiKeysAPI')
@@ -110,6 +111,19 @@ export async function POST(request: NextRequest) {
createdAt: apiKey.createdAt,
})
recordAudit({
workspaceId: null,
actorId: userId,
action: AuditAction.PERSONAL_API_KEY_CREATED,
resourceType: AuditResourceType.API_KEY,
resourceId: newKey.id,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: name,
description: `Created personal API key: ${name}`,
request,
})
return NextResponse.json({
key: {
...newKey,

View File

@@ -3,6 +3,7 @@ import { webhook, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateInteger } from '@/lib/core/security/input-validation'
import { PlatformEvents } from '@/lib/core/telemetry'
@@ -261,6 +262,20 @@ export async function DELETE(
logger.info(`[${requestId}] Successfully deleted webhook: ${id}`)
}
recordAudit({
workspaceId: webhookData.workflow.workspaceId || null,
actorId: userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.WEBHOOK_DELETED,
resourceType: AuditResourceType.WEBHOOK,
resourceId: id,
resourceName: foundWebhook.provider || 'generic',
description: 'Deleted webhook',
metadata: { workflowId: webhookData.workflow.id },
request,
})
return NextResponse.json({ success: true }, { status: 200 })
} catch (error: any) {
logger.error(`[${requestId}] Error deleting webhook`, {

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, desc, eq, inArray, isNull, or } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
@@ -145,7 +146,8 @@ export async function GET(request: NextRequest) {
// Create or Update a webhook
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
- const userId = (await getSession())?.user?.id
+ const session = await getSession()
+ const userId = session?.user?.id
if (!userId) {
logger.warn(`[${requestId}] Unauthorized webhook creation attempt`)
@@ -678,6 +680,20 @@ export async function POST(request: NextRequest) {
} catch {
// Telemetry should not fail the operation
}
recordAudit({
workspaceId: workflowRecord.workspaceId || null,
actorId: userId,
actorName: session?.user?.name ?? undefined,
actorEmail: session?.user?.email ?? undefined,
action: AuditAction.WEBHOOK_CREATED,
resourceType: AuditResourceType.WEBHOOK,
resourceId: savedWebhook.id,
resourceName: provider || 'generic',
description: `Created ${provider || 'generic'} webhook`,
metadata: { provider, workflowId },
request,
})
}
const status = targetWebhookId ? 200 : 201

View File

@@ -2,6 +2,7 @@ import { db, workflow, workflowDeploymentVersion } from '@sim/db'
import { createLogger } from '@sim/logger'
import { and, desc, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { generateRequestId } from '@/lib/core/utils/request'
import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
import {
@@ -258,6 +259,19 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
// Sync MCP tools with the latest parameter schema
await syncMcpToolsForWorkflow({ workflowId: id, requestId, context: 'deploy' })
recordAudit({
workspaceId: workflowData?.workspaceId || null,
actorId: actorUserId,
actorName: session?.user?.name,
actorEmail: session?.user?.email,
action: AuditAction.WORKFLOW_DEPLOYED,
resourceType: AuditResourceType.WORKFLOW,
resourceId: id,
resourceName: workflowData?.name,
description: `Deployed workflow "${workflowData?.name || id}"`,
request,
})
const responseApiKeyInfo = workflowData!.workspaceId
? 'Workspace API keys'
: 'Personal API keys'
@@ -297,11 +311,11 @@ export async function DELETE(
try {
logger.debug(`[${requestId}] Undeploying workflow: ${id}`)
- const { error, workflow: workflowData } = await validateWorkflowPermissions(
- id,
- requestId,
- 'admin'
- )
+ const {
+ error,
+ session,
+ workflow: workflowData,
+ } = await validateWorkflowPermissions(id, requestId, 'admin')
if (error) {
return createErrorResponse(error.message, error.status)
}
@@ -325,6 +339,19 @@ export async function DELETE(
// Silently fail
}
recordAudit({
workspaceId: workflowData?.workspaceId || null,
actorId: session!.user.id,
actorName: session?.user?.name,
actorEmail: session?.user?.email,
action: AuditAction.WORKFLOW_UNDEPLOYED,
resourceType: AuditResourceType.WORKFLOW,
resourceId: id,
resourceName: workflowData?.name,
description: `Undeployed workflow "${workflowData?.name || id}"`,
request,
})
return createSuccessResponse({
isDeployed: false,
deployedAt: null,

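Both deploy and undeploy now destructure session from validateWorkflowPermissions so the audit entries can carry the actor's name and email. The helper's definition is not part of this diff; the return shape its call sites imply, stated as an assumption:

// Implied (not shown in this diff) result shape of validateWorkflowPermissions.
interface ValidateWorkflowPermissionsResult {
  error?: { message: string; status: number }
  session?: {
    user: { id: string; name?: string | null; email?: string | null }
  } | null
  workflow?: {
    workspaceId: string | null
    name: string
    // ...remaining workflow columns
  } | null
}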
View File

@@ -2,6 +2,7 @@ import { db, workflow, workflowDeploymentVersion } from '@sim/db'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { env } from '@/lib/core/config/env'
import { generateRequestId } from '@/lib/core/utils/request'
import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
@@ -22,7 +23,11 @@ export async function POST(
const { id, version } = await params
try {
- const { error } = await validateWorkflowPermissions(id, requestId, 'admin')
+ const {
+ error,
+ session,
+ workflow: workflowRecord,
+ } = await validateWorkflowPermissions(id, requestId, 'admin')
if (error) {
return createErrorResponse(error.message, error.status)
}
@@ -107,6 +112,19 @@ export async function POST(
logger.error('Error sending workflow reverted event to socket server', e)
}
recordAudit({
workspaceId: workflowRecord?.workspaceId ?? null,
actorId: session!.user.id,
action: AuditAction.WORKFLOW_DEPLOYMENT_REVERTED,
resourceType: AuditResourceType.WORKFLOW,
resourceId: id,
actorName: session!.user.name ?? undefined,
actorEmail: session!.user.email ?? undefined,
resourceName: workflowRecord?.name ?? undefined,
description: `Reverted workflow to deployment version ${version}`,
request,
})
return createSuccessResponse({
message: 'Reverted to deployment version',
lastSaved: Date.now(),

View File

@@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { generateRequestId } from '@/lib/core/utils/request'
import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
@@ -297,6 +298,19 @@ export async function PATCH(
}
}
recordAudit({
workspaceId: workflowData?.workspaceId,
actorId: actorUserId,
actorName: session?.user?.name,
actorEmail: session?.user?.email,
action: AuditAction.WORKFLOW_DEPLOYMENT_ACTIVATED,
resourceType: AuditResourceType.WORKFLOW,
resourceId: id,
description: `Activated deployment version ${versionNum}`,
metadata: { version: versionNum },
request,
})
return createSuccessResponse({
success: true,
deployedAt: result.deployedAt,

View File

@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
@@ -61,6 +62,20 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
`[${requestId}] Successfully duplicated workflow ${sourceWorkflowId} to ${result.id} in ${elapsed}ms`
)
recordAudit({
workspaceId: workspaceId || null,
actorId: userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.WORKFLOW_DUPLICATED,
resourceType: AuditResourceType.WORKFLOW,
resourceId: result.id,
resourceName: result.name,
description: `Duplicated workflow from ${sourceWorkflowId}`,
metadata: { sourceWorkflowId },
request: req,
})
return NextResponse.json(result, { status: 201 })
} catch (error) {
if (error instanceof Error) {

View File

@@ -5,7 +5,7 @@
* @vitest-environment node
*/
- import { loggerMock, setupGlobalFetchMock } from '@sim/testing'
+ import { auditMock, loggerMock, setupGlobalFetchMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
@@ -23,6 +23,8 @@ vi.mock('@/lib/auth', () => ({
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/audit/log', () => auditMock)
vi.mock('@/lib/workflows/persistence/utils', () => ({
loadWorkflowFromNormalizedTables: (workflowId: string) =>
mockLoadWorkflowFromNormalizedTables(workflowId),

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkHybridAuth, checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { env } from '@/lib/core/config/env'
import { PlatformEvents } from '@/lib/core/telemetry'
@@ -336,6 +337,19 @@ export async function DELETE(
// Don't fail the deletion if Socket.IO notification fails
}
recordAudit({
workspaceId: workflowData.workspaceId || null,
actorId: userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.WORKFLOW_DELETED,
resourceType: AuditResourceType.WORKFLOW,
resourceId: workflowId,
resourceName: workflowData.name,
description: `Deleted workflow "${workflowData.name}"`,
request,
})
return NextResponse.json({ success: true }, { status: 200 })
} catch (error: any) {
const elapsed = Date.now() - startTime

View File

@@ -5,6 +5,7 @@
* @vitest-environment node
*/
import {
auditMock,
databaseMock,
defaultMockUser,
mockAuth,
@@ -27,6 +28,8 @@ describe('Workflow Variables API Route', () => {
vi.doMock('@sim/db', () => databaseMock)
vi.doMock('@/lib/audit/log', () => auditMock)
vi.doMock('@/lib/workflows/utils', () => ({
authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission,
}))

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
@@ -79,6 +80,19 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
})
.where(eq(workflow.id, workflowId))
recordAudit({
workspaceId: workflowData.workspaceId ?? null,
actorId: userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.WORKFLOW_VARIABLES_UPDATED,
resourceType: AuditResourceType.WORKFLOW,
resourceId: workflowId,
resourceName: workflowData.name ?? undefined,
description: `Updated workflow variables`,
request: req,
})
return NextResponse.json({ success: true })
} catch (validationError) {
if (validationError instanceof z.ZodError) {

View File

@@ -0,0 +1,137 @@
/**
* @vitest-environment node
*/
import { auditMock, createMockRequest, mockConsoleLogger, setupCommonApiMocks } from '@sim/testing'
import { drizzleOrmMock } from '@sim/testing/mocks'
import { beforeEach, describe, expect, it, vi } from 'vitest'
const mockCheckSessionOrInternalAuth = vi.fn()
const mockGetUserEntityPermissions = vi.fn()
const mockDbSelect = vi.fn()
const mockDbInsert = vi.fn()
const mockWorkflowCreated = vi.fn()
vi.mock('drizzle-orm', () => ({
...drizzleOrmMock,
min: vi.fn((field) => ({ type: 'min', field })),
}))
vi.mock('@/lib/audit/log', () => auditMock)
describe('Workflows API Route - POST ordering', () => {
beforeEach(() => {
vi.resetModules()
vi.clearAllMocks()
setupCommonApiMocks()
mockConsoleLogger()
vi.stubGlobal('crypto', {
randomUUID: vi.fn().mockReturnValue('workflow-new-id'),
})
mockCheckSessionOrInternalAuth.mockResolvedValue({
success: true,
userId: 'user-123',
userName: 'Test User',
userEmail: 'test@example.com',
})
mockGetUserEntityPermissions.mockResolvedValue('write')
vi.doMock('@sim/db', () => ({
db: {
select: (...args: unknown[]) => mockDbSelect(...args),
insert: (...args: unknown[]) => mockDbInsert(...args),
},
}))
vi.doMock('@/lib/auth/hybrid', () => ({
checkSessionOrInternalAuth: (...args: unknown[]) => mockCheckSessionOrInternalAuth(...args),
}))
vi.doMock('@/lib/workspaces/permissions/utils', () => ({
getUserEntityPermissions: (...args: unknown[]) => mockGetUserEntityPermissions(...args),
workspaceExists: vi.fn(),
}))
vi.doMock('@/app/api/workflows/utils', () => ({
verifyWorkspaceMembership: vi.fn(),
}))
vi.doMock('@/lib/core/telemetry', () => ({
PlatformEvents: {
workflowCreated: (...args: unknown[]) => mockWorkflowCreated(...args),
},
}))
})
it('uses top insertion against mixed siblings (folders + workflows)', async () => {
const minResultsQueue: Array<Array<{ minOrder: number }>> = [
[{ minOrder: 5 }],
[{ minOrder: 2 }],
]
mockDbSelect.mockImplementation(() => ({
from: vi.fn().mockReturnValue({
where: vi.fn().mockImplementation(() => Promise.resolve(minResultsQueue.shift() ?? [])),
}),
}))
let insertedValues: Record<string, unknown> | null = null
mockDbInsert.mockReturnValue({
values: vi.fn().mockImplementation((values: Record<string, unknown>) => {
insertedValues = values
return Promise.resolve(undefined)
}),
})
const req = createMockRequest('POST', {
name: 'New Workflow',
description: 'desc',
color: '#3972F6',
workspaceId: 'workspace-123',
folderId: null,
})
const { POST } = await import('@/app/api/workflows/route')
const response = await POST(req)
const data = await response.json()
expect(response.status).toBe(200)
expect(data.sortOrder).toBe(1)
expect(insertedValues).not.toBeNull()
expect(insertedValues?.sortOrder).toBe(1)
})
it('defaults to sortOrder 0 when there are no siblings', async () => {
const minResultsQueue: Array<Array<{ minOrder: number }>> = [[], []]
mockDbSelect.mockImplementation(() => ({
from: vi.fn().mockReturnValue({
where: vi.fn().mockImplementation(() => Promise.resolve(minResultsQueue.shift() ?? [])),
}),
}))
let insertedValues: Record<string, unknown> | null = null
mockDbInsert.mockReturnValue({
values: vi.fn().mockImplementation((values: Record<string, unknown>) => {
insertedValues = values
return Promise.resolve(undefined)
}),
})
const req = createMockRequest('POST', {
name: 'New Workflow',
description: 'desc',
color: '#3972F6',
workspaceId: 'workspace-123',
folderId: null,
})
const { POST } = await import('@/app/api/workflows/route')
const response = await POST(req)
const data = await response.json()
expect(response.status).toBe(200)
expect(data.sortOrder).toBe(0)
expect(insertedValues?.sortOrder).toBe(0)
})
})

View File

@@ -1,9 +1,10 @@
import { db } from '@sim/db'
- import { permissions, workflow } from '@sim/db/schema'
+ import { permissions, workflow, workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, asc, eq, inArray, isNull, min } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getUserEntityPermissions, workspaceExists } from '@/lib/workspaces/permissions/utils'
@@ -161,12 +162,33 @@ export async function POST(req: NextRequest) {
if (providedSortOrder !== undefined) {
sortOrder = providedSortOrder
} else {
- const folderCondition = folderId ? eq(workflow.folderId, folderId) : isNull(workflow.folderId)
- const [minResult] = await db
- .select({ minOrder: min(workflow.sortOrder) })
- .from(workflow)
- .where(and(eq(workflow.workspaceId, workspaceId), folderCondition))
- sortOrder = (minResult?.minOrder ?? 1) - 1
+ const workflowParentCondition = folderId
+ ? eq(workflow.folderId, folderId)
+ : isNull(workflow.folderId)
+ const folderParentCondition = folderId
+ ? eq(workflowFolder.parentId, folderId)
+ : isNull(workflowFolder.parentId)
+ const [[workflowMinResult], [folderMinResult]] = await Promise.all([
+ db
+ .select({ minOrder: min(workflow.sortOrder) })
+ .from(workflow)
+ .where(and(eq(workflow.workspaceId, workspaceId), workflowParentCondition)),
+ db
+ .select({ minOrder: min(workflowFolder.sortOrder) })
+ .from(workflowFolder)
+ .where(and(eq(workflowFolder.workspaceId, workspaceId), folderParentCondition)),
+ ])
+ const minSortOrder = [workflowMinResult?.minOrder, folderMinResult?.minOrder].reduce<
+ number | null
+ >((currentMin, candidate) => {
+ if (candidate == null) return currentMin
+ if (currentMin == null) return candidate
+ return Math.min(currentMin, candidate)
+ }, null)
+ sortOrder = minSortOrder != null ? minSortOrder - 1 : 0
}
await db.insert(workflow).values({
@@ -188,6 +210,20 @@ export async function POST(req: NextRequest) {
logger.info(`[${requestId}] Successfully created empty workflow ${workflowId}`)
recordAudit({
workspaceId,
actorId: userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.WORKFLOW_CREATED,
resourceType: AuditResourceType.WORKFLOW,
resourceId: workflowId,
resourceName: name,
description: `Created workflow "${name}"`,
metadata: { name },
request: req,
})
return NextResponse.json({
id: workflowId,
name,

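The ordering change above takes the minimum sortOrder across sibling workflows and sibling folders, treating a missing minimum as "no siblings in that table". The reduce, distilled into a pure function covering the same two cases the new tests exercise:

// Null-aware minimum, matching the route: null/undefined means no rows.
function nextTopSortOrder(mins: Array<number | null | undefined>): number {
  const minSortOrder = mins.reduce<number | null>((currentMin, candidate) => {
    if (candidate == null) return currentMin
    if (currentMin == null) return candidate
    return Math.min(currentMin, candidate)
  }, null)
  return minSortOrder != null ? minSortOrder - 1 : 0
}

nextTopSortOrder([5, 2]) // => 1 (mixed siblings: new item goes above the lowest)
nextTopSortOrder([null, null]) // => 0 (no siblings at all)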
View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq, not } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -86,6 +87,19 @@ export async function PUT(
updatedAt: apiKey.updatedAt,
})
recordAudit({
workspaceId,
actorId: userId,
action: AuditAction.API_KEY_UPDATED,
resourceType: AuditResourceType.API_KEY,
resourceId: keyId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: name,
description: `Updated workspace API key: ${name}`,
request,
})
logger.info(`[${requestId}] Updated workspace API key: ${keyId} in workspace ${workspaceId}`)
return NextResponse.json({ key: updatedKey })
} catch (error: unknown) {
@@ -123,12 +137,27 @@ export async function DELETE(
.where(
and(eq(apiKey.workspaceId, workspaceId), eq(apiKey.id, keyId), eq(apiKey.type, 'workspace'))
)
- .returning({ id: apiKey.id })
+ .returning({ id: apiKey.id, name: apiKey.name })
if (deletedRows.length === 0) {
return NextResponse.json({ error: 'API key not found' }, { status: 404 })
}
const deletedKey = deletedRows[0]
recordAudit({
workspaceId,
actorId: userId,
action: AuditAction.API_KEY_REVOKED,
resourceType: AuditResourceType.API_KEY,
resourceId: keyId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: deletedKey.name,
description: `Revoked workspace API key: ${deletedKey.name}`,
request,
})
logger.info(`[${requestId}] Deleted workspace API key: ${keyId} from workspace ${workspaceId}`)
return NextResponse.json({ success: true })
} catch (error: unknown) {

Some files were not shown because too many files have changed in this diff.