Merge branch 'main' into fix-24816
(mirror of https://github.com/directus/directus.git)
(deleted changeset file; path not captured)
@@ -1,5 +0,0 @@
----
-'@directus/app': patch
----
-
-Hide "Create new" and "Add existing" buttons on o2m fields with unique constraint
.github/actions/prepare/action.yml (vendored): 2 changes
@@ -32,6 +32,6 @@ runs:
       if: inputs.build == 'true'
       shell: bash
       env:
-        npm_config_workspace_concurrency: 1
+        npm_config_workspace_concurrency: 2
         NODE_OPTIONS: --max_old_space_size=6144
       run: pnpm run build
.github/workflows/changeset-check.yml (vendored, new file): +176
@@ -0,0 +1,176 @@
name: Changeset Check

on:
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - labeled
      - unlabeled
    branches:
      - main

permissions:
  contents: read
  pull-requests: write

jobs:
  changeset-check:
    name: Changeset Check
    runs-on: ubuntu-latest
    steps:
      - name: Check Label
        if: contains(github.event.pull_request.labels.*.name, 'No Changeset')
        run: |
          echo "✅ No Changeset label present"
          exit 0

      - name: Fetch Changesets
        if: ${{ ! contains(github.event.pull_request.labels.*.name, 'No Changeset') }}
        id: cs
        uses: tj-actions/changed-files@v46
        with:
          files_yaml: |
            changeset:
              - '.changeset/*.md'
          separator: ','

      - name: Found Changeset
        id: found_changeset
        if:
          ${{ ! contains(github.event.pull_request.labels.*.name, 'No Changeset') &&
          steps.cs.outputs.changeset_added_files != '' }}
        run: |
          echo "✅ Found changeset file"
          echo "found=true" >> $GITHUB_OUTPUT

      - name: Missing Changeset
        if:
          ${{ ! contains(github.event.pull_request.labels.*.name, 'No Changeset') &&
          steps.cs.outputs.changeset_added_files == '' }}
        run: |
          echo "❌ Pull request must add a changeset or have the 'No Changeset' label."
          exit 1

      - name: Checkout Repository
        if: steps.found_changeset.outputs.found == 'true'
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Prepare
        if: steps.found_changeset.outputs.found == 'true'
        uses: ./.github/actions/prepare
        with:
          build: false

      - name: Install Workflow Dependency
        if: steps.found_changeset.outputs.found == 'true'
        run: pnpm add @changesets/git@3 --workspace-root

      - name: Validate Changeset Coverage
        if: steps.found_changeset.outputs.found == 'true'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const { getChangedPackagesSinceRef } = require('@changesets/git');

            try {
              const cwd = process.cwd();

              // 1. Get packages that actually changed in this PR/branch
              core.info('🔍 Detecting changed packages since main...');
              const changedPackages = await getChangedPackagesSinceRef({
                cwd,
                ref: 'origin/main'
              });

              // 2. Filter out private packages
              const publicChangedPackages = changedPackages.filter(pkg => {
                const isPrivate = pkg.packageJson.private === true;
                if (isPrivate) {
                  core.info(`🔒 Skipping private package: ${pkg.packageJson.name}`);
                }
                return !isPrivate;
              });

              const publicChangedPackageNames = publicChangedPackages.map(pkg => pkg.packageJson.name);
              core.info(`📦 Public changed packages: ${JSON.stringify(publicChangedPackageNames)}`);

              // 3. Parse changeset files added in this PR
              const changesetFiles = `${{ steps.cs.outputs.changeset_added_files }}`.split(',').filter(f => f);
              core.info(`📝 Added changeset files: ${JSON.stringify(changesetFiles)}`);

              const packagesInChangesets = new Set();

              for (const file of changesetFiles) {
                if (!fs.existsSync(file)) {
                  core.warning(`⚠️ Changeset file not found: ${file}`);
                  continue;
                }

                const content = fs.readFileSync(file, 'utf8');
                const frontmatterMatch = content.match(/^---\n([\s\S]*?)\n---/);

                if (frontmatterMatch) {
                  const frontmatter = frontmatterMatch[1];
                  // Parse YAML frontmatter to extract package names
                  const packageLines = frontmatter.split('\n').filter(line =>
                    line.trim() && !line.startsWith('#') && line.includes(':')
                  );

                  packageLines.forEach(line => {
                    // Match valid npm package names (scoped or unscoped)
                    // Scoped: @scope/package-name, Unscoped: package-name
                    const packageMatch = line.match(/["']?(@[a-z0-9-_.]+\/[a-z0-9-_.]+|[a-z0-9-_.]+)["']?\s*:/i);
                    if (packageMatch) {
                      packagesInChangesets.add(packageMatch[1].trim());
                    }
                  });
                }
              }

              core.info(`📋 Packages covered by changesets: ${JSON.stringify(Array.from(packagesInChangesets))}`);

              // 4. Compare: find public packages that changed but aren't in changesets
              const uncoveredPackages = publicChangedPackageNames.filter(pkg =>
                !packagesInChangesets.has(pkg)
              );

              if (uncoveredPackages.length > 0) {
                const errorMessage = [
                  '❌ The following public packages are changed but NOT covered by changesets:',
                  ...uncoveredPackages.map(pkg => ` - ${pkg}`),
                  '',
                  '💡 Please add these packages to your changeset or create additional changesets'
                ].join('\n');

                core.setFailed(errorMessage);
                return;
              }

              // 5. Also check for packages in changesets that didn't actually change (optional warning)
              const extraPackages = Array.from(packagesInChangesets).filter(pkg =>
                !publicChangedPackageNames.includes(pkg)
              );

              if (extraPackages.length > 0) {
                const warningMessage = [
                  '⚠️ The following packages are in changesets but have no changes:',
                  ...extraPackages.map(pkg => ` - ${pkg}`),
                  'This is usually okay for dependency bumps or cross-package updates'
                ].join('\n');

                core.warning(warningMessage);
              }

              core.info(publicChangedPackageNames.length === 0
                ? '✅ No public packages changed - validation passed'
                : '✅ All public changed packages are covered by changesets!'
              );

            } catch (error) {
              core.setFailed(`❌ Error validating changeset coverage: ${error.message}\n${error.stack}`);
            }
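For context, a quick sketch of what the validation script's two regexes extract from a changeset file. The sample changeset below is hypothetical; the real workflow reads the files listed by tj-actions/changed-files:

```ts
// Hypothetical changeset contents, matching the .changeset/*.md format.
const sample = [
	'---',
	"'@directus/app': patch",
	'---',
	'',
	'Hide "Create new" and "Add existing" buttons on o2m fields with unique constraint',
].join('\n');

// Same two regexes as the workflow script above.
const frontmatterMatch = sample.match(/^---\n([\s\S]*?)\n---/);
const packages = new Set<string>();

for (const line of frontmatterMatch?.[1]?.split('\n') ?? []) {
	if (!line.trim() || line.startsWith('#') || !line.includes(':')) continue;
	const m = line.match(/["']?(@[a-z0-9-_.]+\/[a-z0-9-_.]+|[a-z0-9-_.]+)["']?\s*:/i);
	if (m?.[1]) packages.add(m[1].trim());
}

console.log([...packages]); // [ '@directus/app' ]
```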
.github/workflows/prepare-release.yml (vendored, new file): +214
@@ -0,0 +1,214 @@
name: Prepare Release

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'Release version (eg: 11.12.0)'
        required: true
        type: string

permissions:
  contents: write
  pull-requests: write
  issues: write

env:
  NODE_OPTIONS: --max_old_space_size=6144

jobs:
  prepare-release:
    name: Prepare Release
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
      - name: Validate version format and existence
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          VERSION=${{ github.event.inputs.version }}
          TAG="v$VERSION"
          REPO="${{ github.repository }}"

          # Check for semver format
          if [[ ! "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-.*)?$ ]]; then
            echo "❌ Invalid version format for '$VERSION'. Expected: x.y.z or x.y.z-prerelease"
            exit 1
          fi

          # Check for tag existence
          if gh api repos/$REPO/git/refs/tags/$TAG \
            --silent >/dev/null 2>&1; then
            echo "❌ Tag '$TAG' already exists"
            exit 1
          fi

          echo "✅ Version '$VERSION' is valid and available"

      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check Crowdin PRs
        id: crowdin-check
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          # Fetch open Crowdin PR
          OPEN=$(gh pr list --state open --search "New Crowdin updates" --json url --limit 10)
          # Fetch merged Crowdin PR in Next Release milestone
          MERGED=$(gh pr list --state merged --search "New Crowdin updates milestone:\"Next Release\"" --json url --limit 10)

          # Build URL lists
          OPEN_LIST=$(echo "$OPEN" | jq -r '.[] .url' | paste -sd ", " -)
          MERGED_LIST=$(echo "$MERGED" | jq -r '.[] .url' | paste -sd ", " -)

          # Determine checkbox: checked if no open PR
          if [ -z "$OPEN_LIST" ]; then
            CHECKBOX="[x]"
          else
            CHECKBOX="[ ]"
          fi

          # Combine lists
          if [ -n "$OPEN_LIST" ] && [ -n "$MERGED_LIST" ]; then
            PRS="$OPEN_LIST, $MERGED_LIST"
          else
            PRS="${OPEN_LIST}${MERGED_LIST}"
          fi

          # If list is empty
          if [ -z "$PRS" ]; then
            PRS="No Crowdin PR found"
            CHECKBOX="[x]"
          fi

          echo "crowdin_task=- $CHECKBOX Merge Crowdin PR: $PRS" >> $GITHUB_OUTPUT

          echo "✅ Crowdin PR check completed"

      - name: Prepare
        uses: ./.github/actions/prepare

      - name: Generate release notes and update versions
        id: changeset-version
        env:
          GITHUB_TOKEN: ${{ github.token }}
          DIRECTUS_VERSION: ${{ github.event.inputs.version }}
        run: |
          echo "🔄 Running changeset version..."

          # Capture the changeset version output
          OUTPUT=$(pnpm changeset version 2>&1)
          echo "$OUTPUT"

          # Extract release notes from the output
          # The release notes generator outputs between the divider lines
          RELEASE_NOTES=$(echo "$OUTPUT" | awk -v RS="==============================================================" 'NR==3 {print}' | sed '1{/^$/d}')

          # Output release notes
          {
            echo "release_notes<<EOF"
            echo "$RELEASE_NOTES"
            echo "EOF"
          } >> $GITHUB_OUTPUT

          # Verify version was updated in main package
          PACKAGE_VERSION=$(node -p "require('./directus/package.json').version")
          if [ "$PACKAGE_VERSION" != "${{ github.event.inputs.version }}" ]; then
            echo "❌ Version mismatch: package.json shows $PACKAGE_VERSION, expected ${{ github.event.inputs.version }}"
            exit 1
          fi

          echo "✅ Version updated successfully to ${{ github.event.inputs.version }}"

      - name: Verify changeset cleanup
        run: |
          # Count remaining changeset files (excluding config)
          CHANGESET_COUNT=$(find .changeset -name "*.md" -not -name "config.json" | wc -l)
          if [ $CHANGESET_COUNT -ne 0 ]; then
            echo "❌ Changesets not properly cleared. Found $CHANGESET_COUNT remaining files:"
            find .changeset -name "*.md" -not -name "config.json"
            exit 1
          fi
          echo "✅ Changesets cleared successfully"

      - name: Create release PR
        id: create-pr
        env:
          GH_TOKEN: ${{ secrets.RELEASE_PAT }}
        run: |
          VERSION=${{ github.event.inputs.version }}
          TAG="v$VERSION"
          BRANCH_NAME="release-$VERSION"
          PR_TITLE="Release $VERSION"

          # Build the PR body content
          PR_BODY=$(cat <<'EOF'
          ### ✅ Release Checklist
          - [x] Changesets processed and cleared
          - [x] Package versions updated
          - [x] Release notes generated
          ${{ steps.crowdin-check.outputs.crowdin_task }}
          - [ ] PR reviewed and approved
          - [ ] Blackbox tests passed

          ### 🚀 Release Notes
          ```md
          ${{ steps.changeset-version.outputs.release_notes }}
          ```
          EOF
          )

          # Configure git and commit changes
          git checkout -b "$BRANCH_NAME"
          git config user.name "github-actions[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git add -A
          git commit -m "Release $VERSION"
          git push origin "$BRANCH_NAME" --force

          # Look for existing open PR with this branch
          EXISTING_PR=$(gh pr list \
            --state open \
            --head "$BRANCH_NAME" \
            --json number \
            --jq '.[0].number' || echo "")

          if [ -n "$EXISTING_PR" ]; then
            echo "🔄 Updating existing PR #$EXISTING_PR"
            gh pr edit $EXISTING_PR \
              --title "$PR_TITLE" \
              --body "$PR_BODY"
            echo "pr_number=$EXISTING_PR" >> $GITHUB_OUTPUT
            echo "new_pr=false" >> $GITHUB_OUTPUT
          else
            echo "✨ Creating new PR"
            NEW_PR=$(gh pr create \
              --title "$PR_TITLE" \
              --body "$PR_BODY" \
              --label "Release" \
              --label "Run Blackbox" \
              --label "No Changeset" \
              --head "$BRANCH_NAME" \
              --base "main" \
              --draft=false \
              | grep -o '[0-9]\+$')
            echo "pr_number=$NEW_PR" >> $GITHUB_OUTPUT
            echo "new_pr=true" >> $GITHUB_OUTPUT
          fi
          echo "tag=$TAG" >> $GITHUB_OUTPUT

      - name: Notify Slack
        if: steps.create-pr.outputs.new_pr == 'true'
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_CMS_FREEZE }}
        run: |
          PAYLOAD=$(jq -n \
            --arg tag "${{ steps.create-pr.outputs.tag }}" \
            --arg pr_number "${{ steps.create-pr.outputs.pr_number }}" \
            '{tag: $tag, pr_number: $pr_number}')
          curl -X POST -H 'Content-Type: application/json' \
            --data "$PAYLOAD" \
            "$SLACK_WEBHOOK_URL"
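The version gate above is a plain regex; for illustration, the same check in TypeScript (sample inputs are hypothetical):

```ts
// Mirrors the bash check: x.y.z with an optional -prerelease suffix.
const SEMVER = /^[0-9]+\.[0-9]+\.[0-9]+(-.*)?$/;

for (const v of ['11.12.0', '11.12.0-beta.1', 'v11.12.0', '11.12']) {
	console.log(v, SEMVER.test(v));
}
// 11.12.0 true, 11.12.0-beta.1 true, v11.12.0 false (leading "v"), 11.12 false (no patch)
```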
(ignore file; file name not captured)
@@ -2,3 +2,4 @@
 !*.scss
 !*.css
+!*.vue
Dockerfile
@@ -32,7 +32,7 @@ COPY --chown=node:node . .
 RUN <<EOF
   set -ex
   pnpm install --recursive --offline --frozen-lockfile
-  npm_config_workspace_concurrency=1 pnpm run build
+  npm_config_workspace_concurrency=2 pnpm run build
   pnpm --filter directus deploy --legacy --prod dist
   cd dist
   # Regenerate package.json file with essential fields only
(package.json for @directus/api)
@@ -1,6 +1,6 @@
 {
 	"name": "@directus/api",
-	"version": "29.1.1",
+	"version": "31.0.0",
 	"description": "Directus is a real-time API and App dashboard for managing SQL database content",
 	"keywords": [
 		"directus",
@@ -58,7 +58,7 @@
 		"dist"
 	],
 	"scripts": {
-		"build": "rimraf ./dist && tsc --project tsconfig.prod.json && copyfiles \"src/**/*.{yaml,liquid}\" -u 1 dist",
+		"build": "rimraf ./dist && tsc --project tsconfig.prod.json && copyfiles \"src/**/*.{yaml,liquid,md}\" -u 1 dist",
 		"cli": "NODE_ENV=development SERVE_APP=false tsx src/cli/run.ts",
 		"dev": "NODE_ENV=development SERVE_APP=true tsx watch --ignore extensions --clear-screen=false src/start.ts",
 		"test": "vitest run",
@@ -67,7 +67,7 @@
 	},
 	"dependencies": {
 		"@authenio/samlify-node-xmllint": "catalog:",
-		"@aws-sdk/client-ses": "catalog:",
+		"@aws-sdk/client-sesv2": "catalog:",
 		"@directus/app": "workspace:*",
 		"@directus/constants": "workspace:*",
 		"@directus/env": "workspace:*",
@@ -92,6 +92,7 @@
 		"@directus/utils": "workspace:*",
 		"@directus/validation": "workspace:*",
 		"@godaddy/terminus": "catalog:",
+		"@modelcontextprotocol/sdk": "catalog:",
 		"@rollup/plugin-alias": "catalog:",
 		"@rollup/plugin-node-resolve": "catalog:",
 		"@rollup/plugin-virtual": "catalog:",
@@ -171,8 +172,8 @@
 		"proxy-addr": "catalog:",
 		"qs": "catalog:",
 		"rate-limiter-flexible": "catalog:",
-		"rollup": "catalog:",
+		"rolldown": "catalog:",
+		"rollup": "catalog:",
 		"samlify": "catalog:",
 		"sanitize-html": "catalog:",
 		"sharp": "catalog:",
(api app entry point)
@@ -1,6 +1,7 @@
 import { useEnv } from '@directus/env';
 import { InvalidPayloadError, ServiceUnavailableError } from '@directus/errors';
 import { handlePressure } from '@directus/pressure';
+import { toBoolean } from '@directus/utils';
 import cookieParser from 'cookie-parser';
 import type { Request, RequestHandler, Response } from 'express';
 import express from 'express';
@@ -25,6 +26,7 @@ import flowsRouter from './controllers/flows.js';
 import foldersRouter from './controllers/folders.js';
 import graphqlRouter from './controllers/graphql.js';
 import itemsRouter from './controllers/items.js';
+import mcpRouter from './controllers/mcp.js';
 import metricsRouter from './controllers/metrics.js';
 import notFoundHandler from './controllers/not-found.js';
 import notificationsRouter from './controllers/notifications.js';
@@ -297,6 +299,10 @@ export default async function createApp(): Promise<express.Application> {
 	app.use('/folders', foldersRouter);
 	app.use('/items', itemsRouter);

+	if (toBoolean(env['MCP_ENABLED']) === true) {
+		app.use('/mcp', mcpRouter);
+	}
+
 	if (env['METRICS_ENABLED'] === true) {
 		app.use('/metrics', metricsRouter);
 	}
(OAuth2 auth driver)
@@ -361,12 +361,13 @@ export function createOAuth2AuthRouter(providerName: string): Router {
 		const codeVerifier = provider.generateCodeVerifier();
 		const prompt = !!req.query['prompt'];
 		const redirect = req.query['redirect'];
+		const otp = req.query['otp'];

 		if (isLoginRedirectAllowed(redirect, providerName) === false) {
 			throw new InvalidPayloadError({ reason: `URL "${redirect}" can't be used to redirect after login` });
 		}

-		const token = jwt.sign({ verifier: codeVerifier, redirect, prompt }, getSecret(), {
+		const token = jwt.sign({ verifier: codeVerifier, redirect, prompt, otp }, getSecret(), {
 			expiresIn: '5m',
 			issuer: 'directus',
 		});
@@ -402,13 +403,15 @@ export function createOAuth2AuthRouter(providerName: string): Router {
 				verifier: string;
 				redirect?: string;
 				prompt: boolean;
+				otp?: string;
 			};
 		} catch (e: any) {
 			logger.warn(e, `[OAuth2] Couldn't verify OAuth2 cookie`);
 			throw new InvalidCredentialsError();
 		}

-		const { verifier, redirect, prompt } = tokenData;
+		const { verifier, prompt, otp } = tokenData;
+		let { redirect } = tokenData;

 		const accountability: Accountability = createDefaultAccountability({
 			ip: getIPFromReq(req),
@@ -439,7 +442,7 @@ export function createOAuth2AuthRouter(providerName: string): Router {
 					codeVerifier: verifier,
 					state: req.query['state'],
 				},
-				{ session: authMode === 'session' },
+				{ session: authMode === 'session', ...(otp ? { otp: String(otp) } : {}) },
 			);
 		} catch (error: any) {
 			// Prompt user for a new refresh_token if invalidated
@@ -465,6 +468,19 @@ export function createOAuth2AuthRouter(providerName: string): Router {

 		const { accessToken, refreshToken, expires } = authResponse;

+		try {
+			const claims = verifyJWT(accessToken, getSecret()) as any;
+
+			if (claims?.enforce_tfa === true) {
+				const url = new Url(env['PUBLIC_URL'] as string).addPath('admin', 'tfa-setup');
+				if (redirect) url.setQuery('redirect', redirect);
+
+				redirect = url.toString();
+			}
+		} catch (e) {
+			logger.warn(e, `[OAuth2] Unexpected error during OAuth2 login`);
+		}
+
 		if (redirect) {
 			if (authMode === 'session') {
 				res.cookie(env['SESSION_COOKIE_NAME'] as string, accessToken, SESSION_COOKIE_OPTIONS);
(OpenID auth driver)
@@ -440,12 +440,13 @@ export function createOpenIDAuthRouter(providerName: string): Router {
 		const codeVerifier = provider.generateCodeVerifier();
 		const prompt = !!req.query['prompt'];
 		const redirect = req.query['redirect'];
+		const otp = req.query['otp'];

 		if (isLoginRedirectAllowed(redirect, providerName) === false) {
 			throw new InvalidPayloadError({ reason: `URL "${redirect}" can't be used to redirect after login` });
 		}

-		const token = jwt.sign({ verifier: codeVerifier, redirect, prompt }, getSecret(), {
+		const token = jwt.sign({ verifier: codeVerifier, redirect, prompt, otp }, getSecret(), {
 			expiresIn: (env[`AUTH_${providerName.toUpperCase()}_LOGIN_TIMEOUT`] ?? '5m') as StringValue | number,
 			issuer: 'directus',
 		});
@@ -491,6 +492,7 @@ export function createOpenIDAuthRouter(providerName: string): Router {
 				verifier: string;
 				redirect?: string;
 				prompt: boolean;
+				otp?: string;
 			};
 		} catch (e: any) {
 			logger.warn(e, `[OpenID] Couldn't verify OpenID cookie`);
@@ -498,7 +500,8 @@ export function createOpenIDAuthRouter(providerName: string): Router {
 			return res.redirect(`${url.toString()}?reason=${ErrorCode.InvalidCredentials}`);
 		}

-		const { verifier, redirect, prompt } = tokenData;
+		const { verifier, prompt, otp } = tokenData;
+		let { redirect } = tokenData;

 		const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req) });

@@ -528,7 +531,7 @@ export function createOpenIDAuthRouter(providerName: string): Router {
 				state: req.query['state'],
 				iss: req.query['iss'],
 			},
-			{ session: authMode === 'session' },
+			{ session: authMode === 'session', ...(otp ? { otp: String(otp) } : {}) },
 			);
 		} catch (error: any) {
 			// Prompt user for a new refresh_token if invalidated
@@ -556,6 +559,19 @@ export function createOpenIDAuthRouter(providerName: string): Router {

 		const { accessToken, refreshToken, expires } = authResponse;

+		try {
+			const claims = verifyJWT(accessToken, getSecret()) as any;
+
+			if (claims?.enforce_tfa === true) {
+				const url = new Url(env['PUBLIC_URL'] as string).addPath('admin', 'tfa-setup');
+				if (redirect) url.setQuery('redirect', redirect);
+
+				redirect = url.toString();
+			}
+		} catch (e) {
+			logger.warn(e, `[OpenID] Unexpected error during OpenID login`);
+		}
+
 		if (redirect) {
 			if (authMode === 'session') {
 				res.cookie(env['SESSION_COOKIE_NAME'] as string, accessToken, SESSION_COOKIE_OPTIONS);
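Both routers now thread an optional otp query parameter through the state cookie into the login call. A sketch of what a client-initiated login URL could look like; the instance URL, provider name, and code below are placeholders:

```ts
const base = 'https://example.directus.app'; // hypothetical instance
const url = new URL('/auth/login/google', base); // hypothetical provider name

url.searchParams.set('redirect', `${base}/admin/content`);
url.searchParams.set('otp', '123456'); // TOTP code collected before redirecting

// The router signs { verifier, redirect, prompt, otp } into a short-lived JWT
// cookie, then forwards { otp } to authService.login on the callback leg.
console.log(url.toString());
```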
(api constants)
@@ -61,7 +61,15 @@ export const DEFAULT_AUTH_PROVIDER = 'default';

 export const COLUMN_TRANSFORMS = ['year', 'month', 'day', 'weekday', 'hour', 'minute', 'second'];

-export const GENERATE_SPECIAL = ['uuid', 'date-created', 'role-created', 'user-created'] as const;
+export const GENERATE_SPECIAL = [
+	'uuid',
+	'date-created',
+	'date-updated',
+	'role-created',
+	'role-updated',
+	'user-created',
+	'user-updated',
+] as const;

 export const UUID_REGEX = '[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}';

(items controller)
@@ -3,7 +3,6 @@ import { isSystemCollection } from '@directus/system-data';
 import type { PrimaryKey } from '@directus/types';
 import express from 'express';
 import collectionExists from '../middleware/collection-exists.js';
-import { mergeContentVersions } from '../middleware/merge-content-versions.js';
 import { respond } from '../middleware/respond.js';
 import { validateBatch } from '../middleware/validate-batch.js';
 import { ItemsService } from '../services/items.js';
@@ -92,8 +91,8 @@ const readHandler = asyncHandler(async (req, res, next) => {
 	return next();
 });

-router.search('/:collection', collectionExists, validateBatch('read'), readHandler, mergeContentVersions, respond);
-router.get('/:collection', collectionExists, readHandler, mergeContentVersions, respond);
+router.search('/:collection', collectionExists, validateBatch('read'), readHandler, respond);
+router.get('/:collection', collectionExists, readHandler, respond);

 router.get(
 	'/:collection/:pk',
@@ -114,7 +113,6 @@ router.get(
 		return next();
 	}),
-	mergeContentVersions,
 	respond,
 );
api/src/controllers/mcp.ts (new file): +43
@@ -0,0 +1,43 @@
import { ForbiddenError } from '@directus/errors';
import { Router } from 'express';
import { DirectusMCP } from '../mcp/index.js';
import { SettingsService } from '../services/settings.js';
import asyncHandler from '../utils/async-handler.js';

const router = Router();

const mcpHandler = asyncHandler(async (req, res) => {
	const settings = new SettingsService({
		schema: req.schema,
	});

	const { mcp_enabled, mcp_allow_deletes, mcp_prompts_collection, mcp_system_prompt, mcp_system_prompt_enabled } =
		await settings.readSingleton({
			fields: [
				'mcp_enabled',
				'mcp_allow_deletes',
				'mcp_prompts_collection',
				'mcp_system_prompt',
				'mcp_system_prompt_enabled',
			],
		});

	if (!mcp_enabled) {
		throw new ForbiddenError({ reason: 'MCP must be enabled' });
	}

	const mcp = new DirectusMCP({
		promptsCollection: mcp_prompts_collection,
		allowDeletes: mcp_allow_deletes,
		systemPromptEnabled: mcp_system_prompt_enabled,
		systemPrompt: mcp_system_prompt,
	});

	mcp.handleRequest(req, res);
});

router.get('/', mcpHandler);

router.post('/', mcpHandler);

export default router;
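With the controller mounted at /mcp (see the app.ts change above), a minimal smoke call could look like the sketch below; the URL, port, and token are placeholders:

```ts
// Plain JSON-RPC over POST; the handler rejects clients that don't accept
// application/json (text/event-stream isn't supported yet).
const response = await fetch('http://localhost:8055/mcp', {
	method: 'POST',
	headers: {
		'Content-Type': 'application/json',
		'Accept': 'application/json',
		'Authorization': 'Bearer <token>',
	},
	body: JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'tools/list' }),
});

console.log(await response.json());
```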
(TFA controller)
@@ -18,6 +18,8 @@ import { TFAService } from '../services/tfa.js';
 import { UsersService } from '../services/users.js';
 import asyncHandler from '../utils/async-handler.js';
 import { sanitizeQuery } from '../utils/sanitize-query.js';
+import { DEFAULT_AUTH_PROVIDER } from '../constants.js';
+import { getDatabase } from '../database/index.js';

 const router = express.Router();

@@ -332,7 +334,15 @@ router.post(
 		throw new InvalidCredentialsError();
 	}

-	if (!req.body.password) {
+	const currentUser = await getDatabase()
+		.select('provider')
+		.from('directus_users')
+		.where({ id: req.accountability.user })
+		.first();
+
+	const requiresPassword = currentUser?.['provider'] === DEFAULT_AUTH_PROVIDER;
+
+	if (requiresPassword && !req.body.password) {
 		throw new InvalidPayloadError({ reason: `"password" is required` });
 	}

@@ -341,14 +351,16 @@
 		schema: req.schema,
 	});

-	const authService = new AuthenticationService({
-		accountability: req.accountability,
-		schema: req.schema,
-	});
+	if (requiresPassword) {
+		const authService = new AuthenticationService({
+			accountability: req.accountability,
+			schema: req.schema,
+		});

-	await authService.verifyPassword(req.accountability.user, req.body.password);
+		await authService.verifyPassword(req.accountability.user, req.body.password);
+	}

-	const { url, secret } = await service.generateTFA(req.accountability.user);
+	const { url, secret } = await service.generateTFA(req.accountability.user, requiresPassword);

 	res.locals['payload'] = { data: { secret, otpauth_url: url } };
 	return next();
(versions controller)
@@ -211,11 +211,12 @@ router.get(

 	const { outdated, mainHash } = await service.verifyHash(version['collection'], version['item'], version['hash']);

-	const current = assign({}, version['delta']);
+	const delta = version.delta ?? {};
+	delta[req.schema.collections[version.collection]!.primary] = version.item;

 	const main = await service.getMainItem(version['collection'], version['item']);

-	res.locals['payload'] = { data: { outdated, mainHash, current, main } };
+	res.locals['payload'] = { data: { outdated, mainHash, current: delta, main } };

 	return next();
 }),
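The effect of this change in isolation: the endpoint now returns the version's delta with the item's primary key injected, rather than a bare clone of the delta. A small illustration with hypothetical field names:

```ts
const version = { collection: 'articles', item: 15, delta: { title: 'Draft title' } };
const primary = 'id'; // i.e. req.schema.collections['articles'].primary

const delta: Record<string, unknown> = { ...(version.delta ?? {}) };
delta[primary] = version.item;

console.log(delta); // { title: 'Draft title', id: 15 }
```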
(MSSQL error extraction)
@@ -7,23 +7,24 @@ import {
 	ValueTooLongError,
 } from '@directus/errors';

+import type { Item } from '@directus/types';
 import getDatabase from '../../index.js';
 import type { MSSQLError } from './types.js';
-import type { Item } from '@directus/types';

 enum MSSQLErrorCodes {
 	FOREIGN_KEY_VIOLATION = 547,
 	NOT_NULL_VIOLATION = 515,
 	NUMERIC_VALUE_OUT_OF_RANGE = 220,
-	UNIQUE_VIOLATION = 2601, // or 2627
+	UNIQUE_VIOLATION_INDEX = 2601,
+	UNIQUE_VIOLATION_CONSTRAINT = 2627,
 	VALUE_LIMIT_VIOLATION = 2628,
 }

 export async function extractError(error: MSSQLError, data: Partial<Item>): Promise<MSSQLError | Error> {
 	switch (error.number) {
-		case MSSQLErrorCodes.UNIQUE_VIOLATION:
-		case 2627:
-			return await uniqueViolation();
+		case MSSQLErrorCodes.UNIQUE_VIOLATION_CONSTRAINT:
+		case MSSQLErrorCodes.UNIQUE_VIOLATION_INDEX:
+			return await uniqueViolation(error);
 		case MSSQLErrorCodes.NUMERIC_VALUE_OUT_OF_RANGE:
 			return numericValueOutOfRange();
 		case MSSQLErrorCodes.VALUE_LIMIT_VIOLATION:
@@ -36,14 +37,20 @@ export async function extractError(error: MSSQLError, data: Partial<Item>): Prom

 	return error;

-	async function uniqueViolation() {
+	async function uniqueViolation(error: MSSQLError) {
 		/**
 		 * NOTE:
-		 * SQL Server doesn't return the name of the offending column when a unique constraint is thrown:
+		 * SQL Server doesn't return the name of the offending column when a unique error is thrown:
 		 *
+		 * Constraint:
 		 * insert into [articles] ([unique]) values (@p0)
-		 * - Violation of UNIQUE KEY constraint 'UQ__articles__5A062640242004EB'.
-		 *   Cannot insert duplicate key in object 'dbo.articles'. The duplicate key value is (rijk).
+		 * - Violation of UNIQUE KEY constraint 'unique_constraint_name'. Cannot insert duplicate key in object 'dbo.article'.
+		 *   The duplicate key value is (rijk).
 		 *
+		 * Index:
+		 * insert into [articles] ([unique]) values (@p0)
+		 * - Cannot insert duplicate key row in object 'dbo.articles' with unique index 'unique_index_name'.
+		 *   The duplicate key value is (rijk).
 		 *
 		 * While it's not ideal, the best next thing we can do is extract the column name from
 		 * information_schema when this happens
@@ -57,9 +64,12 @@ export async function extractError(error: MSSQLError, data: Partial<Item>): Prom

 		if (!quoteMatches || !parenMatches) return error;

-		const keyName = quoteMatches[1]!.slice(1, -1);
+		const [keyNameMatchIndex, collectionNameMatchIndex] =
+			error.number === MSSQLErrorCodes.UNIQUE_VIOLATION_INDEX ? [1, 0] : [0, 1];

-		let collection = quoteMatches[0]!.slice(1, -1);
+		const keyName = quoteMatches[keyNameMatchIndex]!.slice(1, -1);
+
+		let collection = quoteMatches[collectionNameMatchIndex]!.slice(1, -1);
 		let field: string | null = null;

 		if (keyName) {
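To illustrate the ordering fix: the two SQL Server message shapes quote the key name and the object name in opposite order, so the match indices swap by error number. The '...'-quoted token matcher below is a simplified stand-in for the real quoteMatches extraction, which lives outside this hunk:

```ts
const UNIQUE_VIOLATION_INDEX = 2601; // 2627 = constraint violation

function pickNames(message: string, errorNumber: number) {
	const quoteMatches = message.match(/'[^']+'/g);
	if (!quoteMatches) return null;

	const [keyIdx, collectionIdx] = errorNumber === UNIQUE_VIOLATION_INDEX ? [1, 0] : [0, 1];

	return {
		keyName: quoteMatches[keyIdx]?.slice(1, -1),
		collection: quoteMatches[collectionIdx]?.slice(1, -1),
	};
}

// Constraint (2627): the constraint name is quoted first, the object second.
pickNames("Violation of UNIQUE KEY constraint 'uq_name'. Cannot insert duplicate key in object 'dbo.articles'.", 2627);
// => { keyName: 'uq_name', collection: 'dbo.articles' }

// Index (2601): the object is quoted first, the index name second.
pickNames("Cannot insert duplicate key row in object 'dbo.articles' with unique index 'ix_name'.", 2601);
// => { keyName: 'ix_name', collection: 'dbo.articles' }
```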
api/src/database/migrations/20250813A-add-mcp.ts (new file): +21
@@ -0,0 +1,21 @@
import type { Knex } from 'knex';

export async function up(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_settings', (table) => {
		table.boolean('mcp_enabled').defaultTo(false).notNullable();
		table.boolean('mcp_allow_deletes').defaultTo(false).notNullable();
		table.string('mcp_prompts_collection').defaultTo(null).nullable();
		table.boolean('mcp_system_prompt_enabled').defaultTo(true).notNullable();
		table.text('mcp_system_prompt').defaultTo(null).nullable();
	});
}

export async function down(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_settings', (table) => {
		table.dropColumn('mcp_enabled');
		table.dropColumn('mcp_allow_deletes');
		table.dropColumn('mcp_prompts_collection');
		table.dropColumn('mcp_system_prompt_enabled');
		table.dropColumn('mcp_system_prompt');
	});
}
api/src/mailer.test.ts (new file): +85
@@ -0,0 +1,85 @@
import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest';
import getMailer from './mailer.js';

// Mock the dependencies
vi.mock('@directus/env');
vi.mock('./utils/get-config-from-env.js');

// Mock useEnv
const mockUseEnv = vi.fn();
vi.mocked(await import('@directus/env')).useEnv = mockUseEnv;

// Mock getConfigFromEnv
const mockGetConfigFromEnv = vi.fn();
vi.mocked(await import('./utils/get-config-from-env.js')).getConfigFromEnv = mockGetConfigFromEnv;

describe('getMailer', () => {
	beforeEach(() => {
		vi.clearAllMocks();
		// Reset the module to clear any cached transporter
		vi.resetModules();
	});

	afterEach(() => {
		vi.clearAllMocks();
	});

	test('should not throw when creating SES transport', () => {
		mockUseEnv.mockReturnValue({
			EMAIL_TRANSPORT: 'ses',
		});

		mockGetConfigFromEnv.mockReturnValue({
			region: 'us-east-1',
			credentials: {
				accessKeyId: 'access',
				secretAccessKey: 'secret',
			},
		});

		expect(() => getMailer()).not.toThrow();
	});

	test('should not throw when creating sendmail transport', () => {
		mockUseEnv.mockReturnValue({
			EMAIL_TRANSPORT: 'sendmail',
		});

		mockGetConfigFromEnv.mockReturnValue({
			newLine: 'unix',
			path: '/usr/sbin/sendmail',
		});

		expect(() => getMailer()).not.toThrow();
	});

	test('should not throw when creating SMTP transport', () => {
		mockUseEnv.mockReturnValue({
			EMAIL_TRANSPORT: 'smtp',
		});

		mockGetConfigFromEnv.mockReturnValue({
			host: '0.0.0.0',
			port: '123',
			user: 'me',
			password: 'safe',
			name: 'test',
		});

		expect(() => getMailer()).not.toThrow();
	});

	test('should not throw when creating Mailgun transport', () => {
		mockUseEnv.mockReturnValue({
			EMAIL_TRANSPORT: 'mailgun',
		});

		mockGetConfigFromEnv.mockReturnValue({
			apiKey: 'test',
			domain: 'test',
			host: 'api.mailgun.net',
		});

		expect(() => getMailer()).not.toThrow();
	});
});
(mailer transport setup)
@@ -25,14 +25,14 @@ export default function getMailer(): Transporter {
 			path: (env['EMAIL_SENDMAIL_PATH'] as string) || '/usr/sbin/sendmail',
 		});
 	} else if (transportName === 'ses') {
-		const aws = require('@aws-sdk/client-ses');
+		const { SESv2Client, SendEmailCommand } = require('@aws-sdk/client-sesv2');

 		const sesOptions: Record<string, unknown> = getConfigFromEnv('EMAIL_SES_');

-		const ses = new aws.SES(sesOptions);
+		const sesClient = new SESv2Client(sesOptions);

 		transporter = nodemailer.createTransport({
-			SES: { ses, aws },
+			SES: { sesClient, SendEmailCommand },
 		} as Record<string, unknown>);
 	} else if (transportName === 'smtp') {
 		let auth: boolean | { user?: string; pass?: string } = false;
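The SES branch in isolation, as the diff wires it: nodemailer's SES transport now receives an SESv2 client plus the SendEmailCommand class instead of the v1 { ses, aws } pair. Region below is a placeholder for the EMAIL_SES_* config:

```ts
import nodemailer from 'nodemailer';
import { SESv2Client, SendEmailCommand } from '@aws-sdk/client-sesv2';

const sesClient = new SESv2Client({ region: 'us-east-1' }); // placeholder config

const transporter = nodemailer.createTransport({
	SES: { sesClient, SendEmailCommand },
} as Record<string, unknown>);

// transporter.sendMail({ from, to, subject, text }) now goes out as a
// SendEmailCommand against the SESv2 API.
```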
api/src/mcp/define.ts (new file): +5
@@ -0,0 +1,5 @@
import type { ToolConfig } from './types.js';

export function defineTool<Args>(tool: ToolConfig<Args>): ToolConfig<Args> {
	return tool;
}
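defineTool is an identity helper that pins the ToolConfig type. A hypothetical tool built with it; the ToolConfig fields here are inferred from how server.ts consumes tools (types.ts itself isn't part of this diff):

```ts
import { z } from 'zod';
import { defineTool } from './define.js';

export const ping = defineTool<{ message: string }>({
	name: 'ping',
	description: 'Echo a message back to the caller',
	inputSchema: z.object({ message: z.string() }),
	validateSchema: z.object({ message: z.string() }),
	async handler({ args }) {
		// server.ts wraps this in a CallToolResult with JSON-stringified data
		return { type: 'text', data: `pong: ${args.message}` };
	},
});
```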
api/src/mcp/index.ts (new file): +1
@@ -0,0 +1 @@
export * from './server.js';
api/src/mcp/schema.ts (new file): +247
@@ -0,0 +1,247 @@
import { z } from 'zod';

// PK
export const PrimaryKeyInputSchema = z.union([z.number(), z.string()]);
export const PrimaryKeyValidateSchema = z.union([z.number(), z.string()]);

// item
export const ItemInputSchema = z.record(z.string(), z.any());
export const ItemValidateSchema = z.record(z.string(), z.any());

// query
export const QueryInputSchema = z
	.object({
		fields: z.array(z.string()),
		sort: z.array(z.string()),
		filter: z.record(z.string(), z.any()),
		limit: z.number(),
		offset: z.number(),
		page: z.number(),
		search: z.string(),
		deep: z.record(z.string(), z.any()),
		alias: z.record(z.string(), z.string()),
		aggregate: z.object({
			count: z.array(z.string()),
			sum: z.array(z.string()),
			avg: z.array(z.string()),
			min: z.array(z.string()),
			max: z.array(z.string()),
		}),
		backlink: z.boolean(),
		version: z.string(),
		versionRaw: z.boolean(),
		export: z.string(),
		group: z.array(z.string()),
	})
	.partial();

export const QueryValidateSchema = QueryInputSchema;

// field
export const RawFieldItemInputSchema = z.object({
	field: z.string(),
	type: z.string(),
	name: z.string().optional(),
	children: z.union([z.array(z.record(z.string(), z.any())), z.null()]).optional(),
	collection: z.string().optional(),
	schema: z.union([z.record(z.string(), z.any()), z.null()]).optional(),
	meta: z.union([z.record(z.string(), z.any()), z.null()]).optional(),
});

export const RawFieldItemValidateSchema = RawFieldItemInputSchema;

export const FieldItemInputSchema = z.object({
	field: z.string(),
	type: z.string().nullable(),
	name: z.string().optional(),
	collection: z.string().optional(),
	schema: z.union([z.record(z.string(), z.any()), z.null()]).optional(),
	meta: z.union([z.record(z.string(), z.any()), z.null()]).optional(),
});

export const FieldItemValidateSchema = FieldItemInputSchema;

// collection
export const CollectionItemInputSchema = z.object({
	collection: z.string(),
	fields: z.array(RawFieldItemInputSchema).optional(),
	meta: z.union([z.record(z.string(), z.any()), z.null()]).optional(),
	schema: z
		.union([z.object({}), z.null()])
		.optional()
		.describe('ALWAYS an empty object for new collections. Only send `null` or `undefined` for folder collections.'),
});

export const CollectionItemValidateCreateSchema = CollectionItemInputSchema;
export const CollectionItemValidateUpdateSchema = z.object({
	collection: z.string(),
	meta: z.union([z.record(z.string(), z.any()), z.null()]).optional(),
	schema: z.union([z.record(z.string(), z.any()), z.null()]).optional(),
});

// file
export const FileItemInputSchema = z
	.object({
		id: z.string(),
		storage: z.string(),
		filename_disk: z.string(),
		filename_download: z.string(),
		title: z.union([z.string(), z.null()]),
		type: z.union([z.string(), z.null()]),
		folder: z.union([z.string(), z.null()]),
		created_on: z.string(),
		uploaded_by: z.union([z.string(), z.null()]),
		uploaded_on: z.union([z.string(), z.null()]),
		modified_by: z.union([z.string(), z.null()]),
		modified_on: z.string(),
		charset: z.union([z.string(), z.null()]),
		filesize: z.number(),
		width: z.union([z.number(), z.null()]),
		height: z.union([z.number(), z.null()]),
		duration: z.union([z.number(), z.null()]),
		embed: z.union([z.string(), z.null()]),
		description: z.union([z.string(), z.null()]),
		location: z.union([z.string(), z.null()]),
		tags: z.union([z.string(), z.null()]),
		metadata: z.union([z.record(z.string(), z.any()), z.null()]),
		focal_point_x: z.union([z.number(), z.null()]),
		focal_point_y: z.union([z.number(), z.null()]),
		tus_id: z.union([z.string(), z.null()]),
		tus_data: z.union([z.record(z.string(), z.any()), z.null()]),
	})
	.partial();

export const FileItemValidateSchema = FileItemInputSchema;

export const FileImportItemInputSchema = z.object({
	url: z.string(),
	file: FileItemInputSchema,
});

export const FileImportItemValidateSchema = z.object({
	url: z.string(),
	file: FileItemValidateSchema,
});

// operations
export const OperationItemInputSchema = z
	.object({
		id: z.string(),
		name: z.union([z.string(), z.null()]),
		key: z.string(),
		type: z.string(),
		position_x: z.number(),
		position_y: z.number(),
		options: z.record(z.string(), z.any()),
		resolve: z.union([z.string(), z.null()]),
		reject: z.union([z.string(), z.null()]),
		flow: z.string(),
		date_created: z.string(),
		user_created: z.string(),
	})
	.partial();

export const OperationItemValidateSchema = OperationItemInputSchema;

// flow
export const FlowItemInputSchema = z
	.object({
		id: z.string(),
		name: z.string(),
		icon: z.union([z.string(), z.null()]),
		color: z.union([z.string(), z.null()]),
		description: z.union([z.string(), z.null()]),
		status: z.enum(['active', 'inactive']),
		trigger: z.union([z.enum(['event', 'schedule', 'operation', 'webhook', 'manual']), z.null()]),
		options: z.union([z.record(z.string(), z.any()), z.null()]),
		operation: z.union([z.string(), z.null()]),
		operations: z.array(OperationItemInputSchema),
		date_created: z.string(),
		user_created: z.string(),
		accountability: z.union([z.enum(['all', 'activity']), z.null()]),
	})
	.partial();

export const FlowItemValidateSchema = FlowItemInputSchema;

// trigger flow
export const TriggerFlowInputSchema = z.object({
	id: PrimaryKeyInputSchema,
	collection: z.string(),
	keys: z.array(PrimaryKeyInputSchema).optional(),
	headers: z.record(z.string(), z.any()).optional(),
	query: z.record(z.string(), z.any()).optional(),
	data: z.record(z.string(), z.any()).optional(),
});

export const TriggerFlowValidateSchema = z.strictObject({
	id: PrimaryKeyValidateSchema,
	collection: z.string(),
	keys: z.array(PrimaryKeyValidateSchema).optional(),
	query: z.record(z.string(), z.any()).optional(),
	headers: z.record(z.string(), z.any()).optional(),
	data: z.record(z.string(), z.any()).optional(),
});

// folder
export const FolderItemInputSchema = z.object({
	id: PrimaryKeyInputSchema.optional(),
	name: z.string(),
	parent: z.string().optional(),
});

export const FolderItemValidateSchema = FolderItemInputSchema;

// relation
export const RelationItemInputSchema = z.object({
	collection: z.string(),
	field: z.string(),
	related_collection: z.union([z.string(), z.null()]),
	schema: z.union([z.record(z.string(), z.any()), z.null()]),
	meta: z.union([z.record(z.string(), z.any()), z.null()]),
});

const RelationMetaSchema = z.object({
	id: z.number(),
	many_collection: z.string(),
	many_field: z.string(),
	one_collection: z.string().nullable(),
	one_field: z.string().nullable(),
	one_collection_field: z.string().nullable(),
	one_allowed_collections: z.array(z.string()).nullable(),
	one_deselect_action: z.enum(['nullify', 'delete']),
	junction_field: z.string().nullable(),
	sort_field: z.string().nullable(),
	system: z.boolean().optional(),
});

const FkActionEnum = z.enum(['NO ACTION', 'RESTRICT', 'CASCADE', 'SET NULL', 'SET DEFAULT']);

export const ForeignKeySchema = z.object({
	table: z.string(),
	column: z.string(),
	foreign_key_table: z.string(),
	foreign_key_column: z.string(),
	foreign_key_schema: z.string().optional(),
	constraint_name: z.union([z.string(), z.null()]),
	on_update: z.union([FkActionEnum, z.null()]),
	on_delete: z.union([FkActionEnum, z.null()]),
});

export const RelationItemValidateCreateSchema = z.object({
	collection: z.string(),
	field: z.string(),
	related_collection: z.string().nullable(),
	schema: ForeignKeySchema.partial().nullable().optional(),
	meta: RelationMetaSchema.partial().nullable(),
});

export const RelationItemValidateUpdateSchema = z
	.object({
		collection: z.string(),
		field: z.string(),
		related_collection: z.string().nullable().optional(),
		schema: ForeignKeySchema.partial().nullable().optional(),
		meta: RelationMetaSchema.partial().nullable().optional(),
	})
	.optional();
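Usage sketch: every top-level key of the query schema is optional thanks to .partial(), so a partial query validates cleanly. The input below is hypothetical:

```ts
import { QueryValidateSchema } from './schema.js';

const parsed = QueryValidateSchema.safeParse({
	fields: ['id', 'title'],
	filter: { status: { _eq: 'published' } },
	limit: 10,
});

if (parsed.success) {
	console.log(parsed.data); // typed query, ready to hand to sanitizeQuery()
} else {
	console.error(parsed.error.issues);
}
```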
api/src/mcp/server.test.ts (new file): +1319
(File diff suppressed because it is too large.)
api/src/mcp/server.ts (new file): +394
@@ -0,0 +1,394 @@
import { useEnv } from '@directus/env';
import { ForbiddenError, InvalidPayloadError, isDirectusError } from '@directus/errors';
import type { Query } from '@directus/types';
import { isObject, parseJSON, toArray } from '@directus/utils';
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import {
	CallToolRequestSchema,
	GetPromptRequestSchema,
	InitializedNotificationSchema,
	ErrorCode as JSONRPCErrorCode,
	JSONRPCMessageSchema,
	ListPromptsRequestSchema,
	ListToolsRequestSchema,
	McpError,
	type CallToolRequest,
	type CallToolResult,
	type GetPromptRequest,
	type GetPromptResult,
	type PromptArgument,
} from '@modelcontextprotocol/sdk/types.js';
import type { Request, Response } from 'express';
import { render, tokenize } from 'micromustache';
import { z } from 'zod';
import { fromZodError } from 'zod-validation-error';
import { ItemsService } from '../services/index.js';
import { sanitizeQuery } from '../utils/sanitize-query.js';
import { Url } from '../utils/url.js';
import { findMcpTool, getAllMcpTools } from './tools/index.js';
import { DirectusTransport } from './transport.js';
import type { MCPOptions, Prompt, ToolConfig, ToolResult } from './types.js';

export class DirectusMCP {
	promptsCollection?: string | null;
	systemPrompt?: string | null;
	systemPromptEnabled?: boolean;
	server: Server;
	allowDeletes?: boolean;

	constructor(options: MCPOptions = {}) {
		this.promptsCollection = options.promptsCollection ?? null;
		this.systemPromptEnabled = options.systemPromptEnabled ?? true;
		this.systemPrompt = options.systemPrompt ?? null;
		this.allowDeletes = options.allowDeletes ?? false;

		this.server = new Server(
			{
				name: 'directus-mcp',
				version: '0.1.0',
			},
			{
				capabilities: {
					tools: {},
					prompts: {},
				},
			},
		);
	}

|
||||
/**
|
||||
* This handleRequest function is not awaiting lower level logic resulting in the actual
|
||||
* response being an asynchronous side effect happening after the function has returned
|
||||
*/
|
||||
handleRequest(req: Request, res: Response) {
|
||||
if (!req.accountability?.user && !req.accountability?.role && req.accountability?.admin !== true) {
|
||||
throw new ForbiddenError();
|
||||
}
|
||||
|
||||
if (!req.accepts('application/json')) {
|
||||
// we currently dont support "text/event-stream" requests
|
||||
res.status(405).send();
|
||||
return;
|
||||
}
|
||||
|
||||
this.server.setNotificationHandler(InitializedNotificationSchema, () => {
|
||||
res.status(202).send();
|
||||
});
|
||||
|
||||
// list prompts
|
||||
this.server.setRequestHandler(ListPromptsRequestSchema, async () => {
|
||||
const prompts = [];
|
||||
|
||||
if (!this.promptsCollection) {
|
||||
throw new McpError(1001, `A prompts collection must be set in settings`);
|
||||
}
|
||||
|
||||
const service = new ItemsService<Prompt>(this.promptsCollection, {
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
try {
|
||||
const promptList = await service.readByQuery({
|
||||
fields: ['name', 'description', 'system_prompt', 'messages'],
|
||||
});
|
||||
|
||||
for (const prompt of promptList) {
|
||||
// builds args
|
||||
const args: PromptArgument[] = [];
|
||||
|
||||
// Add system prompt as the first assistant message if it exists
|
||||
if (prompt.system_prompt) {
|
||||
for (const varName of tokenize(prompt.system_prompt).varNames) {
|
||||
args.push({
|
||||
name: varName,
|
||||
description: `Value for ${varName}`,
|
||||
required: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for (const message of prompt.messages || []) {
|
||||
for (const varName of tokenize(message.text).varNames) {
|
||||
args.push({
|
||||
name: varName,
|
||||
description: `Value for ${varName}`,
|
||||
required: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
prompts.push({
|
||||
name: prompt.name,
|
||||
description: prompt.description,
|
||||
arguments: args,
|
||||
});
|
||||
}
|
||||
|
||||
return { prompts };
|
||||
} catch (error) {
|
||||
return this.toExecutionError(error);
|
||||
}
|
||||
});
|
||||
|
||||
// get prompt
|
||||
this.server.setRequestHandler(GetPromptRequestSchema, async (request: GetPromptRequest) => {
|
||||
if (!this.promptsCollection) {
|
||||
throw new McpError(1001, `A prompts collection must be set in settings`);
|
||||
}
|
||||
|
||||
const service = new ItemsService<Prompt>(this.promptsCollection, {
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const { name: promptName, arguments: args } = request.params;
|
||||
|
||||
const promptCommand = await service.readByQuery({
|
||||
fields: ['description', 'system_prompt', 'messages'],
|
||||
filter: {
|
||||
name: {
|
||||
_eq: promptName,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const prompt = promptCommand[0];
|
||||
|
||||
if (!prompt) {
|
||||
throw new McpError(JSONRPCErrorCode.InvalidParams, `Invalid prompt "${promptName}"`);
|
||||
}
|
||||
|
||||
const messages: GetPromptResult['messages'] = [];
|
||||
|
||||
// Add system prompt as the first assistant message if it exists
|
||||
if (prompt.system_prompt) {
|
||||
messages.push({
|
||||
role: 'assistant',
|
||||
content: {
|
||||
type: 'text',
|
||||
text: render(prompt.system_prompt, args),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// render any provided args
|
||||
(prompt.messages || []).forEach((message) => {
|
||||
// skip invalid prompts
|
||||
if (!message.role || !message.text) return;
|
||||
|
||||
messages.push({
|
||||
role: message.role,
|
||||
content: {
|
||||
type: 'text',
|
||||
text: render(message.text, args),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
return this.toPromptResponse({
|
||||
messages,
|
||||
description: prompt.description,
|
||||
});
|
||||
});
|
||||
|
||||
// listing tools
|
||||
this.server.setRequestHandler(ListToolsRequestSchema, () => {
|
||||
const tools = [];
|
||||
|
||||
for (const tool of getAllMcpTools()) {
|
||||
if (req.accountability?.admin !== true && tool.admin === true) continue;
|
||||
if (tool.name === 'system-prompt' && this.systemPromptEnabled === false) continue;
|
||||
|
||||
tools.push({
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
inputSchema: z.toJSONSchema(tool.inputSchema),
|
||||
annotations: tool.annotations,
|
||||
});
|
||||
}
|
||||
|
||||
return { tools };
|
||||
});
|
||||
|
||||
// calling tools
|
||||
this.server.setRequestHandler(CallToolRequestSchema, async (request: CallToolRequest) => {
|
||||
const tool = findMcpTool(request.params.name);
|
||||
|
||||
let sanitizedQuery = {};
|
||||
|
||||
try {
|
||||
if (!tool || (tool.name === 'system-prompt' && this.systemPromptEnabled === false)) {
|
||||
throw new InvalidPayloadError({ reason: `"${request.params.name}" doesn't exist in the toolset` });
|
||||
}
|
||||
|
||||
if (req.accountability?.admin !== true && tool.admin === true) {
|
||||
throw new ForbiddenError({ reason: 'You must be an admin to access this tool' });
|
||||
}
|
||||
|
||||
if (tool.name === 'system-prompt') {
|
||||
request.params.arguments = { promptOverride: this.systemPrompt };
|
||||
}
|
||||
|
||||
// ensure json expected fields are not stringified
|
||||
if (request.params.arguments) {
|
||||
for (const field of ['data', 'keys', 'query']) {
|
||||
const arg = request.params.arguments[field];
|
||||
|
||||
if (typeof arg === 'string') {
|
||||
request.params.arguments[field] = parseJSON(arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const { error, data: args } = tool.validateSchema?.safeParse(request.params.arguments) ?? {
|
||||
data: request.params.arguments,
|
||||
};
|
||||
|
||||
if (error) {
|
||||
throw new InvalidPayloadError({ reason: fromZodError(error).message });
|
||||
}
|
||||
|
||||
if (!isObject(args)) {
|
||||
throw new InvalidPayloadError({ reason: '"arguments" must be an object' });
|
||||
}
|
||||
|
||||
if ('action' in args && args['action'] === 'delete' && !this.allowDeletes) {
|
||||
throw new InvalidPayloadError({ reason: 'Delete actions are disabled' });
|
||||
}
|
||||
|
||||
if ('query' in args && args['query']) {
|
||||
sanitizedQuery = await sanitizeQuery(
|
||||
{
|
||||
fields: (args['query'] as Query)['fields'] || '*',
|
||||
...args['query'],
|
||||
},
|
||||
req.schema,
|
||||
req.accountability || null,
|
||||
);
|
||||
}
|
||||
|
||||
const result = await tool.handler({
|
||||
args,
|
||||
sanitizedQuery,
|
||||
schema: req.schema,
|
||||
accountability: req.accountability,
|
||||
});
|
||||
|
||||
// if single item and create/read/update/import add url
|
||||
const data = toArray(result?.data);
|
||||
|
||||
if (
|
||||
'action' in args &&
|
||||
['create', 'update', 'read', 'import'].includes(args['action'] as string) &&
|
||||
result?.data &&
|
||||
data.length === 1
|
||||
) {
|
||||
result.url = this.buildURL(tool, args, data[0]);
|
||||
}
|
||||
|
||||
return this.toToolResponse(result);
|
||||
} catch (error) {
|
||||
return this.toExecutionError(error);
|
||||
}
|
||||
});
|
||||
|
||||
const transport = new DirectusTransport(res);
|
||||
|
||||
this.server.connect(transport);
|
||||
|
||||
try {
|
||||
const parsedMessage = JSONRPCMessageSchema.parse(req.body);
|
||||
transport.onmessage?.(parsedMessage);
|
||||
} catch (error) {
|
||||
transport.onerror?.(error as Error);
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
  buildURL(tool: ToolConfig<unknown>, input: unknown, data: unknown) {
    const env = useEnv();

    const publicURL = env['PUBLIC_URL'] as string | undefined;

    if (!publicURL) return;

    if (!tool.endpoint) return;

    const path = tool.endpoint({ input, data });

    if (!path) return;

    return new Url(publicURL).addPath('admin', ...path).toString();
  }

  toPromptResponse(result: {
    description?: string | undefined;
    messages: GetPromptResult['messages'];
  }): GetPromptResult {
    const response: GetPromptResult = {
      messages: result.messages,
    };

    if (result.description) {
      response.description = result.description;
    }

    return response;
  }

  toToolResponse(result?: ToolResult) {
    const response: CallToolResult = {
      content: [],
    };

    if (!result || typeof result.data === 'undefined' || result.data === null) return response;

    if (result.type === 'text') {
      response.content.push({
        type: 'text',
        text: JSON.stringify({ raw: result.data, url: result.url }),
      });
    } else {
      response.content.push(result);
    }

    return response;
  }

  toExecutionError(err: unknown) {
    const errors: { error: string; code?: string }[] = [];
    const receivedErrors: unknown[] = Array.isArray(err) ? err : [err];

    for (const error of receivedErrors) {
      if (isDirectusError(error)) {
        errors.push({
          error: error.message || 'Unknown error',
          code: error.code,
        });
      } else {
        // Handle generic errors
        let message = 'An unknown error occurred.';
        let code: string | undefined;

        if (error instanceof Error) {
          message = error.message;
          code = 'code' in error ? String(error.code) : undefined;
        } else if (typeof error === 'object' && error !== null) {
          message = 'message' in error ? String(error.message) : message;
          code = 'code' in error ? String(error.code) : undefined;
        } else if (typeof error === 'string') {
          message = error;
        }

        errors.push({ error: message, ...(code && { code }) });
      }
    }

    return {
      isError: true,
      content: [{ type: 'text' as const, text: JSON.stringify(errors) }],
    };
  }
}
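A note on the error normalization above: toExecutionError flattens whatever was thrown — a DirectusError, a plain Error, an arbitrary object, or a string — into { error, code? } records before serializing them into the tool response. A minimal standalone sketch of that normalization rule (illustrative only, not the class method itself):

type ToolError = { error: string; code?: string };

function normalizeError(err: unknown): ToolError {
  // DirectusError handling is omitted here; this covers the generic branches.
  if (err instanceof Error) {
    return { error: err.message, ...('code' in err ? { code: String((err as { code: unknown }).code) } : {}) };
  }

  if (typeof err === 'object' && err !== null && 'message' in err) {
    return { error: String((err as { message: unknown }).message) };
  }

  if (typeof err === 'string') return { error: err };

  return { error: 'An unknown error occurred.' };
}

// normalizeError(new RangeError('out of bounds')) => { error: 'out of bounds' }
// normalizeError('boom') => { error: 'boom' }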
280
api/src/mcp/tools/assets.test.ts
Normal file
@@ -0,0 +1,280 @@
import type { Accountability, SchemaOverview } from '@directus/types';
import { afterEach, beforeEach, describe, expect, test, vi, type MockedFunction } from 'vitest';
import { AssetsService } from '../../services/assets.js';
import { FilesService } from '../../services/files.js';
import { assets } from './assets.js';

vi.mock('../../services/assets.js');
vi.mock('../../services/files.js');

vi.mock('../tool.js', () => ({
  defineTool: vi.fn((config) => config),
}));

describe('assets tool', () => {
  const mockSchema = {} as SchemaOverview;
  const mockAccountability = { user: 'test-user' } as Accountability;
  const mockSanitizedQuery = { fields: ['*'] };

  afterEach(() => {
    vi.clearAllMocks();
  });

  describe('asset operations', () => {
    let mockFilesService: {
      readOne: MockedFunction<any>;
    };

    let mockAssetsService: {
      getAsset: MockedFunction<any>;
    };

    beforeEach(() => {
      mockFilesService = {
        readOne: vi.fn(),
      };

      mockAssetsService = {
        getAsset: vi.fn(),
      };

      vi.mocked(AssetsService).mockImplementation(() => mockAssetsService as unknown as AssetsService);
      vi.mocked(FilesService).mockImplementation(() => mockFilesService as unknown as FilesService);
    });

    describe('READ asset', () => {
      test.each(['audio/wav', 'image/png'])(
        'should read asset and return base64 encoded data for valid file types',
        async (fileType) => {
          const assetId = 'asset-123';

          const mockChunks = [Buffer.from('chunk1'), Buffer.from('chunk2'), Buffer.from('chunk3')];

          // Create an async generator to simulate the stream
          async function* mockStream() {
            for (const chunk of mockChunks) {
              yield chunk;
            }
          }

          mockAssetsService.getAsset.mockResolvedValue({
            file: {
              type: fileType,
            },
            stream: mockStream(),
          });

          mockFilesService.readOne.mockResolvedValue({
            type: fileType,
          });

          const result = await assets.handler({
            args: {
              id: assetId,
            },
            schema: mockSchema,
            accountability: mockAccountability,
            sanitizedQuery: mockSanitizedQuery,
          });

          expect(AssetsService).toHaveBeenCalledWith({
            accountability: mockAccountability,
            schema: mockSchema,
          });

          expect(mockFilesService.readOne).toHaveBeenCalledWith(assetId, { limit: 1 });
          expect(mockAssetsService.getAsset).toHaveBeenCalledWith(assetId, undefined);

          const expectedBuffer = Buffer.concat(mockChunks);

          expect(result).toEqual({
            type: fileType.startsWith('image') ? 'image' : 'audio',
            data: expectedBuffer.toString('base64'),
            mimeType: fileType,
          });
        },
      );

      test('should handle empty stream', async () => {
        const assetId = 'asset-123';
        const fileType = 'image/png';

        async function* emptyStream() {
          // Empty generator
        }

        mockFilesService.readOne.mockResolvedValue({
          type: fileType,
        });

        mockAssetsService.getAsset.mockResolvedValue({
          file: {
            type: fileType,
          },
          stream: emptyStream(),
        });

        const result = await assets.handler({
          args: {
            id: assetId,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(result).toEqual({
          type: 'image',
          data: Buffer.concat([]).toString('base64'),
          mimeType: 'image/png',
        });
      });

      describe('should downsize images larger than 1200px in width or height', () => {
        test('should downsize to 800px width if width>height', async () => {
          const assetId = 'asset-123';
          const fileType = 'image/png';

          const transforms = {
            transformationParams: {
              transforms: [['resize', { width: 800, fit: 'contain' }]],
            },
          };

          async function* emptyStream() {
            // Empty generator
          }

          mockFilesService.readOne.mockResolvedValue({
            type: fileType,
            width: 1300,
            height: 500,
          });

          mockAssetsService.getAsset.mockResolvedValue({
            file: {
              type: fileType,
            },
            stream: emptyStream(),
          });

          await assets.handler({
            args: {
              id: assetId,
            },
            schema: mockSchema,
            accountability: mockAccountability,
            sanitizedQuery: mockSanitizedQuery,
          });

          expect(mockAssetsService.getAsset).toBeCalledWith(assetId, transforms);
        });

        test('should downsize to 800px height if width<height', async () => {
          const assetId = 'asset-123';
          const fileType = 'image/png';

          const transforms = {
            transformationParams: {
              transforms: [['resize', { height: 800, fit: 'contain' }]],
            },
          };

          async function* emptyStream() {
            // Empty generator
          }

          mockFilesService.readOne.mockResolvedValue({
            type: fileType,
            width: 500,
            height: 1300,
          });

          mockAssetsService.getAsset.mockResolvedValue({
            file: {
              type: fileType,
            },
            stream: emptyStream(),
          });

          await assets.handler({
            args: {
              id: assetId,
            },
            schema: mockSchema,
            accountability: mockAccountability,
            sanitizedQuery: mockSanitizedQuery,
          });

          expect(mockAssetsService.getAsset).toBeCalledWith(assetId, transforms);
        });
      });
    });
  });

  describe('error handling', () => {
    let mockFilesService: {
      readOne: MockedFunction<any>;
    };

    let mockAssetsService: {
      getAsset: MockedFunction<any>;
    };

    beforeEach(() => {
      mockFilesService = {
        readOne: vi.fn(),
      };

      mockAssetsService = {
        getAsset: vi.fn(),
      };

      vi.mocked(AssetsService).mockImplementation(() => mockAssetsService as unknown as AssetsService);
      vi.mocked(FilesService).mockImplementation(() => mockFilesService as unknown as FilesService);
    });

    test.each([null, 'application/pdf', 'text/plain'])(
      'should throw UnsupportedMediaType error for invalid file type',
      async (fileType) => {
        const assetId = 'asset-123';

        mockFilesService.readOne.mockResolvedValue({
          type: fileType,
        });

        await expect(
          assets.handler({
            args: {
              id: assetId,
            },
            schema: mockSchema,
            accountability: mockAccountability,
            sanitizedQuery: mockSanitizedQuery,
          }),
        ).rejects.toThrow(`Unsupported media type "${fileType === null ? 'unknown' : fileType}" in asset tool.`);

        expect(mockAssetsService.getAsset).not.toBeCalled();
      },
    );
  });

  describe('tool configuration', () => {
    test('should have correct tool name', () => {
      expect(assets.name).toBe('assets');
    });

    test('should not be admin tool', () => {
      expect(assets.admin).toBeUndefined();
    });

    test('should have description', () => {
      expect(assets.description).toBeDefined();
    });

    test('should have input and validation schemas', () => {
      expect(assets.inputSchema).toBeDefined();
      expect(assets.validateSchema).toBeDefined();
    });
  });
});
69
api/src/mcp/tools/assets.ts
Normal file
@@ -0,0 +1,69 @@
import { UnsupportedMediaTypeError } from '@directus/errors';
import type { TransformationSet } from '@directus/types';
import { z } from 'zod';
import { AssetsService } from '../../services/assets.js';
import { FilesService } from '../../services/files.js';
import { defineTool } from '../define.js';
import prompts from './prompts/index.js';

const AssetsValidateSchema = z.strictObject({
  id: z.string(),
});

const AssetsInputSchema = z.object({
  id: z.string(),
});

export const assets = defineTool<z.infer<typeof AssetsValidateSchema>>({
  name: 'assets',
  description: prompts.assets,
  annotations: {
    title: 'Directus - Assets',
  },
  inputSchema: AssetsInputSchema,
  validateSchema: AssetsValidateSchema,
  async handler({ args, schema, accountability }) {
    const serviceOptions = {
      accountability,
      schema,
    };

    const filesService = new FilesService(serviceOptions);

    const file = await filesService.readOne(args.id, { limit: 1 });

    if (!file.type || !['image', 'audio'].some((t) => file.type?.startsWith(t))) {
      throw new UnsupportedMediaTypeError({ mediaType: file.type ?? 'unknown', where: 'asset tool' });
    }

    let transformation: TransformationSet | undefined = undefined;

    // ensure image dimensions are within allowable LLM limits
    if (file.type.startsWith('image') && file.width && file.height && (file.width > 1200 || file.height > 1200)) {
      transformation = {
        transformationParams: {
          transforms:
            file.width > file.height
              ? [['resize', { width: 800, fit: 'contain' }]]
              : [['resize', { height: 800, fit: 'contain' }]],
        },
      };
    }

    const assetsService = new AssetsService(serviceOptions);

    const asset = await assetsService.getAsset(args.id, transformation);

    const chunks = [];

    for await (const chunk of asset.stream) {
      chunks.push(Buffer.from(chunk));
    }

    return {
      type: file.type.startsWith('image') ? 'image' : 'audio',
      data: Buffer.concat(chunks).toString('base64'),
      mimeType: file.type,
    };
  },
});
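The transformation built above can be read as a small pure function over the stored dimensions: anything with an edge over 1200px is resized to 800px along its longer edge, with fit: 'contain' preserving the aspect ratio. A sketch of just that rule (it mirrors the handler logic; it is not imported from it):

function resizeTransform(width: number, height: number) {
  // Within LLM-friendly bounds: no transformation needed.
  if (width <= 1200 && height <= 1200) return undefined;

  // Resize along the longer edge so the shorter edge stays proportional.
  return width > height
    ? [['resize', { width: 800, fit: 'contain' }]]
    : [['resize', { height: 800, fit: 'contain' }]];
}

// resizeTransform(1300, 500) => [['resize', { width: 800, fit: 'contain' }]]
// resizeTransform(500, 1300) => [['resize', { height: 800, fit: 'contain' }]]
// resizeTransform(1000, 900) => undefined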
226
api/src/mcp/tools/collections.test.ts
Normal file
@@ -0,0 +1,226 @@
import type { Accountability, Collection, SchemaOverview } from '@directus/types';
import { afterEach, beforeEach, describe, expect, test, vi, type MockedFunction } from 'vitest';
import { CollectionsService } from '../../services/collections.js';
import { collections } from './collections.js';

vi.mock('../../services/collections.js');

vi.mock('../tool.js', () => ({
  defineTool: vi.fn((config) => config),
}));

describe('collections tool', () => {
  const mockSchema = { collections: {}, fields: {}, relations: {} } as unknown as SchemaOverview;
  const mockAccountability = { user: 'test-user', admin: true } as Accountability;
  const mockSanitizedQuery = { fields: ['*'] };

  afterEach(() => {
    vi.clearAllMocks();
  });

  describe('collection operations', () => {
    let mockCollectionsService: {
      createMany: MockedFunction<any>;
      readMany: MockedFunction<any>;
      readByQuery: MockedFunction<any>;
      updateBatch: MockedFunction<any>;
      deleteMany: MockedFunction<any>;
    };

    beforeEach(() => {
      mockCollectionsService = {
        createMany: vi.fn(),
        readMany: vi.fn(),
        readByQuery: vi.fn(),
        updateBatch: vi.fn(),
        deleteMany: vi.fn(),
      };

      vi.mocked(CollectionsService).mockImplementation(() => mockCollectionsService as unknown as CollectionsService);
    });

    describe('CREATE action', () => {
      test('should create a single collection', async () => {
        const collectionData = {
          collection: 'test_collection',
          meta: { hidden: false, singleton: false },
        };

        mockCollectionsService.createMany.mockResolvedValue(['test_collection']);
        mockCollectionsService.readMany.mockResolvedValue([collectionData]);

        const result = await collections.handler({
          args: {
            action: 'create',
            data: collectionData,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(CollectionsService).toHaveBeenCalledWith({
          schema: mockSchema,
          accountability: mockAccountability,
        });

        expect(mockCollectionsService.createMany).toHaveBeenCalledWith([collectionData]);
        expect(mockCollectionsService.readMany).toHaveBeenCalledWith(['test_collection']);

        expect(result).toEqual({
          type: 'text',
          data: [collectionData],
        });
      });

      test('should create multiple collections', async () => {
        const collectionsData = [
          { collection: 'collection1', meta: { hidden: false } },
          { collection: 'collection2', meta: { hidden: false } },
        ];

        mockCollectionsService.createMany.mockResolvedValue(['collection1', 'collection2']);
        mockCollectionsService.readMany.mockResolvedValue(collectionsData);

        const result = await collections.handler({
          args: {
            action: 'create',
            data: collectionsData,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockCollectionsService.createMany).toHaveBeenCalledWith(collectionsData);
        expect(result).toEqual({ type: 'text', data: collectionsData });
      });
    });

    describe('READ action', () => {
      test('should read collections by keys', async () => {
        const keys = ['collection1', 'collection2'];

        const expectedData = [
          { collection: 'collection1', meta: { hidden: false } },
          { collection: 'collection2', meta: { hidden: false } },
        ];

        mockCollectionsService.readMany.mockResolvedValue(expectedData);

        const result = await collections.handler({
          args: {
            action: 'read',
            keys,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: {},
        });

        expect(mockCollectionsService.readMany).toHaveBeenCalledWith(keys);
        expect(result).toEqual({ type: 'text', data: expectedData });
      });

      test('should read collections by query', async () => {
        const expectedData = [{ collection: 'test_collection' }];
        mockCollectionsService.readByQuery.mockResolvedValue(expectedData);

        const result = await collections.handler({
          args: {
            action: 'read',
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockCollectionsService.readByQuery).toHaveBeenCalled();
        expect(result).toEqual({ type: 'text', data: expectedData });
      });
    });

    describe('UPDATE action', () => {
      test('should update collection by data array', async () => {
        const keys = ['collection1'];
        const updateData = { collection: 'collection1', meta: { hidden: true }, schema: {} } as Collection;
        const expectedResult = [{ collection: 'collection1', meta: { hidden: true } }];

        mockCollectionsService.updateBatch.mockResolvedValue(keys);
        mockCollectionsService.readMany.mockResolvedValue(expectedResult);

        const result = await collections.handler({
          args: {
            action: 'update',
            data: updateData,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockCollectionsService.updateBatch).toHaveBeenCalledWith([updateData]);
        expect(result).toEqual({ type: 'text', data: expectedResult });
      });
    });

    describe('DELETE action', () => {
      test('should delete collections', async () => {
        const keys = ['collection1', 'collection2'];

        mockCollectionsService.deleteMany.mockResolvedValue(keys);

        const result = await collections.handler({
          args: {
            action: 'delete',
            keys,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockCollectionsService.deleteMany).toHaveBeenCalledWith(keys);

        expect(result).toEqual({
          type: 'text',
          data: keys,
        });
      });
    });
  });

  describe('error handling', () => {
    test('should throw error for invalid action', async () => {
      await expect(
        collections.handler({
          args: {
            action: 'invalid' as any,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        }),
      ).rejects.toThrow('Invalid action.');
    });
  });

  describe('tool configuration', () => {
    test('should have correct tool name', () => {
      expect(collections.name).toBe('collections');
    });

    test('should be admin tool', () => {
      expect(collections.admin).toBe(true);
    });

    test('should have description', () => {
      expect(collections.description).toBeDefined();
    });

    test('should have input and validation schemas', () => {
      expect(collections.inputSchema).toBeDefined();
      expect(collections.validateSchema).toBeDefined();
    });
  });
});
111
api/src/mcp/tools/collections.ts
Normal file
@@ -0,0 +1,111 @@
import { InvalidPayloadError } from '@directus/errors';
import type { Collection, RawCollection } from '@directus/types';
import { isObject, toArray } from '@directus/utils';
import { z } from 'zod';
import { CollectionsService } from '../../services/collections.js';
import { defineTool } from '../define.js';
import {
  CollectionItemInputSchema,
  CollectionItemValidateCreateSchema,
  CollectionItemValidateUpdateSchema,
} from '../schema.js';
import prompts from './prompts/index.js';

export const CollectionsValidateSchema = z.discriminatedUnion('action', [
  z.strictObject({
    action: z.literal('create'),
    data: z.array(CollectionItemValidateCreateSchema),
  }),
  z.strictObject({
    action: z.literal('read'),
    keys: z.array(z.string()).optional(),
  }),
  z.strictObject({
    action: z.literal('update'),
    data: z.array(CollectionItemValidateUpdateSchema),
  }),
  z.strictObject({
    action: z.literal('delete'),
    keys: z.array(z.string()),
  }),
]);

export const CollectionsInputSchema = z.object({
  action: z.enum(['create', 'read', 'update', 'delete']).describe('The operation to perform'),
  keys: z.array(z.string()).optional(),
  data: z.array(CollectionItemInputSchema).optional(),
});

export const collections = defineTool<z.infer<typeof CollectionsValidateSchema>>({
  name: 'collections',
  admin: true,
  description: prompts.collections,
  annotations: {
    title: 'Directus - Collections',
  },
  inputSchema: CollectionsInputSchema,
  validateSchema: CollectionsValidateSchema,
  endpoint({ data }) {
    if (!isObject(data) || !('collection' in data)) {
      return;
    }

    return ['content', data['collection'] as string];
  },
  async handler({ args, schema, accountability }) {
    const service = new CollectionsService({
      schema,
      accountability,
    });

    if (args.action === 'create') {
      const data = toArray(args.data);

      const savedKeys = await service.createMany(data as RawCollection[]);

      const result = await service.readMany(savedKeys);

      return {
        type: 'text',
        data: result || null,
      };
    }

    if (args.action === 'read') {
      let result = null;

      if (args.keys) {
        result = await service.readMany(args.keys);
      } else {
        result = await service.readByQuery();
      }

      return {
        type: 'text',
        data: result || null,
      };
    }

    if (args.action === 'update') {
      const updatedKeys = await service.updateBatch(toArray(args.data as Collection | Collection[]));

      const result = await service.readMany(updatedKeys);

      return {
        type: 'text',
        data: result || null,
      };
    }

    if (args.action === 'delete') {
      const deletedKeys = await service.deleteMany(args.keys);

      return {
        type: 'text',
        data: deletedKeys,
      };
    }

    throw new InvalidPayloadError({ reason: 'Invalid action' });
  },
});
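The discriminated union above is what gives each action its own required shape, and z.strictObject additionally rejects unknown keys. A self-contained illustration of that behavior (a reduced stand-in for CollectionsValidateSchema, not the schema itself):

import { z } from 'zod';

const Schema = z.discriminatedUnion('action', [
  z.strictObject({ action: z.literal('read'), keys: z.array(z.string()).optional() }),
  z.strictObject({ action: z.literal('delete'), keys: z.array(z.string()) }),
]);

Schema.safeParse({ action: 'read' }).success; // true — keys are optional for read
Schema.safeParse({ action: 'delete' }).success; // false — delete requires keys
Schema.safeParse({ action: 'delete', keys: ['a'], extra: 1 }).success; // false — strictObject rejects unknown keys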
292
api/src/mcp/tools/fields.test.ts
Normal file
@@ -0,0 +1,292 @@
import type { Accountability, Field, SchemaOverview } from '@directus/types';
import { afterEach, beforeEach, describe, expect, test, vi, type MockedFunction } from 'vitest';
import { FieldsService } from '../../services/fields.js';
import { fields } from './fields.js';

vi.mock('../../services/fields.js');

vi.mock('../../utils/get-schema.js', () => {
  return { getSchema: vi.fn() };
});

vi.mock('../../database/index.js', () => {
  const self: Record<string, any> = {
    transaction: vi.fn((cb) => cb(self)),
  };

  return { default: vi.fn(() => self), getDatabaseClient: vi.fn().mockReturnValue('postgres') };
});

vi.mock('../tool.js', () => ({
  defineTool: vi.fn((config) => config),
}));

describe('fields tool', () => {
  const mockSchema = { collections: {}, fields: {}, relations: {} } as unknown as SchemaOverview;
  const mockAccountability = { user: 'test-user', admin: true } as Accountability;
  const mockSanitizedQuery = { fields: ['*'] };

  afterEach(() => {
    vi.clearAllMocks();
  });

  describe('field operations', () => {
    let mockFieldsService: {
      createField: MockedFunction<any>;
      readOne: MockedFunction<any>;
      readAll: MockedFunction<any>;
      updateField: MockedFunction<any>;
      deleteField: MockedFunction<any>;
    };

    beforeEach(() => {
      mockFieldsService = {
        createField: vi.fn(),
        readOne: vi.fn(),
        readAll: vi.fn(),
        updateField: vi.fn(),
        deleteField: vi.fn(),
      };

      vi.mocked(FieldsService).mockImplementation(() => mockFieldsService as unknown as FieldsService);
    });

    describe('CREATE action', () => {
      test('should create a field', async () => {
        const fieldData = {
          field: 'title',
          type: 'string',
          collection: 'articles',
          meta: { required: true },
        } as Field;

        mockFieldsService.readOne.mockResolvedValue(fieldData);

        const result = await fields.handler({
          args: {
            action: 'create',
            collection: 'articles',
            data: fieldData,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(FieldsService).toHaveBeenCalledWith({
          schema: mockSchema,
          accountability: mockAccountability,
        });

        expect(result).toEqual({ type: 'text', data: [fieldData] });
      });
    });

    describe('READ action', () => {
      test('should read fields', async () => {
        const expectedFields = [
          { field: 'title', type: 'string', collection: 'articles' },
          { field: 'content', type: 'text', collection: 'articles' },
        ];

        mockFieldsService.readAll.mockResolvedValue(expectedFields);

        const result = await fields.handler({
          args: {
            collection: 'articles',
            action: 'read',
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(result).toEqual({ type: 'text', data: expectedFields });
      });

      test('should read field by field name', async () => {
        const expectedField = { field: 'title', type: 'string', collection: 'articles' };

        mockFieldsService.readOne.mockResolvedValue(expectedField);

        const result = await fields.handler({
          args: {
            collection: 'articles',
            field: 'title',
            action: 'read',
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockFieldsService.readOne).toHaveBeenCalledWith(expectedField.collection, expectedField.field);

        expect(result).toEqual({ type: 'text', data: expectedField });
      });
    });

    describe('UPDATE action', () => {
      test('should update field by field', async () => {
        const collection = 'articles';

        const updateData = {
          field: 'title',
          meta: { required: false, note: 'Updated field note' },
        } as Field;

        const expectedResult = [
          {
            field: 'title',
            type: 'string',
            collection,
            meta: { required: false, note: 'Updated field note' },
          },
        ];

        mockFieldsService.readOne.mockImplementation((collection, field) =>
          expectedResult.find((f) => f.collection === collection && f.field === field),
        );

        const result = await fields.handler({
          args: {
            action: 'update',
            collection,
            data: [updateData],
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockFieldsService.updateField).toHaveBeenCalledOnce();

        expect(mockFieldsService.updateField).toHaveBeenCalledWith(collection, updateData, {
          autoPurgeCache: false,
          autoPurgeSystemCache: false,
        });

        expect(result).toEqual({ type: 'text', data: expectedResult });
      });

      test('should update field by fields', async () => {
        const collection = 'articles';

        const updateData = [
          {
            field: 'title',
            meta: { required: false, note: 'Updated field note' },
          },
          {
            field: 'subtitle',
            meta: { required: false, note: 'Updated field note' },
          },
        ] as Field[];

        const expectedResult = [
          {
            field: 'title',
            type: 'string',
            collection,
            meta: { required: false, note: 'Updated field note' },
          },
          {
            field: 'subtitle',
            type: 'string',
            collection,
            meta: { required: false, note: 'Updated field note' },
          },
        ];

        mockFieldsService.readOne.mockImplementation((collection, field) => {
          return expectedResult.find((f) => f.collection === collection && f.field === field);
        });

        const result = await fields.handler({
          args: {
            action: 'update',
            collection,
            data: updateData,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockFieldsService.updateField).toHaveBeenNthCalledWith(1, collection, updateData[0], {
          autoPurgeCache: false,
          autoPurgeSystemCache: false,
        });

        expect(mockFieldsService.updateField).toHaveBeenNthCalledWith(2, collection, updateData[1], {
          autoPurgeCache: false,
          autoPurgeSystemCache: false,
        });

        expect(result).toEqual({ type: 'text', data: expectedResult });
      });
    });

    describe('DELETE action', () => {
      test('should delete fields', async () => {
        const collection = 'articles';
        const fieldName = 'title';

        const result = await fields.handler({
          args: {
            action: 'delete',
            collection,
            field: fieldName,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockFieldsService.deleteField).toHaveBeenCalledWith(collection, fieldName);

        expect(result).toEqual({
          type: 'text',
          data: {
            collection,
            field: fieldName,
          },
        });
      });
    });
  });

  describe('error handling', () => {
    test('should throw error for invalid action', async () => {
      await expect(
        fields.handler({
          args: {
            action: 'invalid' as any,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        }),
      ).rejects.toThrow('Invalid action.');
    });
  });

  describe('tool configuration', () => {
    test('should have correct tool name', () => {
      expect(fields.name).toBe('fields');
    });

    test('should be admin tool', () => {
      expect(fields.admin).toBe(true);
    });

    test('should have description', () => {
      expect(fields.description).toBeDefined();
    });

    test('should have input and validation schemas', () => {
      expect(fields.inputSchema).toBeDefined();
      expect(fields.validateSchema).toBeDefined();
    });
  });
});
198
api/src/mcp/tools/fields.ts
Normal file
@@ -0,0 +1,198 @@
import { InvalidPayloadError } from '@directus/errors';
import type { Field, Item, RawField, Type } from '@directus/types';
import { toArray } from '@directus/utils';
import { z } from 'zod';
import { clearSystemCache } from '../../cache.js';
import getDatabase from '../../database/index.js';
import { FieldsService } from '../../services/fields.js';
import { getSchema } from '../../utils/get-schema.js';
import { shouldClearCache } from '../../utils/should-clear-cache.js';
import { transaction } from '../../utils/transaction.js';
import { defineTool } from '../define.js';
import {
  FieldItemInputSchema,
  FieldItemValidateSchema,
  RawFieldItemInputSchema,
  RawFieldItemValidateSchema,
} from '../schema.js';
import prompts from './prompts/index.js';

type FieldCreateItem = Partial<Field> & {
  field: string;
  type: Type | null;
};

export const FieldsBaseValidateSchema = z.strictObject({
  collection: z.string(),
});

export const FieldsValidateSchema = z.discriminatedUnion('action', [
  FieldsBaseValidateSchema.extend({
    action: z.literal('create'),
    data: z.union([z.array(FieldItemValidateSchema), FieldItemValidateSchema]),
  }),
  z.object({
    action: z.literal('read'),
    collection: z.string().optional(),
    field: z.string().optional(),
  }),
  FieldsBaseValidateSchema.extend({
    action: z.literal('update'),
    data: z.array(RawFieldItemValidateSchema),
  }),
  FieldsBaseValidateSchema.extend({
    action: z.literal('delete'),
    field: z.string(),
  }),
]);

export const FieldsInputSchema = z.object({
  action: z.enum(['read', 'create', 'update', 'delete']).describe('The operation to perform'),
  collection: z.string().describe('The name of the collection').optional(),
  field: z.string().optional(),
  data: z
    .array(
      FieldItemInputSchema.extend({
        children: RawFieldItemInputSchema.shape.children,
      }).partial(),
    )
    .optional(),
});

export const fields = defineTool<z.infer<typeof FieldsValidateSchema>>({
  name: 'fields',
  admin: true,
  description: prompts.fields,
  annotations: {
    title: 'Directus - Fields',
  },
  inputSchema: FieldsInputSchema,
  validateSchema: FieldsValidateSchema,
  async handler({ args, schema, accountability }) {
    let service = new FieldsService({
      schema,
      accountability,
    });

    if (args.action === 'create') {
      const fields = toArray(args.data as FieldCreateItem | FieldCreateItem[]);

      const knex = getDatabase();

      const result: Item[] = [];

      await transaction(knex, async (trx) => {
        service = new FieldsService({
          schema,
          accountability,
          knex: trx,
        });

        for (const field of fields) {
          await service.createField(args.collection, field, undefined, {
            autoPurgeCache: false,
            autoPurgeSystemCache: false,
          });
        }
      });

      // manually clear cache
      if (shouldClearCache(service.cache)) {
        await service.cache.clear();
      }

      await clearSystemCache();

      service = new FieldsService({
        schema: await getSchema(),
        accountability,
      });

      for (const field of fields) {
        const createdField = await service.readOne(args.collection, field.field);
        result.push(createdField);
      }

      return {
        type: 'text',
        data: result || null,
      };
    }

    if (args.action === 'read') {
      let result = null;

      if (args.collection) {
        if (args.field) {
          result = await service.readOne(args.collection, args.field);
        } else {
          result = await service.readAll(args.collection);
        }
      } else {
        result = await service.readAll();
      }

      return {
        type: 'text',
        data: result || null,
      };
    }

    if (args.action === 'update') {
      const fields = toArray(args.data as RawField | RawField[]);

      const knex = getDatabase();

      const result: Item[] = [];

      await transaction(knex, async (trx) => {
        service = new FieldsService({
          schema,
          accountability,
          knex: trx,
        });

        for (const field of fields) {
          await service.updateField(args.collection, field, {
            autoPurgeCache: false,
            autoPurgeSystemCache: false,
          });
        }
      });

      // manually clear cache
      if (shouldClearCache(service.cache)) {
        await service.cache.clear();
      }

      await clearSystemCache();

      service = new FieldsService({
        schema: await getSchema(),
        accountability,
      });

      for (const field of fields) {
        const updatedField = await service.readOne(args.collection, field.field);
        result.push(updatedField);
      }

      return {
        type: 'text',
        data: result || null,
      };
    }

    if (args.action === 'delete') {
      const { collection, field } = args;
      await service.deleteField(collection, field);

      return {
        type: 'text',
        data: { collection, field },
      };
    }

    throw new InvalidPayloadError({ reason: 'Invalid action' });
  },
});
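Both mutating branches above share one pattern: run every createField/updateField call inside a single transaction with per-call cache purging disabled, clear the data and system caches once afterwards, then re-read the fields against a freshly fetched schema. A hypothetical invocation of the finished tool, assuming schema and accountability are already in scope:

// Hypothetical call; collection and field names are illustrative.
const result = await fields.handler({
  args: { action: 'create', collection: 'articles', data: [{ field: 'title', type: 'string' }] },
  schema,
  accountability,
  sanitizedQuery: { fields: ['*'] },
});
// result => { type: 'text', data: [/* the created field, re-read from the fresh schema */] }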
160
api/src/mcp/tools/files.test.ts
Normal file
@@ -0,0 +1,160 @@
import type { Accountability, SchemaOverview } from '@directus/types';
import { afterEach, beforeEach, describe, expect, test, vi, type MockedFunction } from 'vitest';
import { FilesService } from '../../services/files.js';
import { files } from './files.js';

vi.mock('../../services/files.js');

vi.mock('../tool.js', () => ({
  defineTool: vi.fn((config) => config),
}));

describe('files tool', () => {
  const mockSchema = { collections: {}, fields: {}, relations: {} } as unknown as SchemaOverview;
  const mockAccountability = { user: 'test-user' } as Accountability;
  const mockSanitizedQuery = { fields: ['*'] };

  afterEach(() => {
    vi.clearAllMocks();
  });

  describe('file operations', () => {
    let mockFilesService: {
      createMany: MockedFunction<any>;
      readMany: MockedFunction<any>;
      readByQuery: MockedFunction<any>;
      updateBatch: MockedFunction<any>;
      updateMany: MockedFunction<any>;
      updateByQuery: MockedFunction<any>;
      deleteMany: MockedFunction<any>;
    };

    beforeEach(() => {
      mockFilesService = {
        createMany: vi.fn(),
        readMany: vi.fn(),
        readByQuery: vi.fn(),
        updateBatch: vi.fn(),
        updateMany: vi.fn(),
        updateByQuery: vi.fn(),
        deleteMany: vi.fn(),
      };

      vi.mocked(FilesService).mockImplementation(() => mockFilesService as unknown as FilesService);
    });

    describe('READ action', () => {
      test('should read files by keys', async () => {
        const keys = ['file-1', 'file-2'];
        const expectedResult = [{ id: 'file-1' }, { id: 'file-2' }];

        mockFilesService.readMany.mockResolvedValue(expectedResult);

        const result = await files.handler({
          args: {
            action: 'read',
            keys,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockFilesService.readMany).toHaveBeenCalledWith(keys, mockSanitizedQuery);

        expect(result).toEqual({
          type: 'text',
          data: expectedResult,
        });
      });
    });

    describe('UPDATE action', () => {
      test('should update files using keys', async () => {
        const keys = ['file-1'];
        const updateData = { filename_download: 'updated.jpg' };
        const expectedResult = [{ id: 'file-1', filename_download: 'updated.jpg' }];

        mockFilesService.updateMany.mockResolvedValue(keys);
        mockFilesService.readMany.mockResolvedValue(expectedResult);

        const result = await files.handler({
          args: {
            action: 'update',
            data: updateData,
            keys,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockFilesService.updateMany).toHaveBeenCalledWith(keys, updateData);

        expect(result).toEqual({
          type: 'text',
          data: expectedResult,
        });
      });
    });

    describe('DELETE action', () => {
      test('should delete files by keys', async () => {
        const keys = ['file-1', 'file-2'];

        mockFilesService.deleteMany.mockResolvedValue(keys);

        const result = await files.handler({
          args: {
            action: 'delete',
            keys,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockFilesService.deleteMany).toHaveBeenCalledWith(keys);

        expect(result).toEqual({
          type: 'text',
          data: keys,
        });
      });
    });
  });

  describe('error handling', () => {
    test('should throw error for invalid action', async () => {
      await expect(
        files.handler({
          args: {
            action: 'invalid' as any,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        }),
      ).rejects.toThrow('Invalid action.');
    });
  });

  describe('tool configuration', () => {
    test('should have correct tool name', () => {
      expect(files.name).toBe('files');
    });

    test('should not be admin tool', () => {
      expect(files.admin).toBeUndefined();
    });

    test('should have description', () => {
      expect(files.description).toBeDefined();
    });

    test('should have input and validation schemas', () => {
      expect(files.inputSchema).toBeDefined();
      expect(files.validateSchema).toBeDefined();
    });
  });
});
128
api/src/mcp/tools/files.ts
Normal file
@@ -0,0 +1,128 @@
import type { File, PrimaryKey } from '@directus/types';
import { isObject } from '@directus/utils';
import { z } from 'zod';
import { FilesService } from '../../services/files.js';
import { defineTool } from '../define.js';
import {
  FileImportItemInputSchema,
  FileImportItemValidateSchema,
  FileItemInputSchema,
  FileItemValidateSchema,
  PrimaryKeyInputSchema,
  PrimaryKeyValidateSchema,
  QueryInputSchema,
  QueryValidateSchema,
} from '../schema.js';
import prompts from './prompts/index.js';

export const FilesValidateSchema = z.discriminatedUnion('action', [
  z.strictObject({
    action: z.literal('read'),
    keys: z.array(PrimaryKeyValidateSchema).optional(),
    query: QueryValidateSchema.optional(),
  }),
  z.strictObject({
    action: z.literal('update'),
    data: FileItemValidateSchema,
    keys: z.array(PrimaryKeyValidateSchema).optional(),
    query: QueryValidateSchema.optional(),
  }),
  z.strictObject({
    action: z.literal('delete'),
    keys: z.array(PrimaryKeyValidateSchema),
  }),
  z.strictObject({
    action: z.literal('import'),
    data: z.array(FileImportItemValidateSchema),
  }),
]);

const FilesInputSchema = z.object({
  action: z.enum(['read', 'update', 'delete', 'import']).describe('The operation to perform'),
  query: QueryInputSchema.optional(),
  keys: z.array(PrimaryKeyInputSchema).optional(),
  data: z.array(FileItemInputSchema.extend({ ...FileImportItemInputSchema.shape }).partial()).optional(),
});

export const files = defineTool<z.infer<typeof FilesValidateSchema>>({
  name: 'files',
  description: prompts.files,
  annotations: {
    title: 'Directus - Files',
  },
  inputSchema: FilesInputSchema,
  validateSchema: FilesValidateSchema,
  endpoint({ data }) {
    if (!isObject(data) || !('id' in data)) {
      return;
    }

    return ['files', data['id'] as string];
  },
  async handler({ args, schema, accountability, sanitizedQuery }) {
    const service = new FilesService({
      schema,
      accountability,
    });

    if (args.action === 'read') {
      let result = null;

      if (args.keys) {
        result = await service.readMany(args.keys, sanitizedQuery);
      } else {
        result = await service.readByQuery(sanitizedQuery);
      }

      return {
        type: 'text',
        data: result,
      };
    }

    if (args.action === 'update') {
      let updatedKeys: PrimaryKey[] = [];

      if (Array.isArray(args.data)) {
        updatedKeys = await service.updateBatch(args.data);
      } else if (args.keys) {
        updatedKeys = await service.updateMany(args.keys, args.data as Partial<File>);
      } else {
        updatedKeys = await service.updateByQuery(sanitizedQuery, args.data as Partial<File>);
      }

      const result = await service.readMany(updatedKeys, sanitizedQuery);

      return {
        type: 'text',
        data: result,
      };
    }

    if (args.action === 'delete') {
      const deletedKeys = await service.deleteMany(args.keys);

      return {
        type: 'text',
        data: deletedKeys,
      };
    }

    if (args.action === 'import') {
      const savedKeys = [];

      for (const file of args.data) {
        const savedKey = await service.importOne(file.url, file.file as Partial<File>);

        savedKeys.push(savedKey);
      }

      return {
        type: 'text',
        data: savedKeys,
      };
    }

    throw new Error('Invalid action.');
  },
});
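For the import branch above, each entry supplies a source URL plus optional file metadata, and the handler returns the new primary keys. A hypothetical call, assuming schema and accountability are in scope (the URL and title are illustrative):

const result = await files.handler({
  args: {
    action: 'import',
    data: [{ url: 'https://example.com/image.png', file: { title: 'Example' } }],
  },
  schema,
  accountability,
  sanitizedQuery: { fields: ['*'] },
});
// result => { type: 'text', data: [/* primary keys of the imported files */] }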
200
api/src/mcp/tools/flows.test.ts
Normal file
@@ -0,0 +1,200 @@
import type { Accountability, FlowRaw, SchemaOverview } from '@directus/types';
import { afterEach, beforeEach, describe, expect, test, vi, type MockedFunction } from 'vitest';
import { FlowsService } from '../../services/flows.js';
import { flows } from './flows.js';

vi.mock('../../services/flows');

vi.mock('../tool.js', () => ({
  defineTool: vi.fn((config) => config),
}));

describe('flows tool', () => {
  const mockSchema = { collections: {}, fields: {}, relations: {} } as unknown as SchemaOverview;
  const mockAccountability = { user: 'test-user' } as Accountability;
  const mockSanitizedQuery = { fields: ['*'] };

  afterEach(() => {
    vi.clearAllMocks();
  });

  describe('flow operations', () => {
    let mockFlowsService: {
      createOne: MockedFunction<any>;
      readOne: MockedFunction<any>;
      readByQuery: MockedFunction<any>;
      updateOne: MockedFunction<any>;
      deleteOne: MockedFunction<any>;
    };

    beforeEach(() => {
      mockFlowsService = {
        createOne: vi.fn(),
        readOne: vi.fn(),
        readByQuery: vi.fn(),
        updateOne: vi.fn(),
        deleteOne: vi.fn(),
      };

      vi.mocked(FlowsService).mockImplementation(() => mockFlowsService as unknown as FlowsService);
    });

    describe('CREATE action', () => {
      test('should create a flow and return the result', async () => {
        const mockFlowData = {
          name: 'Test Flow',
          trigger: 'manual',
          status: 'active',
        } satisfies Partial<FlowRaw>;

        const mockCreatedKey = 'flow-123';
        const mockCreatedFlow = { id: mockCreatedKey, ...mockFlowData };

        mockFlowsService.createOne.mockResolvedValue(mockCreatedKey);
        mockFlowsService.readOne.mockResolvedValue(mockCreatedFlow);

        const result = await flows.handler({
          args: { action: 'create', data: mockFlowData },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockFlowsService.createOne).toHaveBeenCalledWith(mockFlowData);
        expect(mockFlowsService.readOne).toHaveBeenCalledWith(mockCreatedKey);

        expect(result).toEqual({
          type: 'text',
          data: mockCreatedFlow,
        });
      });

      test('should handle null result from readOne after create', async () => {
        const mockFlowData = { name: 'Test Flow', trigger: 'manual' } satisfies Partial<FlowRaw>;
        const mockCreatedKey = 'flow-123';

        mockFlowsService.createOne.mockResolvedValue(mockCreatedKey);
        mockFlowsService.readOne.mockResolvedValue(null);

        const result = await flows.handler({
          args: { action: 'create', data: mockFlowData },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(result).toEqual({
          type: 'text',
          data: null,
        });
      });
    });

    describe('READ action', () => {
      test('should read flows by query', async () => {
        const mockFlows = [
          { id: 'flow-1', name: 'Flow 1', trigger: 'manual' },
          { id: 'flow-2', name: 'Flow 2', trigger: 'event' },
        ];

        mockFlowsService.readByQuery.mockResolvedValue(mockFlows);

        const result = await flows.handler({
          args: { action: 'read' },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockFlowsService.readByQuery).toHaveBeenCalledWith(mockSanitizedQuery);

        expect(result).toEqual({
          type: 'text',
          data: mockFlows,
        });
      });
    });

    describe('UPDATE action', () => {
      test('should update a flow and return the updated result', async () => {
        const mockKey = 'flow-123';
        const mockUpdateData = { status: 'inactive', description: 'Updated description' } satisfies Partial<FlowRaw>;
        const mockUpdatedFlow = { id: mockKey, name: 'Test Flow', ...mockUpdateData };

        mockFlowsService.updateOne.mockResolvedValue(mockKey);
        mockFlowsService.readOne.mockResolvedValue(mockUpdatedFlow);

        const result = await flows.handler({
          args: { action: 'update', key: mockKey, data: mockUpdateData },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockFlowsService.updateOne).toHaveBeenCalledWith(mockKey, mockUpdateData);
        expect(mockFlowsService.readOne).toHaveBeenCalledWith(mockKey, mockSanitizedQuery);

        expect(result).toEqual({
          type: 'text',
          data: mockUpdatedFlow,
        });
      });
    });

    describe('DELETE action', () => {
      test('should delete a flow and return the deleted key', async () => {
        const mockKey = 'flow-123';

        mockFlowsService.deleteOne.mockResolvedValue(mockKey);

        const result = await flows.handler({
          args: { action: 'delete', key: mockKey },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockFlowsService.deleteOne).toHaveBeenCalledWith(mockKey);

        expect(result).toEqual({
          type: 'text',
          data: mockKey,
        });
      });
    });
  });

  describe('error handling', () => {
    test('should throw error for invalid action', async () => {
      await expect(
        flows.handler({
          args: {
            action: 'invalid' as any,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        }),
      ).rejects.toThrow('Invalid action.');
    });
  });

  describe('tool configuration', () => {
    test('should have correct tool name', () => {
      expect(flows.name).toBe('flows');
    });

    test('should be admin tool', () => {
      expect(flows.admin).toBe(true);
    });

    test('should have description', () => {
      expect(flows.description).toBeDefined();
    });

    test('should have input and validation schemas', () => {
      expect(flows.inputSchema).toBeDefined();
      expect(flows.validateSchema).toBeDefined();
    });
  });
});
99
api/src/mcp/tools/flows.ts
Normal file
@@ -0,0 +1,99 @@
import type { FlowRaw } from '@directus/types';
import { isObject } from '@directus/utils';
import { z } from 'zod';
import { FlowsService } from '../../services/flows.js';
import { defineTool } from '../define.js';
import { FlowItemInputSchema, FlowItemValidateSchema, QueryInputSchema, QueryValidateSchema } from '../schema.js';
import prompts from './prompts/index.js';

export const FlowsValidateSchema = z.discriminatedUnion('action', [
  z.strictObject({
    action: z.literal('create'),
    data: FlowItemValidateSchema,
  }),
  z.strictObject({
    action: z.literal('read'),
    query: QueryValidateSchema.optional(),
  }),
  z.strictObject({
    action: z.literal('update'),
    key: z.string(),
    data: FlowItemValidateSchema,
    query: QueryValidateSchema.optional(),
  }),
  z.strictObject({
    action: z.literal('delete'),
    key: z.string(),
  }),
]);

export const FlowsInputSchema = z.object({
  action: z.enum(['create', 'read', 'update', 'delete']).describe('The operation to perform'),
  query: QueryInputSchema.optional(),
  data: FlowItemInputSchema.optional(),
  key: z.string().optional(),
});

export const flows = defineTool<z.infer<typeof FlowsValidateSchema>>({
  name: 'flows',
  admin: true,
  description: prompts.flows,
  annotations: {
    title: 'Directus - Flows',
  },
  inputSchema: FlowsInputSchema,
  validateSchema: FlowsValidateSchema,
  endpoint({ data }) {
    if (!isObject(data) || !('id' in data)) {
      return;
    }

    return ['settings', 'flows', data['id'] as string];
  },
  async handler({ args, schema, accountability, sanitizedQuery }) {
    const flowsService = new FlowsService({
      schema,
      accountability,
    });

    if (args.action === 'create') {
      const savedKey = await flowsService.createOne(args.data);
      const result = await flowsService.readOne(savedKey);

      return {
        type: 'text',
        data: result || null,
      };
    }

    if (args.action === 'read') {
      const result = await flowsService.readByQuery(sanitizedQuery);

      return {
        type: 'text',
        data: result || null,
      };
    }

    if (args.action === 'update') {
      const updatedKey = await flowsService.updateOne(args.key, args.data as Partial<FlowRaw>);
      const result = await flowsService.readOne(updatedKey, sanitizedQuery);

      return {
        type: 'text',
        data: result || null,
      };
    }

    if (args.action === 'delete') {
      const deletedKey = await flowsService.deleteOne(args.key);

      return {
        type: 'text',
        data: deletedKey,
      };
    }

    throw new Error('Invalid action.');
  },
});
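The endpoint() hook above only maps a flow record to an admin-app path; buildURL() is what prefixes it with PUBLIC_URL and 'admin'. For example (the domain is hypothetical):

flows.endpoint?.({ input: {}, data: { id: 'flow-123' } });
// => ['settings', 'flows', 'flow-123']
// buildURL() would then produce e.g. https://example.directus.app/admin/settings/flows/flow-123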
345
api/src/mcp/tools/folders.test.ts
Normal file
@@ -0,0 +1,345 @@
import type { Accountability, SchemaOverview } from '@directus/types';
import { afterEach, beforeEach, describe, expect, test, vi, type MockedFunction } from 'vitest';
import { FoldersService } from '../../services/folders.js';
import { folders } from './folders.js';

vi.mock('../../services/folders.js');

vi.mock('../tool.js', () => ({
  defineTool: vi.fn((config) => config),
}));

describe('folders tool', () => {
  const mockSchema = {} as SchemaOverview;
  const mockAccountability = { user: 'test-user' } as Accountability;
  const mockSanitizedQuery = { fields: ['*'] };

  afterEach(() => {
    vi.clearAllMocks();
  });

  describe('folder operations', () => {
    let mockFoldersService: {
      createMany: MockedFunction<any>;
      readMany: MockedFunction<any>;
      readByQuery: MockedFunction<any>;
      updateBatch: MockedFunction<any>;
      updateMany: MockedFunction<any>;
      updateByQuery: MockedFunction<any>;
      deleteMany: MockedFunction<any>;
    };

    beforeEach(() => {
      mockFoldersService = {
        createMany: vi.fn(),
        readMany: vi.fn(),
        readByQuery: vi.fn(),
        updateBatch: vi.fn(),
        updateMany: vi.fn(),
        updateByQuery: vi.fn(),
        deleteMany: vi.fn(),
      };

      vi.mocked(FoldersService).mockImplementation(() => mockFoldersService as unknown as FoldersService);
    });

    describe('CREATE action', () => {
      test('should create a single folder and return the result', async () => {
        const folderData = { name: 'test-folder', parent: 'parent-id' };
        const savedKeys = ['folder-1'];
        const expectedResult = [{ id: 'folder-1', name: 'test-folder' }];

        mockFoldersService.createMany.mockResolvedValue(savedKeys);
        mockFoldersService.readMany.mockResolvedValue(expectedResult);

        const result = await folders.handler({
          args: {
            action: 'create',
            data: folderData,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(FoldersService).toHaveBeenCalledWith({
          schema: mockSchema,
          accountability: mockAccountability,
        });

        expect(mockFoldersService.createMany).toHaveBeenCalledWith([folderData]);
        expect(mockFoldersService.readMany).toHaveBeenCalledWith(savedKeys, mockSanitizedQuery);

        expect(result).toEqual({
          type: 'text',
          data: expectedResult,
        });
      });

      test('should create multiple folders', async () => {
        const foldersData = [{ name: 'folder-1' }, { name: 'folder-2', parent: 'parent-id' }];

        const savedKeys = ['folder-1', 'folder-2'];

        mockFoldersService.createMany.mockResolvedValue(savedKeys);
        mockFoldersService.readMany.mockResolvedValue([]);

        await folders.handler({
          args: {
            action: 'create',
            data: foldersData,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });

        expect(mockFoldersService.createMany).toHaveBeenCalledWith(foldersData);
      });
    });

    describe('READ action', () => {
      test('should read folders by keys', async () => {
        const keys = ['folder-1', 'folder-2'];
        const expectedResult = [{ id: 'folder-1' }, { id: 'folder-2' }];

        mockFoldersService.readMany.mockResolvedValue(expectedResult);

        const result = await folders.handler({
          args: {
            action: 'read',
            keys,
          },
          schema: mockSchema,
          accountability: mockAccountability,
          sanitizedQuery: mockSanitizedQuery,
        });
|
||||
expect(mockFoldersService.readMany).toHaveBeenCalledWith(keys, mockSanitizedQuery);
|
||||
expect(mockFoldersService.readByQuery).not.toHaveBeenCalled();
|
||||
|
||||
expect(result).toEqual({
|
||||
type: 'text',
|
||||
data: expectedResult,
|
||||
});
|
||||
});
|
||||
|
||||
test('should read folders by query when no keys provided', async () => {
|
||||
const expectedResult = [{ id: 'folder-1' }];
|
||||
|
||||
mockFoldersService.readByQuery.mockResolvedValue(expectedResult);
|
||||
|
||||
const result = await folders.handler({
|
||||
args: {
|
||||
action: 'read',
|
||||
},
|
||||
schema: mockSchema,
|
||||
accountability: mockAccountability,
|
||||
sanitizedQuery: mockSanitizedQuery,
|
||||
});
|
||||
|
||||
expect(mockFoldersService.readByQuery).toHaveBeenCalledWith(mockSanitizedQuery);
|
||||
expect(mockFoldersService.readMany).not.toHaveBeenCalled();
|
||||
|
||||
expect(result).toEqual({
|
||||
type: 'text',
|
||||
data: expectedResult,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('UPDATE action', () => {
|
||||
test('should update folders using keys', async () => {
|
||||
const keys = ['folder-1'];
|
||||
const updateData = { name: 'updated-folder' };
|
||||
const expectedResult = [{ id: 'folder-1', name: 'updated-folder' }];
|
||||
|
||||
mockFoldersService.updateMany.mockResolvedValue(keys);
|
||||
mockFoldersService.readMany.mockResolvedValue(expectedResult);
|
||||
|
||||
const result = await folders.handler({
|
||||
args: {
|
||||
action: 'update',
|
||||
data: updateData,
|
||||
keys,
|
||||
},
|
||||
schema: mockSchema,
|
||||
accountability: mockAccountability,
|
||||
sanitizedQuery: mockSanitizedQuery,
|
||||
});
|
||||
|
||||
expect(mockFoldersService.updateMany).toHaveBeenCalledWith(keys, updateData);
|
||||
expect(mockFoldersService.updateByQuery).not.toHaveBeenCalled();
|
||||
expect(mockFoldersService.updateBatch).not.toHaveBeenCalled();
|
||||
|
||||
expect(result).toEqual({
|
||||
type: 'text',
|
||||
data: expectedResult,
|
||||
});
|
||||
});
|
||||
|
||||
test('should update folders using batch when data is array', async () => {
|
||||
				const batchData = [{ id: 'folder-1', name: 'updated-1' }] as unknown as {
					id: string;
					name: string;
					parent?: string | undefined;
				}[];

				const updatedKeys = ['folder-1'];

				mockFoldersService.updateBatch.mockResolvedValue(updatedKeys);
				mockFoldersService.readMany.mockResolvedValue([]);

				await folders.handler({
					args: {
						action: 'update',
						data: batchData,
					},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockFoldersService.updateBatch).toHaveBeenCalledWith(batchData);
				expect(mockFoldersService.updateByQuery).not.toHaveBeenCalled();
				expect(mockFoldersService.updateMany).not.toHaveBeenCalled();
			});

			test('should update folders by query when no keys provided', async () => {
				const updateData = { name: 'updated-folder' };
				const updatedKeys = ['folder-1'];

				mockFoldersService.updateByQuery.mockResolvedValue(updatedKeys);
				mockFoldersService.readMany.mockResolvedValue([]);

				await folders.handler({
					args: {
						action: 'update',
						data: updateData,
					},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockFoldersService.updateByQuery).toHaveBeenCalledWith(mockSanitizedQuery, updateData);
				expect(mockFoldersService.updateMany).not.toHaveBeenCalled();
				expect(mockFoldersService.updateBatch).not.toHaveBeenCalled();
			});
		});

		describe('DELETE action', () => {
			test('should delete folders by keys', async () => {
				const keys = ['folder-1', 'folder-2'];

				mockFoldersService.deleteMany.mockResolvedValue(keys);

				const result = await folders.handler({
					args: {
						action: 'delete',
						keys,
					},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockFoldersService.deleteMany).toHaveBeenCalledWith(keys);

				expect(result).toEqual({
					type: 'text',
					data: keys,
				});
			});
		});
	});

	describe('error handling', () => {
		test('should throw error for invalid action', async () => {
			await expect(
				folders.handler({
					args: {
						action: 'test' as any,
					},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				}),
			).rejects.toThrow('Invalid action.');
		});
	});

	describe('edge cases', () => {
		let mockFoldersService: any;

		beforeEach(() => {
			mockFoldersService = {
				createMany: vi.fn(),
				readMany: vi.fn(),
				readByQuery: vi.fn(),
				updateMany: vi.fn(),
				deleteMany: vi.fn(),
			};

			vi.mocked(FoldersService).mockImplementation(() => mockFoldersService);
		});

		test('should handle null result from readMany after create', async () => {
			const folderData = { name: 'test-folder' };
			const savedKeys = ['folder-1'];

			mockFoldersService.createMany.mockResolvedValue(savedKeys);
			mockFoldersService.readMany.mockResolvedValue(null);

			const result = await folders.handler({
				args: {
					action: 'create',
					data: folderData,
				},
				schema: mockSchema,
				accountability: mockAccountability,
				sanitizedQuery: mockSanitizedQuery,
			});

			expect(result).toEqual({
				type: 'text',
				data: null,
			});
		});

		test('should handle service errors', async () => {
			const error = new Error('Service error');
			mockFoldersService.readByQuery.mockRejectedValue(error);

			await expect(
				folders.handler({
					args: {
						action: 'read',
					},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				}),
			).rejects.toThrow('Service error');
		});
	});

	describe('tool configuration', () => {
		test('should have correct tool name', () => {
			expect(folders.name).toBe('folders');
		});

		test('should not be admin tool', () => {
			expect(folders.admin).toBeUndefined();
		});

		test('should have description', () => {
			expect(folders.description).toBeDefined();
		});

		test('should have input and validation schemas', () => {
			expect(folders.inputSchema).toBeDefined();
			expect(folders.validateSchema).toBeDefined();
		});
	});
});
118
api/src/mcp/tools/folders.ts
Normal file
@@ -0,0 +1,118 @@
import type { PrimaryKey } from '@directus/types';
import { toArray } from '@directus/utils';
import { z } from 'zod';
import { FoldersService } from '../../services/folders.js';
import { defineTool } from '../define.js';
import {
	FolderItemInputSchema,
	FolderItemValidateSchema,
	PrimaryKeyInputSchema,
	PrimaryKeyValidateSchema,
	QueryInputSchema,
	QueryValidateSchema,
} from '../schema.js';
import prompts from './prompts/index.js';

const FoldersValidateSchema = z.discriminatedUnion('action', [
	z.strictObject({
		action: z.literal('create'),
		data: z.union([z.array(FolderItemValidateSchema), FolderItemValidateSchema]),
		query: QueryValidateSchema.optional(),
	}),
	z.strictObject({
		action: z.literal('read'),
		keys: z.array(PrimaryKeyValidateSchema).optional(),
		query: QueryValidateSchema.optional(),
	}),
	z.strictObject({
		action: z.literal('update'),
		data: FolderItemValidateSchema,
		keys: z.array(PrimaryKeyValidateSchema).optional(),
		query: QueryValidateSchema.optional(),
	}),
	z.strictObject({
		action: z.literal('delete'),
		keys: z.array(PrimaryKeyValidateSchema),
	}),
]);

const FoldersInputSchema = z.object({
	action: z.enum(['create', 'read', 'update', 'delete']).describe('The operation to perform'),
	query: QueryInputSchema.optional(),
	keys: z.array(PrimaryKeyInputSchema).optional(),
	data: z.array(FolderItemInputSchema).optional(),
});

export const folders = defineTool<z.infer<typeof FoldersValidateSchema>>({
	name: 'folders',
	description: prompts.folders,
	annotations: {
		title: 'Directus - Folders',
	},
	inputSchema: FoldersInputSchema,
	validateSchema: FoldersValidateSchema,
	async handler({ args, schema, accountability, sanitizedQuery }) {
		const service = new FoldersService({
			schema,
			accountability,
		});

		if (args.action === 'create') {
			const data = toArray(args.data);

			const savedKeys = await service.createMany(data);

			const result = await service.readMany(savedKeys, sanitizedQuery);

			return {
				type: 'text',
				data: result || null,
			};
		}

		if (args.action === 'read') {
			let result = null;

			if (args.keys) {
				result = await service.readMany(args.keys, sanitizedQuery);
			} else {
				result = await service.readByQuery(sanitizedQuery);
			}

			return {
				type: 'text',
				data: result || null,
			};
		}

		if (args.action === 'update') {
			let updatedKeys: PrimaryKey[] = [];

			if (Array.isArray(args.data)) {
				updatedKeys = await service.updateBatch(args.data);
			} else if (args.keys) {
				updatedKeys = await service.updateMany(args.keys, args.data);
			} else {
				updatedKeys = await service.updateByQuery(sanitizedQuery, args.data);
			}

			const result = await service.readMany(updatedKeys, sanitizedQuery);

			return {
				type: 'text',
				data: result,
			};
		}

		if (args.action === 'delete') {
			const deletedKeys = await service.deleteMany(args.keys);

			return {
				type: 'text',
				data: deletedKeys,
			};
		}

		throw new Error('Invalid action.');
	},
});
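Note the precedence in the `update` branch of `folders.ts`: array data goes to `updateBatch`, explicit keys go to `updateMany`, and otherwise the sanitized query drives `updateByQuery`. A self-contained sketch of that dispatch order, with the service stubbed (not part of the diff):

```ts
// Standalone illustration of the update dispatch order used above.
// The service shape here is a stub; the real FoldersService comes from the API.
type Keys = (string | number)[];

async function dispatchUpdate(
	service: {
		updateBatch: (data: object[]) => Promise<Keys>;
		updateMany: (keys: Keys, data: object) => Promise<Keys>;
		updateByQuery: (query: object, data: object) => Promise<Keys>;
	},
	args: { data: object | object[]; keys?: Keys },
	query: object,
): Promise<Keys> {
	if (Array.isArray(args.data)) return service.updateBatch(args.data); // 1. batch items carry their own ids
	if (args.keys) return service.updateMany(args.keys, args.data); // 2. one payload applied to listed keys
	return service.updateByQuery(query, args.data); // 3. one payload applied to the query's matches
}
```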
34
api/src/mcp/tools/index.ts
Normal file
@@ -0,0 +1,34 @@
import type { ToolConfig } from '../types.js';
import { assets } from './assets.js';
import { collections } from './collections.js';
import { fields } from './fields.js';
import { files } from './files.js';
import { flows } from './flows.js';
import { folders } from './folders.js';
import { items } from './items.js';
import { operations } from './operations.js';
import { relations } from './relations.js';
import { schema } from './schema.js';
import { system } from './system.js';
import { triggerFlow } from './trigger-flow.js';

export const ALL_TOOLS: ToolConfig<any>[] = [
	system,
	items,
	files,
	folders,
	assets,
	flows,
	triggerFlow,
	operations,
	schema,
	collections,
	fields,
	relations,
];

export const getAllMcpTools = () => ALL_TOOLS;

export const findMcpTool = (name: string) => ALL_TOOLS.find((tool) => tool.name === name);

export { collections, fields, files, flows, items, operations, relations, schema, system, triggerFlow };
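`index.ts` keeps the registry as a plain array, so `findMcpTool` is a linear scan over at most a dozen entries. A hedged usage sketch; the surrounding tools/call plumbing is assumed, not shown in this PR:

```ts
import { findMcpTool } from './index.js';

// Resolve the tool a client named in an MCP tools/call request.
const tool = findMcpTool('folders');

if (!tool) {
	throw new Error('Unknown tool');
}

// Tools such as `flows` and `operations` carry `admin: true`; a server would
// check that flag against the caller's accountability before invoking handler().
console.log(tool.name);
```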
605
api/src/mcp/tools/items.test.ts
Normal file
@@ -0,0 +1,605 @@
import { ForbiddenError, InvalidPayloadError } from '@directus/errors';
import type { Accountability, SchemaOverview } from '@directus/types';
import { afterEach, beforeEach, describe, expect, test, vi, type MockedFunction } from 'vitest';
import { ItemsService } from '../../services/items.js';
import { items } from './items.js';

vi.mock('../../services/items.js');

describe('items tool', () => {
	const mockSchema = {
		collections: {
			test_collection: { singleton: false },
			singleton_collection: { singleton: true },
		},
		fields: {},
		relations: {},
	} as unknown as SchemaOverview;

	const mockAccountability = { user: 'test-user' } as Accountability;
	const mockSanitizedQuery = { fields: ['*'] };

	afterEach(() => {
		vi.clearAllMocks();
	});

	describe('item operations', () => {
		let mockItemsService: {
			createMany: MockedFunction<any>;
			readMany: MockedFunction<any>;
			readByQuery: MockedFunction<any>;
			readSingleton: MockedFunction<any>;
			updateMany: MockedFunction<any>;
			updateBatch: MockedFunction<any>;
			updateByQuery: MockedFunction<any>;
			upsertSingleton: MockedFunction<any>;
			deleteMany: MockedFunction<any>;
		};

		beforeEach(() => {
			mockItemsService = {
				createMany: vi.fn(),
				readMany: vi.fn(),
				readByQuery: vi.fn(),
				readSingleton: vi.fn(),
				updateMany: vi.fn(),
				updateBatch: vi.fn(),
				updateByQuery: vi.fn(),
				upsertSingleton: vi.fn(),
				deleteMany: vi.fn(),
			};

			vi.mocked(ItemsService).mockImplementation(() => mockItemsService as unknown as ItemsService);
		});

		describe('CREATE action', () => {
			test('should create single item in regular collection', async () => {
				const item = { title: 'Test Item', status: 'published' };
				const savedKeys = [1];
				const createdItem = { id: 1, title: 'Test Item', status: 'published' };

				mockItemsService.createMany.mockResolvedValue(savedKeys);
				mockItemsService.readMany.mockResolvedValue([createdItem]);

				const result = await items.handler({
					args: { action: 'create', collection: 'test_collection', data: item },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.createMany).toHaveBeenCalledWith([item]);
				expect(mockItemsService.readMany).toHaveBeenCalledWith(savedKeys, mockSanitizedQuery);

				expect(result).toEqual({
					type: 'text',
					data: [createdItem],
				});
			});

			test('should create multiple items in regular collection', async () => {
				const data = [
					{ title: 'Item 1', status: 'published' },
					{ title: 'Item 2', status: 'draft' },
				];

				const savedKeys = [1, 2];

				const createdItems = [
					{ id: 1, title: 'Item 1', status: 'published' },
					{ id: 2, title: 'Item 2', status: 'draft' },
				];

				mockItemsService.createMany.mockResolvedValue(savedKeys);
				mockItemsService.readMany.mockResolvedValue(createdItems);

				const result = await items.handler({
					args: { action: 'create', collection: 'test_collection', data: data },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.createMany).toHaveBeenCalledWith(data);

				expect(result).toEqual({
					type: 'text',
					data: createdItems,
				});
			});

			test('should handle singleton collection creation', async () => {
				const item = { setting_name: 'site_title', value: 'My Site' };
				const singletonItem = { id: 1, setting_name: 'site_title', value: 'My Site' };

				mockItemsService.readSingleton.mockResolvedValue(singletonItem);

				const result = await items.handler({
					args: { action: 'create', collection: 'singleton_collection', data: item },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.upsertSingleton).toHaveBeenCalledWith(item);
				expect(mockItemsService.readSingleton).toHaveBeenCalledWith(mockSanitizedQuery);

				expect(result).toEqual({
					type: 'text',
					data: singletonItem,
				});
			});

			test('should return null when no item is created', async () => {
				mockItemsService.createMany.mockResolvedValue([]);
				mockItemsService.readMany.mockResolvedValue(null);

				const result = await items.handler({
					args: { action: 'create', collection: 'test_collection', data: { title: 'Test' } },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(result).toEqual({
					type: 'text',
					data: null,
				});
			});
		});

		describe('READ action', () => {
			test('should read all items when no keys provided', async () => {
				const data = [
					{ id: 1, title: 'Item 1' },
					{ id: 2, title: 'Item 2' },
				];

				mockItemsService.readByQuery.mockResolvedValue(data);

				const result = await items.handler({
					args: { action: 'read', collection: 'test_collection' },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.readByQuery).toHaveBeenCalledWith(mockSanitizedQuery);

				expect(result).toEqual({
					type: 'text',
					data: data,
				});
			});

			test('should read specific items by keys', async () => {
				const keys = [1, 2];

				const data = [
					{ id: 1, title: 'Item 1' },
					{ id: 2, title: 'Item 2' },
				];

				mockItemsService.readMany.mockResolvedValue(data);

				const result = await items.handler({
					args: { action: 'read', collection: 'test_collection', keys },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.readMany).toHaveBeenCalledWith(keys, mockSanitizedQuery);

				expect(result).toEqual({
					type: 'text',
					data: data,
				});
			});

			test('should read singleton item', async () => {
				const singletonItem = { id: 1, setting: 'value' };

				mockItemsService.readSingleton.mockResolvedValue(singletonItem);

				const result = await items.handler({
					args: { action: 'read', collection: 'singleton_collection' },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.readSingleton).toHaveBeenCalledWith(mockSanitizedQuery);
				expect(result).toEqual({ type: 'text', data: singletonItem });
			});

			test('should return null when no items found', async () => {
				mockItemsService.readByQuery.mockResolvedValue(null);

				const result = await items.handler({
					args: { action: 'read', collection: 'test_collection' },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(result).toEqual({
					type: 'text',
					data: null,
				});
			});
		});

		describe('UPDATE action', () => {
			test('should update items by keys', async () => {
				const keys = [1, 2];
				const updateData = { status: 'published' };

				const updatedItems = [
					{ id: 1, title: 'Item 1', status: 'published' },
					{ id: 2, title: 'Item 2', status: 'published' },
				];

				mockItemsService.updateMany.mockResolvedValue(keys);
				mockItemsService.readMany.mockResolvedValue(updatedItems);

				const result = await items.handler({
					args: { action: 'update', collection: 'test_collection', keys, data: updateData },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.updateMany).toHaveBeenCalledWith(keys, updateData);
				expect(mockItemsService.readMany).toHaveBeenCalledWith(keys, mockSanitizedQuery);

				expect(result).toEqual({
					type: 'text',
					data: updatedItems,
				});
			});

			test('should handle batch update with array data', async () => {
				const updateData = [
					{ id: 1, title: 'Updated Item 1' },
					{ id: 2, title: 'Updated Item 2' },
				];

				const updatedKeys = [1, 2];

				mockItemsService.updateBatch.mockResolvedValue(updatedKeys);
				mockItemsService.readMany.mockResolvedValue(updateData);

				const result = await items.handler({
					args: { action: 'update', collection: 'test_collection', data: updateData },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.updateBatch).toHaveBeenCalledWith(updateData);

				expect(result).toEqual({
					type: 'text',
					data: updateData,
				});
			});

			test('should update by query when no keys provided', async () => {
				const updateData = { status: 'archived' };
				const updatedKeys = [1, 2, 3];

				const updatedItems = [
					{ id: 1, status: 'archived' },
					{ id: 2, status: 'archived' },
					{ id: 3, status: 'archived' },
				];

				mockItemsService.updateByQuery.mockResolvedValue(updatedKeys);
				mockItemsService.readMany.mockResolvedValue(updatedItems);

				const result = await items.handler({
					args: { action: 'update', collection: 'test_collection', data: updateData },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.updateByQuery).toHaveBeenCalledWith(mockSanitizedQuery, updateData);

				expect(result).toEqual({
					type: 'text',
					data: updatedItems,
				});
			});

			test('should update singleton item', async () => {
				const updateData = { value: 'Updated Value' };
				const updatedSingleton = { id: 1, setting: 'test', value: 'Updated Value' };

				mockItemsService.readSingleton.mockResolvedValue(updatedSingleton);

				const result = await items.handler({
					args: { action: 'update', collection: 'singleton_collection', data: updateData },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.upsertSingleton).toHaveBeenCalledWith(updateData);
				expect(mockItemsService.readSingleton).toHaveBeenCalledWith(mockSanitizedQuery);

				expect(result).toEqual({
					type: 'text',
					data: updatedSingleton,
				});
			});
		});

		describe('DELETE action', () => {
			test('should delete items by keys', async () => {
				const keys = [1, 2, 3];

				mockItemsService.deleteMany.mockResolvedValue(keys);

				const result = await items.handler({
					args: { action: 'delete', collection: 'test_collection', keys },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.deleteMany).toHaveBeenCalledWith(keys);

				expect(result).toEqual({
					type: 'text',
					data: keys,
				});
			});

			test('should handle empty keys array', async () => {
				const keys: number[] = [];

				mockItemsService.deleteMany.mockResolvedValue(keys);

				const result = await items.handler({
					args: { action: 'delete', collection: 'test_collection', keys },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.deleteMany).toHaveBeenCalledWith(keys);

				expect(result).toEqual({
					type: 'text',
					data: keys,
				});
			});
		});
	});

	describe('error handling', () => {
		let mockItemsService: {
			createMany: MockedFunction<any>;
			readMany: MockedFunction<any>;
			readByQuery: MockedFunction<any>;
			readSingleton: MockedFunction<any>;
			updateMany: MockedFunction<any>;
			updateBatch: MockedFunction<any>;
			updateByQuery: MockedFunction<any>;
			upsertSingleton: MockedFunction<any>;
			deleteMany: MockedFunction<any>;
		};

		beforeEach(() => {
			mockItemsService = {
				createMany: vi.fn(),
				readMany: vi.fn(),
				readByQuery: vi.fn(),
				readSingleton: vi.fn(),
				updateMany: vi.fn(),
				updateBatch: vi.fn(),
				updateByQuery: vi.fn(),
				upsertSingleton: vi.fn(),
				deleteMany: vi.fn(),
			};

			vi.mocked(ItemsService).mockImplementation(() => mockItemsService as unknown as ItemsService);
		});

		test('should throw InvalidPayloadError for system collections', async () => {
			await expect(
				items.handler({
					args: { action: 'read', collection: 'directus_users' },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				}),
			).rejects.toThrow(InvalidPayloadError);
		});

		test('should throw ForbiddenError for non-existent collections', async () => {
			await expect(
				items.handler({
					args: { action: 'read', collection: 'nonexistent' },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				}),
			).rejects.toThrow(ForbiddenError);
		});

		test('should throw error for invalid action', async () => {
			await expect(
				items.handler({
					args: { action: 'invalid' as any, collection: 'test_collection' },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				}),
			).rejects.toThrow('Invalid action.');
		});

		test('should propagate ItemsService errors', async () => {
			const serviceError = new Error('Database connection failed');
			mockItemsService.readByQuery.mockRejectedValue(serviceError);

			await expect(
				items.handler({
					args: { action: 'read', collection: 'test_collection' },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				}),
			).rejects.toThrow('Database connection failed');
		});
	});

	describe('meta', () => {
		let mockItemsService: {
			createMany: MockedFunction<any>;
			readMany: MockedFunction<any>;
			readByQuery: MockedFunction<any>;
			readSingleton: MockedFunction<any>;
			updateMany: MockedFunction<any>;
			updateBatch: MockedFunction<any>;
			updateByQuery: MockedFunction<any>;
			upsertSingleton: MockedFunction<any>;
			deleteMany: MockedFunction<any>;
		};

		beforeEach(() => {
			mockItemsService = {
				createMany: vi.fn(),
				readMany: vi.fn(),
				readByQuery: vi.fn(),
				readSingleton: vi.fn(),
				updateMany: vi.fn(),
				updateBatch: vi.fn(),
				updateByQuery: vi.fn(),
				upsertSingleton: vi.fn(),
				deleteMany: vi.fn(),
			};

			vi.mocked(ItemsService).mockImplementation(() => mockItemsService as unknown as ItemsService);
		});

		describe('construction', () => {
			test('should create ItemsService with correct parameters', async () => {
				await items.handler({
					args: { action: 'read', collection: 'test_collection' },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(ItemsService).toHaveBeenCalledWith('test_collection', {
					schema: mockSchema,
					accountability: mockAccountability,
				});
			});
		});

		describe('schema validation', () => {
			test('should validate create action with required data', () => {
				const validInput = {
					action: 'create',
					collection: 'test_collection',
					data: { title: 'Test Item' },
				};

				expect(() => items.validateSchema?.parse(validInput)).not.toThrow();
			});

			test('should validate read action with optional keys', () => {
				const validInput = {
					action: 'read',
					collection: 'test_collection',
					keys: [1, 2, 3],
				};

				expect(() => items.validateSchema?.parse(validInput)).not.toThrow();
			});

			test('should validate update action with data and keys', () => {
				const validInput = {
					action: 'update',
					collection: 'test_collection',
					data: { status: 'published' },
					keys: [1, 2],
				};

				expect(() => items.validateSchema?.parse(validInput)).not.toThrow();
			});

			test('should validate delete action with required keys', () => {
				const validInput = {
					action: 'delete',
					collection: 'test_collection',
					keys: [1, 2, 3],
				};

				expect(() => items.validateSchema?.parse(validInput)).not.toThrow();
			});

			test('should reject invalid action types', () => {
				const invalidInput = {
					action: 'invalid',
					collection: 'test_collection',
				};

				expect(() => items.validateSchema?.parse(invalidInput)).toThrow();
			});
		});

		describe('singleton handling', () => {
			test('should correctly identify singleton collections', async () => {
				mockItemsService.readSingleton.mockResolvedValue({ id: 1, value: 'test' });

				await items.handler({
					args: { action: 'read', collection: 'singleton_collection' },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.readSingleton).toHaveBeenCalled();
				expect(mockItemsService.readByQuery).not.toHaveBeenCalled();
			});

			test('should handle missing singleton flag as false', async () => {
				mockItemsService.readByQuery.mockResolvedValue([]);

				await items.handler({
					args: { action: 'read', collection: 'test_collection' },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockItemsService.readByQuery).toHaveBeenCalled();
				expect(mockItemsService.readSingleton).not.toHaveBeenCalled();
			});
		});
	});

	describe('tool configuration', () => {
		test('should have correct tool name', () => {
			expect(items.name).toBe('items');
		});

		test('should not be admin tool', () => {
			expect(items.admin).toBeUndefined();
		});

		test('should have description', () => {
			expect(items.description).toBeDefined();
		});

		test('should have input and validation schemas', () => {
			expect(items.inputSchema).toBeDefined();
			expect(items.validateSchema).toBeDefined();
		});
	});
});
175
api/src/mcp/tools/items.ts
Normal file
@@ -0,0 +1,175 @@
import { ForbiddenError, InvalidPayloadError } from '@directus/errors';
import { isSystemCollection } from '@directus/system-data';
import type { PrimaryKey } from '@directus/types';
import { toArray } from '@directus/utils';
import { isObject } from 'graphql-compose';
import { z } from 'zod';
import { ItemsService } from '../../services/items.js';
import { defineTool } from '../define.js';
import {
	ItemInputSchema,
	ItemValidateSchema,
	PrimaryKeyInputSchema,
	PrimaryKeyValidateSchema,
	QueryInputSchema,
	QueryValidateSchema,
} from '../schema.js';
import prompts from './prompts/index.js';

const PartialItemInputSchema = z.strictObject({
	collection: z.string(),
});

const ItemsValidateSchema = z.discriminatedUnion('action', [
	PartialItemInputSchema.extend({
		action: z.literal('create'),
		data: z.union([z.array(ItemValidateSchema), ItemValidateSchema]),
		query: QueryValidateSchema.optional(),
	}),
	PartialItemInputSchema.extend({
		action: z.literal('read'),
		keys: z.array(PrimaryKeyValidateSchema).optional(),
		query: QueryValidateSchema.optional(),
	}),
	PartialItemInputSchema.extend({
		action: z.literal('update'),
		data: z.union([z.array(ItemValidateSchema), ItemValidateSchema]),
		keys: z.array(PrimaryKeyValidateSchema).optional(),
		query: QueryValidateSchema.optional(),
	}),
	PartialItemInputSchema.extend({
		action: z.literal('delete'),
		keys: z.array(PrimaryKeyValidateSchema),
	}),
]);

const ItemsInputSchema = z.object({
	action: z.enum(['create', 'read', 'update', 'delete']).describe('The operation to perform'),
	collection: z.string().describe('The name of the collection'),
	query: QueryInputSchema.optional(),
	keys: z.array(PrimaryKeyInputSchema).optional(),
	data: z.array(ItemInputSchema).optional(),
});

export const items = defineTool<z.infer<typeof ItemsValidateSchema>>({
	name: 'items',
	description: prompts.items,
	annotations: {
		title: 'Directus - Items',
	},
	inputSchema: ItemsInputSchema,
	validateSchema: ItemsValidateSchema,
	endpoint({ input, data }) {
		if (!isObject(data) || !('id' in data)) {
			return;
		}

		return ['content', input.collection, data['id']];
	},
	async handler({ args, schema, accountability, sanitizedQuery }) {
		if (isSystemCollection(args.collection)) {
			throw new InvalidPayloadError({ reason: 'Cannot provide a core collection' });
		}

		if (args.collection in schema.collections === false) {
			throw new ForbiddenError();
		}

		const isSingleton = schema.collections[args.collection]?.singleton ?? false;

		const itemsService = new ItemsService(args.collection, {
			schema,
			accountability,
		});

		if (args.action === 'create') {
			const data = toArray(args.data);

			if (isSingleton) {
				if (Array.isArray(args.data)) {
					throw new InvalidPayloadError({ reason: 'Invalid data payload, object expected' });
				}

				await itemsService.upsertSingleton(args.data);

				const item = await itemsService.readSingleton(sanitizedQuery);

				return {
					type: 'text',
					data: item || null,
				};
			}

			const savedKeys = await itemsService.createMany(data);

			const result = await itemsService.readMany(savedKeys, sanitizedQuery);

			return {
				type: 'text',
				data: result || null,
			};
		}

		if (args.action === 'read') {
			let result = null;

			if (isSingleton) {
				result = await itemsService.readSingleton(sanitizedQuery);
			} else if (args.keys) {
				result = await itemsService.readMany(args.keys, sanitizedQuery);
			} else {
				result = await itemsService.readByQuery(sanitizedQuery);
			}

			return {
				type: 'text',
				data: result,
			};
		}

		if (args.action === 'update') {
			if (isSingleton) {
				if (Array.isArray(args.data)) {
					throw new InvalidPayloadError({ reason: 'Invalid data payload, object expected' });
				}

				await itemsService.upsertSingleton(args.data);

				const item = await itemsService.readSingleton(sanitizedQuery);

				return {
					type: 'text',
					data: item || null,
				};
			}

			let updatedKeys: PrimaryKey[] = [];

			if (Array.isArray(args.data)) {
				updatedKeys = await itemsService.updateBatch(args.data);
			} else if (args.keys) {
				updatedKeys = await itemsService.updateMany(args.keys, args.data);
			} else {
				updatedKeys = await itemsService.updateByQuery(sanitizedQuery, args.data);
			}

			const result = await itemsService.readMany(updatedKeys, sanitizedQuery);

			return {
				type: 'text',
				data: result,
			};
		}

		if (args.action === 'delete') {
			const deletedKeys = await itemsService.deleteMany(args.keys);

			return {
				type: 'text',
				data: deletedKeys,
			};
		}

		throw new Error('Invalid action.');
	},
});
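The `items` handler above keys every branch off the collection's `singleton` flag in the schema overview, defaulting a missing flag to `false` (the tests rely on this). A simplified, standalone sketch of that check:

```ts
// Simplified sketch of the singleton check driving the branches above.
type MinimalSchema = { collections: Record<string, { singleton?: boolean }> };

function isSingletonCollection(schema: MinimalSchema, collection: string): boolean {
	// Mirrors `schema.collections[args.collection]?.singleton ?? false` in the handler:
	// a missing flag is treated as a regular (multi-item) collection.
	return schema.collections[collection]?.singleton ?? false;
}

const schema: MinimalSchema = {
	collections: { settings: { singleton: true }, articles: {} },
};

console.log(isSingletonCollection(schema, 'settings')); // true  -> upsertSingleton/readSingleton
console.log(isSingletonCollection(schema, 'articles')); // false -> createMany/readByQuery/...
```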
180
api/src/mcp/tools/operations.test.ts
Normal file
@@ -0,0 +1,180 @@
import type { Accountability, SchemaOverview } from '@directus/types';
import { afterEach, beforeEach, describe, expect, test, vi, type MockedFunction } from 'vitest';
import { OperationsService } from '../../services/operations.js';
import { operations } from './operations.js';

vi.mock('../../services/operations.js');

vi.mock('../tool.js', () => ({
	defineTool: vi.fn((config) => config),
}));

describe('operations tool', () => {
	const mockSchema = { collections: {}, fields: {}, relations: {} } as unknown as SchemaOverview;
	const mockAccountability = { user: 'test-user' } as Accountability;
	const mockSanitizedQuery = { fields: ['*'] };

	afterEach(() => {
		vi.clearAllMocks();
	});

	describe('operation operations', () => {
		let mockOperationsService: {
			createOne: MockedFunction<any>;
			readOne: MockedFunction<any>;
			readByQuery: MockedFunction<any>;
			updateOne: MockedFunction<any>;
			deleteOne: MockedFunction<any>;
		};

		beforeEach(() => {
			mockOperationsService = {
				createOne: vi.fn(),
				readOne: vi.fn(),
				readByQuery: vi.fn(),
				updateOne: vi.fn(),
				deleteOne: vi.fn(),
			};

			vi.mocked(OperationsService).mockImplementation(() => mockOperationsService as unknown as OperationsService);
		});

		describe('CREATE action', () => {
			test('should create an operation and return the result', async () => {
				const mockOperationData = {
					name: 'Test Operation',
					type: 'log',
					flow: 'flow-123',
				};

				const mockCreatedKey = 'operation-123';
				const mockCreatedOperation = { id: mockCreatedKey, ...mockOperationData };

				mockOperationsService.createOne.mockResolvedValue(mockCreatedKey);
				mockOperationsService.readOne.mockResolvedValue(mockCreatedOperation);

				const result = await operations.handler({
					args: { action: 'create', data: mockOperationData },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockOperationsService.createOne).toHaveBeenCalledWith(mockOperationData);
				expect(mockOperationsService.readOne).toHaveBeenCalledWith(mockCreatedKey);

				expect(result).toEqual({
					type: 'text',
					data: mockCreatedOperation,
				});
			});
		});

		describe('READ action', () => {
			test('should read operations by query', async () => {
				const mockOperations = [
					{ id: 'op-1', name: 'Operation 1', type: 'log' },
					{ id: 'op-2', name: 'Operation 2', type: 'webhook' },
				];

				mockOperationsService.readByQuery.mockResolvedValue(mockOperations);

				const result = await operations.handler({
					args: { action: 'read' },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockOperationsService.readByQuery).toHaveBeenCalledWith(mockSanitizedQuery);

				expect(result).toEqual({
					type: 'text',
					data: mockOperations,
				});
			});
		});

		describe('UPDATE action', () => {
			test('should update an operation and return the updated result', async () => {
				const mockKey = 'operation-123';
				const mockUpdateData = { name: 'Updated Operation' };
				const mockUpdatedOperation = { id: mockKey, ...mockUpdateData };

				mockOperationsService.updateOne.mockResolvedValue(mockKey);
				mockOperationsService.readOne.mockResolvedValue(mockUpdatedOperation);

				const result = await operations.handler({
					args: { action: 'update', key: mockKey, data: mockUpdateData },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockOperationsService.updateOne).toHaveBeenCalledWith(mockKey, mockUpdateData);
				expect(mockOperationsService.readOne).toHaveBeenCalledWith(mockKey, mockSanitizedQuery);

				expect(result).toEqual({
					type: 'text',
					data: mockUpdatedOperation,
				});
			});
		});

		describe('DELETE action', () => {
			test('should delete an operation and return the deleted key', async () => {
				const mockKey = 'operation-123';

				mockOperationsService.deleteOne.mockResolvedValue(mockKey);

				const result = await operations.handler({
					args: { action: 'delete', key: mockKey },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockOperationsService.deleteOne).toHaveBeenCalledWith(mockKey);

				expect(result).toEqual({
					type: 'text',
					data: mockKey,
				});
			});
		});
	});

	describe('error handling', () => {
		test('should throw error for invalid action', async () => {
			await expect(
				operations.handler({
					args: {
						action: 'invalid' as any,
					},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				}),
			).rejects.toThrow('Invalid action.');
		});
	});

	describe('tool configuration', () => {
		test('should have correct tool name', () => {
			expect(operations.name).toBe('operations');
		});

		test('should be admin tool', () => {
			expect(operations.admin).toBe(true);
		});

		test('should have description', () => {
			expect(operations.description).toBeDefined();
		});

		test('should have input and validation schemas', () => {
			expect(operations.inputSchema).toBeDefined();
			expect(operations.validateSchema).toBeDefined();
		});
	});
});
95
api/src/mcp/tools/operations.ts
Normal file
@@ -0,0 +1,95 @@
import type { OperationRaw } from '@directus/types';
import { z } from 'zod';
import { OperationsService } from '../../services/operations.js';
import { defineTool } from '../define.js';
import {
	OperationItemInputSchema,
	OperationItemValidateSchema,
	QueryInputSchema,
	QueryValidateSchema,
} from '../schema.js';
import prompts from './prompts/index.js';

export const OperationsValidationSchema = z.discriminatedUnion('action', [
	z.strictObject({
		action: z.literal('create'),
		data: OperationItemValidateSchema,
	}),
	z.strictObject({
		action: z.literal('read'),
		query: QueryValidateSchema.optional(),
	}),
	z.strictObject({
		action: z.literal('update'),
		data: OperationItemValidateSchema,
		key: z.string(),
	}),
	z.strictObject({
		action: z.literal('delete'),
		key: z.string(),
	}),
]);

export const OperationsInputSchema = z.object({
	action: z.enum(['create', 'read', 'update', 'delete']).describe('The operation to perform'),
	query: QueryInputSchema.optional(),
	data: OperationItemInputSchema.optional(),
	key: z.string().optional(),
});

export const operations = defineTool<z.infer<typeof OperationsValidationSchema>>({
	name: 'operations',
	admin: true,
	description: prompts.operations,
	annotations: {
		title: 'Directus - Operations',
	},
	inputSchema: OperationsInputSchema,
	validateSchema: OperationsValidationSchema,
	async handler({ args, schema, accountability, sanitizedQuery }) {
		const operationService = new OperationsService({
			schema,
			accountability,
		});

		if (args.action === 'create') {
			const savedKey = await operationService.createOne(args.data);
			const result = await operationService.readOne(savedKey);

			return {
				type: 'text',
				data: result || null,
			};
		}

		if (args.action === 'read') {
			const result = await operationService.readByQuery(sanitizedQuery);

			return {
				type: 'text',
				data: result || null,
			};
		}

		if (args.action === 'update') {
			const updatedKey = await operationService.updateOne(args.key, args.data as OperationRaw);
			const result = await operationService.readOne(updatedKey, sanitizedQuery);

			return {
				type: 'text',
				data: result || null,
			};
		}

		if (args.action === 'delete') {
			const deletedKey = await operationService.deleteOne(args.key);

			return {
				type: 'text',
				data: deletedKey,
			};
		}

		throw new Error('Invalid action.');
	},
});
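`operations`, like `flows`, is declared with `admin: true`. The enforcement lives in the MCP server layer rather than in this file; a hedged sketch of the kind of filtering that flag implies (the `ToolLike` shape is simplified):

```ts
// Hedged sketch: how an admin flag like the one above could gate tool visibility.
// The real enforcement lives in the MCP server code, not in this file.
type ToolLike = { name: string; admin?: boolean };

function visibleTools(tools: ToolLike[], userIsAdmin: boolean): ToolLike[] {
	// Non-admin users only see tools that are not admin-gated.
	return tools.filter((tool) => userIsAdmin || tool.admin !== true);
}

const tools: ToolLike[] = [{ name: 'folders' }, { name: 'operations', admin: true }];
console.log(visibleTools(tools, false).map((t) => t.name)); // ['folders']
```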
8
api/src/mcp/tools/prompts/assets.md
Normal file
@@ -0,0 +1,8 @@
Retrieve base64-encoded file content from Directus. Returns raw file data suitable for AI vision models, image analysis, and file operations.

**Input**: `{"id": "file-uuid"}`

**Output**: `{"data": "base64-string", "mimeType": "image/jpeg"}`

**Note**: Supports images and audio files. Respects Directus permissions.
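Given the `{"data": ..., "mimeType": ...}` output shape documented above, a consumer might decode the payload like this (a sketch under the assumption of a Node.js MCP client; `result` stands in for the tool-call response):

```ts
import { writeFile } from 'node:fs/promises';

// `result` stands in for the assets tool output documented above.
const result = { data: 'aGVsbG8=', mimeType: 'image/jpeg' };

const bytes = Buffer.from(result.data, 'base64'); // decode the base64 payload
const extension = result.mimeType.split('/')[1] ?? 'bin';

await writeFile(`asset.${extension}`, bytes);
```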
336
api/src/mcp/tools/prompts/collections.md
Normal file
@@ -0,0 +1,336 @@
Perform CRUD operations on Directus Collections.

<actions>

- `create`: Add new collections
- `read`: Retrieve available collections
- `update`: Modify existing collections
- `delete`: Remove collections

</actions>

<collection_structure>

### Collection Structure

```json
{
  "collection": "products",
  "meta": {
"collection": "ai_prompts",
|
||||
"icon": "inventory_2", // Any Material Symbols icons
|
||||
"note": "Main product catalog with inventory tracking" // Helpful 1 sentence description
|
||||
"color": "#6366F1", // Color shown in content module sidebar
|
||||
"singleton": false, // Single-item collections (settings, globals)
|
||||
"hidden": false, // Hide from navigation
|
||||
"accountability": "all", // Track who activity and revisions (`"all"`, `"activity"`, `null`)
|
||||
"sort_field": "sort", // Default sorting field (auto-creates if needed)
    "archive_app_filter": true, // Enable soft delete in the app
    "archive_field": "status", // Field used for archiving (status, deleted_at, etc.)
    "archive_value": "archived", // Value that marks items as archived
    "unarchive_value": "published", // Value that marks items as active
    "display_template": "{{name}} - ${{price}}",
    "versioning": false, // Enable content versioning for this collection
    "sort": 2, // Sort order for this collection
    "group": null, // Parent collection (use to group and nest collections in data model)
"collapse": "open" // Default collection to expanded or collapsed if child collections
    "preview_url": "https://store.example.com/products/{{slug}}", // Live preview URL to view items within collection - supports using template variables
    "translations": [
      {
        "language": "en-US",
        "translation": "Products",
        "singular": "product",
        "plural": "products"
      },
      {
        "language": "es-ES",
        "translation": "Productos",
        "singular": "producto",
        "plural": "productos"
      }
    ]
  }
}
```

</collection_structure>

<creating_collections>

- **Primary Keys**: Use UUID primary keys (see `fields` tool `<primary_keys>` section for detailed guidance)
- **System Fields**: Include system fields for content collections (see `<system_fields>` section below for complete template) unless specifically asked by the user to omit them.
- ALWAYS show the collection URL to the user if it is present in the result.
- When creating a new collection, include both collection settings and initial fields (see `fields` tool for complete examples).

### Basic Collection Example

```json
{
  "action": "create",
  "data": {
    "collection": "articles",
    "fields": [
      {
        "field": "id",
        "type": "uuid",
        "meta": { "special": ["uuid"], "hidden": true, "readonly": true, "interface": "input" },
        "schema": { "is_primary_key": true, "length": 36, "has_auto_increment": false }
      },
      {
        "field": "title",
        "type": "string",
        "meta": { "interface": "input", "required": true },
        "schema": { "is_nullable": false }
      }
    ],
    "schema": {}, // Always send empty object for new collection unless creating a folder collection
    "meta": {
      "singleton": false,
      "display_template": "{{title}}"
    }
  }
}
```

</creating_collections>

<system_fields>

### Complete System Fields Template

For content collections (blogs, products, pages), include these optional system fields for full CMS functionality:

```json
{
  "action": "create",
  "data": {
    "collection": "articles",
    "fields": [
      {
        "field": "id",
        "type": "uuid",
        "meta": {
          "hidden": true,
          "readonly": true,
          "interface": "input",
          "special": ["uuid"]
        },
        "schema": {
          "is_primary_key": true,
          "length": 36,
          "has_auto_increment": false
        }
      },
      {
        "field": "status",
        "type": "string",
        "meta": {
          "width": "full",
          "options": {
            // You might choose to customize these options based on the user's request
            "choices": [
              {
                "text": "$t:published",
                "value": "published",
                "color": "var(--theme--primary)"
              },
              {
                "text": "$t:draft",
                "value": "draft",
                "color": "var(--theme--foreground)"
              },
              {
                "text": "$t:archived",
                "value": "archived",
                "color": "var(--theme--warning)"
              }
            ]
          },
          "interface": "select-dropdown",
          "display": "labels",
          "display_options": {
            "showAsDot": true,
            "choices": [
              {
                "text": "$t:published",
                "value": "published",
                "color": "var(--theme--primary)",
                "foreground": "var(--theme--primary)",
                "background": "var(--theme--primary-background)"
              },
              {
                "text": "$t:draft",
                "value": "draft",
                "color": "var(--theme--foreground)",
                "foreground": "var(--theme--foreground)",
                "background": "var(--theme--background-normal)"
              },
              {
                "text": "$t:archived",
                "value": "archived",
                "color": "var(--theme--warning)",
                "foreground": "var(--theme--warning)",
                "background": "var(--theme--warning-background)"
              }
            ]
          }
        },
        "schema": {
          "default_value": "draft",
          "is_nullable": false
        }
      },
      {
        "field": "sort",
        "type": "integer",
        "meta": {
          "interface": "input",
          "hidden": true
        },
        "schema": {}
      },
      {
        "field": "user_created",
        "type": "uuid",
        "meta": {
          "special": ["user-created"],
          "interface": "select-dropdown-m2o",
          "options": {
            "template": "{{avatar}} {{first_name}} {{last_name}}"
          },
          "display": "user",
          "readonly": true,
          "hidden": true,
          "width": "half"
        },
        "schema": {}
      },
      {
        "field": "date_created",
        "type": "timestamp",
        "meta": {
          "special": ["date-created"],
          "interface": "datetime",
          "readonly": true,
          "hidden": true,
          "width": "half",
          "display": "datetime",
          "display_options": {
            "relative": true
          }
        },
        "schema": {}
      },
      {
        "field": "user_updated",
        "type": "uuid",
        "meta": {
          "special": ["user-updated"],
          "interface": "select-dropdown-m2o",
          "options": {
            "template": "{{avatar}} {{first_name}} {{last_name}}"
          },
          "display": "user",
          "readonly": true,
          "hidden": true,
          "width": "half"
        },
        "schema": {}
      },
      {
        "field": "date_updated",
        "type": "timestamp",
        "meta": {
          "special": ["date-updated"],
          "interface": "datetime",
          "readonly": true,
          "hidden": true,
          "width": "half",
          "display": "datetime",
          "display_options": {
            "relative": true
          }
        },
        "schema": {}
      }
    ],
    "schema": {}, // Always send empty object for new collection unless creating a folder collection
    "meta": {
      "sort_field": "sort",
      "archive_field": "status",
      "archive_value": "archived",
      "unarchive_value": "draft",
      "singleton": false
    }
  }
}
```

**System Fields Explained:**

- `status` - Content workflow (draft/published/archived) with visual indicators
- `sort` - Manual ordering capability (used with `sort_field` in collection meta)
- `user_created`/`user_updated` - Track content authors and editors (requires relations to `directus_users`)
- `date_created`/`date_updated` - Automatic timestamps for content lifecycle tracking

**Required Relations for User Fields:** After creating the collection, add relations for user tracking fields (use `relations` tool):

```json
// User created relation
{
  "action": "create",
  "data": {
    "collection": "articles",
    "field": "user_created",
    "related_collection": "directus_users",
    "schema": {}
  }
}

// User updated relation
{
  "action": "create",
  "data": {
    "collection": "articles",
    "field": "user_updated",
    "related_collection": "directus_users",
    "schema": {}
  }
}
```

</system_fields>

<translations>

For collection name translations, check for `languages` collection first, then provide collection names in available languages (similar to field translations - see `fields` tool `<translations>` section for translation workflow).

```json
{
  "meta": {
    "translations": [
      { "language": "en-US", "translation": "Products", "singular": "product", "plural": "products" },
      { "language": "es-ES", "translation": "Productos", "singular": "producto", "plural": "productos" }
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</translations>
|
||||
|
||||
<display_templates>
|
||||
|
||||
Control how collection items appear in relationships and lists:
|
||||
|
||||
```json
|
||||
{
|
||||
"meta": {
|
||||
"display_template": "{{name}} - {{category}} ({{status}})"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Template Variables:**
|
||||
|
||||
- `{{field_name}}` - Any field from the collection
|
||||
- `{{field_name.nested}}` - Access nested object properties </display_templates>
|
||||
521
api/src/mcp/tools/prompts/fields.md
Normal file
@@ -0,0 +1,521 @@
Perform CRUD operations on Directus Fields.

<actions>
- `create`: Add one or multiple fields to a collection
- `read`: View field configurations
- `update`: Update one or multiple fields
- `delete`: Remove fields
</actions>

<field_types>

- **Text**: `string` (max 255 chars), `text` (unlimited), `uuid` (relations/IDs), `hash` (passwords)
- **Numeric**: `integer`, `bigInteger`, `float`, `decimal` (for financial precision)
- **Date/Time**: `timestamp`, `datetime`, `date`, `time`
- **Boolean**: `boolean` for toggles/flags
- **Structured**: `json` (complex data), `csv` (tags/lists)
- **Alias**: Virtual fields for relations (`o2m`, `m2m`, `m2a`, `files`, `translations`)
- **Geospatial**: `point`, `lineString`, `polygon` for maps </field_types>

<adding_fields>

**Important**: When using the `fields` tool, `data` must always be an array of field objects, even for a single field.
Make sure you include `meta` and `schema` objects for each field.

Add fields to existing collections:

```json
{
  "action": "create",
  "collection": "articles",
  "data": [
    {
      "field": "excerpt",
      "type": "text",
      "meta": {
        "interface": "input-rich-text-md",
        "special": null,
        "note": "Article excerpt for previews and SEO. Supports markdown formatting.",
        "translations": [
          {
            "language": "en-US",
            "translation": "Excerpt"
          }
        ],
        "options": {
          "placeholder": null,
          "customSyntax": null
        },
        "display": "formatted-value",
        "display_options": { "format": true }
      },
      "schema": {
        "name": "excerpt",
        "table": "articles",
        "data_type": "text"
      }
    }
  ]
}
```

**Multiple Fields Example:**

```json
{
  "action": "create",
  "collection": "articles",
  "data": [
    {
      "field": "title",
      "type": "string"
      // Rest of field data
    },
    {
      "field": "content",
      "type": "text"
      // Rest of field data
    }
  ]
}
```

**Note**: You can omit `null` or `false` values from the schema object. </adding_fields>

<relationship_fields>

**CRITICAL**: Field type and meta.special determine relationship behavior.

- **M2O**: `type: "uuid"`, `special: ["m2o"]`, interface: `select-dropdown-m2o` → then create relation
- **O2M**: `type: "alias"`, `special: ["o2m"]`, interface: `list-o2m` → auto-created with M2O
- **M2M**: `type: "alias"`, `special: ["m2m"]`, interface: `list-m2m` → needs junction collection
- **M2A**: `type: "alias"`, `special: ["m2a"]`, interface: `list-m2a` → polymorphic, needs junction
- **File**: `type: "uuid"`, `special: ["file"]`, interface: `file` or `file-image` → single file relation
- **Files**: `type: "alias"`, `special: ["files"]`, interface: `files` → multiple files via M2M
- **Translations**: `type: "alias"`, `special: ["translations"]`, interface: `translations` → special M2M

### M2O Field Example

```json
{
  "collection": "posts",
  "field": "author",
  "type": "uuid",
  "schema": {
    "name": "author",
    "table": "posts",
    "data_type": "uuid",
    "is_nullable": true,
    "foreign_key_schema": "public",
    "foreign_key_table": "team",
    "foreign_key_column": "id"
  },
  "meta": {
    "collection": "posts",
    "field": "author",
    "special": ["m2o"],
    "interface": "select-dropdown-m2o",
    "options": {
      "template": "{{image.$thumbnail}} {{name}}"
    },
    "display": "related-values",
    "display_options": {
      "template": "{{image.$thumbnail}} {{name}}"
    },
    "sort": 15,
    "width": "half"
  }
}
```

### O2M Field Example

```json
{
  "collection": "posts",
  "field": "comments",
  "type": "alias",
  "schema": null,
  "meta": {
    "collection": "posts",
    "field": "comments",
    "special": ["o2m"],
    "interface": "list-o2m",
    "options": {
      "template": "{{author}} - {{content}} ({{status}})"
    },
    "display": "related-values",
    "display_options": {
      "template": "{{author}} - {{content}} ({{status}})"
    },
    "sort": 10,
    "width": "full",
    "required": false,
    "group": null
  }
}
```

### M2M Field Example

```json
{
  "collection": "posts",
  "field": "categories",
  "type": "alias",
  "schema": null,
  "meta": {
    "collection": "posts",
    "field": "categories",
    "special": ["m2m"],
    "interface": "list-m2m",
    "options": {
      "template": "{{categories_id.name}} ({{categories_id.slug}})"
    },
    "display": "related-values",
    "display_options": {
      "template": "{{categories_id.name}} ({{categories_id.slug}})"
    },
    "sort": 9,
    "width": "full"
  }
}
```

### M2A Field Example

```json
{
  "collection": "pages",
  "field": "blocks",
  "type": "alias",
  "schema": null,
  "meta": {
    "collection": "pages",
    "field": "blocks",
    "special": ["m2a"],
    "interface": "list-m2a",
    "options": {},
    "display": "related-values",
    "display_options": {
      "template": "{{collection}}"
    },
    "sort": 8,
    "width": "full"
  }
}
```

### File Field Example

```json
{
  "collection": "posts",
  "field": "featured_image",
  "type": "uuid",
  "schema": {
    "name": "featured_image",
    "table": "posts",
    "data_type": "uuid",
    "is_nullable": true,
    "foreign_key_schema": "public",
    "foreign_key_table": "directus_files",
    "foreign_key_column": "id"
  },
  "meta": {
    "collection": "posts",
    "field": "featured_image",
    "special": ["file"],
    "interface": "file-image",
    "options": {
      "folder": "post-images"
    },
    "display": "image",
    "display_options": null,
    "sort": 1,
    "width": "half",
    "required": false,
    "group": "media"
  }
}
```

### Files Field Example

```json
{
  "collection": "posts",
  "field": "gallery",
  "type": "alias",
  "schema": null,
  "meta": {
    "collection": "posts",
    "field": "gallery",
    "special": ["files"],
    "interface": "files",
    "options": null,
    "display": "related-values",
    "display_options": null,
    "sort": 4,
    "width": "full"
  }
}
```

### Translations Field Example

```json
{
  "collection": "posts",
  "field": "translations",
  "type": "alias",
  "schema": null,
  "meta": {
    "collection": "posts",
    "field": "translations",
    "special": ["translations"],
    "interface": "translations",
    "options": {
      "userLanguage": true,
      "defaultOpenSplitView": true
    },
    "display": "translations",
    "display_options": {
      "template": "{{title}}", // Field to display from the translated collection (ie post title)
      "languageField": "name" // Name of the language field from the languages collection
    },
    "sort": 22,
    "width": "full"
  }
}
```

**Note**: Alias fields don't need a schema object since they're virtual. </relationship_fields>

<primary_keys> **🎯 ALWAYS use UUID as the primary key for new collections unless integers or manually entered strings are
specifically requested by the user.**

**UUID Primary Key Template:**

```json
{
  "field": "id",
  "type": "uuid",
  "meta": { "hidden": true, "readonly": true, "interface": "input", "special": ["uuid"] },
  "schema": { "is_primary_key": true, "length": 36, "has_auto_increment": false }
}
```

</primary_keys>

<interfaces>
## Common Interfaces

**Text**: `input`, `input-multiline`, `input-rich-text-md`, `input-rich-text-html`, `input-hash`, `translations`
**Selection**: `select-dropdown`, `select-multiple-dropdown`, `select-radio`, `select-multiple-checkbox`, `tags`, `boolean`, `slider`
**Date/Time**: `datetime`, `date`, `time`
**Relational**: `select-dropdown-m2o`, `list-o2m`, `list-m2m`, `list-m2a`
**Files**: `file`, `files`, `file-image`
**Advanced**: `input-code`, `map`, `group-raw`, `group-detail`
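
For instance, a minimal sketch of configuring one of these interfaces on a field (the `priority` field and its choices are illustrative, following the `options.choices` shape used in the system fields template):

```json
{
  "field": "priority",
  "type": "string",
  "meta": {
    "interface": "select-dropdown",
    "options": {
      "choices": [
        { "text": "Low", "value": "low" },
        { "text": "High", "value": "high" }
      ]
    }
  },
  "schema": { "default_value": "low" }
}
```

</interfaces>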

<field_configuration>

### Layout

- **width**: `"half"` (380px max), `"full"` (760px max, default), `"fill"` (no limit)
- **sort**: Field order in forms
- **group**: Group related fields into collapsible sections (must be used with `alias` group fields)

### Schema Properties

- **default_value**: Default for new items
- **is_nullable**: Can be null
- **is_unique**: Must be unique
- **length**: Max length for strings

### Meta Properties

- **required**: Must have value
- **readonly**: Cannot edit after creation
- **hidden**: Hidden from UI (still in API)
- **validation**: Custom validation rules
- **validation_message**: Custom error messages
- **note**: Context for non-obvious fields
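
As a rough illustration, the sketch below combines these layout, schema, and meta properties on a single hypothetical `email` field (the validation rule is an assumption, modeled on the filter syntax in `<complete_example>` below):

```json
{
  "field": "email",
  "type": "string",
  "meta": {
    "interface": "input",
    "width": "half",
    "sort": 2,
    "required": true,
    "note": "Primary contact address",
    "validation": { "_and": [{ "email": { "_contains": "@" } }] },
    "validation_message": "Enter a valid email address"
  },
  "schema": { "is_nullable": false, "is_unique": true, "length": 255 }
}
```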

### Conditions

Dynamically control field behavior based on other field values:

```json
{
  "conditions": [
    {
      "name": "Hide If Author Is Null",
      "rule": {
        "_and": [
          {
            "author": {
              "_null": true
            }
          }
        ]
      },
      "hidden": true
    },
    {
      "name": "Required If Published",
      "rule": {
        "status": {
          "_eq": "published"
        }
      },
      "required": true
    }
  ]
}
```

**Condition Properties**:

- `name`: Description of the condition
- `rule`: Filter rules using Directus filter syntax
- Can set: `hidden`, `readonly`, `required`, or interface-specific options

**Common Rules**:

- `_null`: Check if field is null
- `_eq`: Equals specific value
- `_neq`: Not equals
- `_in`: Value in array
- `_and`/`_or`: Combine multiple conditions

### Special Fields

- `special: ["uuid"]`: Auto-generate UUID
- `special: ["user-created"]`: Track creating user
- `special: ["date-created"]`: Track creation time
- `special: ["user-updated"]`: Track last editor
- `special: ["date-updated"]`: Track last edit time
- `special: ["cast-json"]`: Cast JSON strings to objects </field_configuration>

<translations>
Field names can (and should) be translated for editors using the app.
Check for a `languages` collection first, then add field translations based on which languages are stored in the DB. If not 99% sure, confirm with the user first.

```json
"translations": [
  {"language": "en-US", "translation": "Title"},
  {"language": "es-ES", "translation": "Título"}
]
```

</translations>

<display_templates> Display templates can be customized to enhance the UX for editors.

```json
"display": "related-values",
"display_options": {
  "template": "{{first_name}} {{last_name}}"
}
```

**Display types**: `raw`, `formatted-value`, `labels`, `datetime`, `user`, `file`, `related-values` </display_templates>

<complete_example>

#### Complete Field Example with Advanced Features

This shows a real field configuration with validation, conditions, and all metadata (as returned from a read operation):

```json
{
  "collection": "block_button",
  "field": "url",
  "type": "string",
  "schema": {
    "name": "url",
    "table": "block_button",
    "data_type": "character varying", // Database-specific type
    "default_value": null,
    "generation_expression": null,
    "max_length": 255, // String length limit
    "numeric_precision": null,
    "numeric_scale": null,
    "is_generated": false,
    "is_nullable": true,
    "is_unique": false,
    "is_indexed": false,
    "is_primary_key": false,
    "has_auto_increment": false,
    "foreign_key_schema": null, // Would contain relation info for M2O fields
    "foreign_key_table": null,
    "foreign_key_column": null,
    "comment": null
  },
  "meta": {
    "id": 811, // Auto-generated field ID (not used in create)
    "collection": "block_button",
    "field": "url",
    "special": null, // No special behavior for this field
    "interface": "input",
    "options": {
      "iconLeft": "link", // Icon displayed in the input
      "trim": true // Remove whitespace on save
    },
    "display": "formatted-value",
    "display_options": {
      "format": true // Apply auto formatting based on field type
    },
    "readonly": false,
    "hidden": true, // Hidden by default, shown conditionally
    "sort": 11, // Field order in forms
    "width": "half",
    "translations": null, // No field name translations
    "note": "The URL to link to. Could be relative (ie `/my-page`) or a full external URL (ie `https://docs.directus.io`)",
    "conditions": [
      {
        "hidden": false, // Show field when condition is met
        "name": "If type = external",
        "options": {
          "clear": false,
          "font": "sans-serif",
          "masked": false,
          "slug": false,
          "trim": false
        },
        "rule": {
          "_and": [
            {
              "type": {
                // Show when 'type' field equals 'url'
                "_eq": "url"
              }
            }
          ]
        }
      }
    ],
    "required": false,
    "group": null,
    "validation": {
      "_and": [
        {
          "url": {
            "_regex": "^(?:\\/[A-Za-z0-9\\-._~%!$&'()*+,;=:@\\/]*|https?:\\/\\/[^\\s/$.?#].[^\\s]*)$"
          }
        }
      ]
    }, // Regex validation for URLs (relative or absolute)
    "validation_message": "Invalid URL. Check your URL and try again. Properly formatted relative URLs (`/pages/test`) and absolute URLs (`https://example.com`) are supported."
  }
}
```

</complete_example>

<related_tools>

## Related Tools

- `collections`: Create containers for fields
- `relations`: Connect fields between collections </related_tools>
180
api/src/mcp/tools/prompts/files.md
Normal file
@@ -0,0 +1,180 @@

Perform CRUD operations on files in Directus.

## Actions

- **`read`**: List/query metadata or get specific items by ID
- **`update`**: Modify existing metadata
- **`delete`**: Remove files by keys
- **`import`**: Import a file from a URL and create or update its file data

## Example Operations

### Reading File Metadata

```json
{
  "action": "read",
  "query": {
    "fields": ["id", "title", "type", "filesize", "width", "height"],
    "filter": { "type": { "_starts_with": "image/" } },
    "limit": 10
  }
}
```

### Get Single File Metadata

```json
{
  "action": "read",
  "keys": ["file-uuid-here"]
}
```

### Import a File via URL

```json
{
  "action": "import",
  "data": [
    {
      "url": "file-url",
      "file": {
        "title": "New Title",
        "description": "Updated description",
        "tags": ["tag1", "tag2", "category"],
        "folder": "folder-uuid"
      }
    }
  ]
}
```

### Update File Metadata

**Single file:**

```json
{
  "action": "update",
  "keys": ["file-uuid"],
  "data": {
    "title": "New Title",
    "description": "Updated description",
    "tags": ["tag1", "tag2", "category"],
    "folder": "folder-uuid"
  }
}
```

**Batch update:**

```json
{
  "action": "update",
  "data": [
    { "id": "file-uuid-1", "title": "New Title 1" },
    { "id": "file-uuid-2", "title": "New Title 2" }
  ]
}
```

### Common Filters

```json
{
  "query": {
    "filter": {
      "_and": [
        { "type": { "_icontains": "/png" } }, // PNG files only
        { "folder": { "_eq": "folder-uuid" } }, // Specific folder
        { "filesize": { "_lt": 5000000 } }, // Under 5MB
        { "uploaded_on": { "_gte": "$NOW(-7 days)" } } // Within last week
      ]
    }
  }
}
```

## File Metadata Fields

- `id`: Unique identifier
- `storage`: Storage adapter used
- `filename_disk`: Actual filename on disk
- `filename_download`: Suggested download filename
- `title`: Display title
- `type`: MIME type (e.g., "image/jpeg", "application/pdf")
- `folder`: Parent folder ID
- `uploaded_by`: User who uploaded
- `uploaded_on`: Upload timestamp
- `modified_by`: Last modifier
- `modified_on`: Last modification time
- `filesize`: Size in bytes
- `width`/`height`: Dimensions for images (in pixels)
- `duration`: Length for video/audio
- `description`: File description
- `location`: Geo-location data
- `tags`: Array of tag strings (e.g., ["product", "red", "handbag"])
- `metadata`: Additional metadata object
- `focal_point_x`: Horizontal focal point (in pixels from left edge)
- `focal_point_y`: Vertical focal point (in pixels from top edge)

## Real-World Use Cases

### Asset Selection for Content

Find appropriate images for articles, pages, or products:

_Example: "Find images in our asset library related to customer support for our new help center article."_

```json
{
  "action": "read",
  "query": {
    "fields": ["id", "title", "description", "tags", "type"],
    "search": "help center"
  }
}
```

### Asset Organization & Cleanup

Transform generic files into well-organized, searchable assets:

1. **Find files needing metadata:**

```json
{
  "action": "read",
  "query": {
    "fields": ["id", "filename_disk", "title", "description"],
    "filter": { "description": { "_null": true } }
  }
}
```

2. **Analyze with vision (use `assets` tool for base64):** Get image content for AI analysis

3. **Update with descriptive metadata:**

```json
{
  "action": "update",
  "keys": ["image-uuid"],
  "data": {
    "title": "Red leather handbag product photo",
    "description": "Professional e-commerce photo with white background",
    "tags": ["handbag", "leather", "red", "product-photo", "accessories"],
    // Focal points ensure that when images are cropped for different aspect ratios (thumbnails,
    // hero images, etc.), the important subject remains visible. Coordinates are in pixels from
    // the top-left corner of the original image.
    "focal_point_x": 512,
    "focal_point_y": 300
  }
}
```

## Key Points

- **ALWAYS pass data as native objects**, NOT stringified JSON
- **Metadata only**: This tool manages file metadata, not file content or uploads
- **Permissions**: Respects Directus access control
- **Arrays required**: `keys` and `tags` must be arrays: `["item"]` not `"item"`
- **Performance**: Large files handled automatically but may impact performance
495
api/src/mcp/tools/prompts/flows.md
Normal file
@@ -0,0 +1,495 @@

Manage automation flows that enable event-driven data processing and task automation. Flows consist of a trigger and a
series of operations forming a data chain.

<flow_concepts>

## Key Concepts

**Flow** = Trigger + Operations + Data Chain

- Each flow has ONE trigger that starts execution
- Operations execute sequentially, passing data through the chain
- The data chain accumulates results from each step

After creating a flow, use the `operations` tool to add individual operations with detailed operation syntax,
positioning, and data chain usage. </flow_concepts>

<core_fields>

## Flow Data Structure

All flows share these core fields for creation:

- `name` (required) - Flow display name
- `trigger` (required) - Trigger type: `event`, `webhook`, `schedule`, `operation`, `manual`
- `status` - `active` or `inactive` (default: `active`)
- `accountability` - `all`, `activity`, or `null` (default: `all`)
- `icon` - Icon identifier (optional)
- `color` - Hex color code (optional)
- `description` - Flow description (optional)
- `options` - Trigger-specific configuration object (optional)
- `operation` - UUID of first operation (optional, set after creating operations) </core_fields>

<crud_actions>

## Actions

- ALWAYS show the flow URL if it is present in the result

### `read` - List/Query Flows

```json
{
  "action": "read",
  "query": {
    "fields": ["id", "name", "status", "trigger", "operations"],
    "filter": { "status": { "_eq": "active" } },
    "limit": 10
  }
}
```

### `create` - Create New Flow

**Required**: `name`, `trigger`

```json
{
  "action": "create",
  "data": {
    "name": "Send email when new post is published",
    "icon": "bolt", // Optional icon
    "color": "#FFA439", // Optional hex color
    "description": "Checks if a new post is published and emails the admin",
    "status": "active", // active|inactive
    "accountability": "all", // all|activity|null
    "trigger": "event", // event|webhook|schedule|operation|manual
    "options": {
      // Trigger-specific configuration
      "type": "action",
      "scope": ["items.create"],
      "collections": ["posts"]
    }
  }
}
```

### `update` - Modify Existing Flow

```json
{
  "action": "update",
  "key": "flow-uuid",
  "data": {
    "status": "inactive",
    "description": "Updated description"
  }
}
```

### `delete` - Remove Flow

```json
{
  "action": "delete",
  "key": "flow-uuid"
}
```

</crud_actions>

<trigger_types>

## Trigger Types & Options

### Event Hook Trigger

Responds to data changes and system events:

```json
{
  "trigger": "event",
  "options": {
    "type": "filter", // filter (blocking) | action (non-blocking)
    "scope": ["items.create", "items.update"],
    "collections": ["orders", "products"],
    "return": "process_data" // For filter only: <operationKey>|$all|$last (avoid $last)
  }
}
```

**Common Scopes**:

- `items.create`, `items.update`, `items.delete` - Data operations
- `auth.login` - User authentication
- **Note**: Multiple scopes trigger the flow for ANY matching event

### Webhook Trigger

```json
{
  "trigger": "webhook",
  "options": {
    "method": "POST", // GET|POST
    "async": false, // true = immediate response, false = wait for completion
    "return": "transform_result", // Response body: <operationKey>|$all|$last (avoid $last)
    "cache": false // Cache GET responses
  }
}
```

### Schedule Trigger (CRON)

```json
{
  "trigger": "schedule",
  "options": {
    "cron": "0 */15 * * * *" // Every 15 minutes
  }
}
```

**CRON Format**: `second minute hour day month weekday`

**Examples**:

- `"0 0 9 * * *"` - Daily at 9 AM
- `"0 30 * * * *"` - At minute 30 of every hour
- `"0 */15 * * * *"` - Every 15 minutes

### Manual Trigger

A UI button that users click to start flows:

```json
{
  "trigger": "manual",
  "options": {
    "collections": ["posts", "products"],
    "location": "item", // item|collection|both
    "requireSelection": false, // Default true - requires item selection
    "requireConfirmation": true,
    "confirmationDescription": "AI Ghostwriter",
    "async": true, // Run in background
    "fields": [
      {
        "field": "prompt",
        "type": "text",
        "name": "Prompt",
        "meta": {
          "interface": "input-multiline",
          "note": "Describe what you want to create.",
          "width": "full",
          "required": true
        }
      },
      {
        "field": "voice",
        "type": "json",
        "name": "Tone Of Voice",
        "meta": {
          "interface": "tags",
          "options": {
            "presets": ["friendly", "professional", "casual"]
          }
        }
      },
      {
        "field": "colors",
        "type": "json",
        "name": "Color Palette",
        "meta": {
          "interface": "list",
          "options": {
            "fields": [
              {
                "field": "color",
                "type": "string",
                "meta": { "interface": "select-color" }
              },
              {
                "field": "name",
                "type": "string",
                "meta": { "interface": "input" }
              }
            ]
          }
        }
      }
    ]
  }
}
// Access confirmation inputs: {{ $trigger.body.prompt }}, {{ $trigger.body.voice }}
```

**Field Options**: Supports non-relational Directus interfaces - `input`, `input-multiline`, `input-rich-text-md`,
`tags`, `list`, `select-color`, `select-radio`, `collection-item-dropdown`, etc.

### Operation Trigger (Another Flow)

```json
{
  "trigger": "operation",
  "options": {
    "return": "final_result" // Data to return to calling flow: <operationKey>|$all|$last (avoid $last)
  }
}
```

</trigger_types>

<operations_integration>

## Working with Operations

**Use the `operations` tool for complete details on:**

- Creating and linking operations
- The 14x14 grid positioning system
- Data chain variable syntax
- Operation-specific configuration

**Workflow Process:**

1. Create the flow first to get the flow ID
2. Use the `operations` tool to add/manage operations
3. Operations execute in sequence based on resolve/reject paths
4. Link operations via UUIDs in resolve/reject fields </operations_integration>

<flow_chaining>

## 🔗 Flow Chaining

**When to Chain**: Reusable utilities, complex multi-step workflows, conditional branching

**How to Chain**:

1. Child flow: `"trigger": "operation"`, `"return": "final_key"` or `"$last"`
2. Parent flow: Use a `trigger` operation with the child flow UUID and payload
3. Access child results: `{{ trigger_operation_key }}`

**Common Utility Patterns**:

```json
// Utils → Get Globals (called by multiple flows)
{
  "name": "Utils → Get Globals",
  "trigger": "operation",
  "options": { "return": "$last" }
}

// Utils → Send Email (reusable email sender)
{
  "name": "Utils → Send Email",
  "trigger": "operation",
  "options": { "return": "$last" }
}

// Main flow calls utility
{
  "type": "trigger",
  "key": "globals",
  "options": {
    "flow": "utils-globals-uuid"
  }
}
// Access: {{ globals.openai_api_key }}
```

**Multi-Chain Example** (Form Notifications):

```json
// Chains: Read Form → Format → Render Template → Send Email
{
  "type": "trigger",
  "key": "render",
  "options": {
    "flow": "utils-render-template-uuid",
    "payload": "{{ format }}"
  }
}
```

**Best Practices**:

- Name utilities with a "Utils →" prefix for clarity
- Use `$last` return for simple utilities, specific keys for complex ones
- Chain utilities together for modular, reusable workflows
- Keep each utility focused on a single responsibility </flow_chaining>

<data_chain_warning>

## Data Chain Access

**See the `operations` tool for complete data chain syntax and examples.**

Operations can access:

- `$trigger` - Initial trigger data
- `$accountability` - User/permission context
- `$env` - Environment variables
- `<operationKey>` - Result of specific operation (recommended)
- `$last` - Result of previous operation (avoid - breaks when reordered)
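
For instance, a hedged sketch of an operation's options referencing the chain (`read_post` is a hypothetical key of an earlier operation):

```json
{
  "to": ["{{ $trigger.payload.email }}"],
  "subject": "Post updated",
  "body": "'{{ read_post.title }}' was changed"
}
```

</data_chain_warning>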

<real_world_examples>

## Real-World Examples

### Post Approval Email (Event-Driven)

```json
{
  "action": "create",
  "data": {
    "name": "[Website] Post Approval",
    "icon": "mark_email_read",
    "color": "#18222F",
    "description": "Send email when posts are ready for review",
    "status": "active",
    "trigger": "event",
    "accountability": "all",
    "options": {
      "type": "action", // Non-blocking
      "scope": ["items.update"],
      "collections": ["posts"]
    }
  }
}
// Then add operations: Check Status → Send Email
```

### Auto-Generate Slugs (Event-Driven)

```json
{
  "action": "create",
  "data": {
    "name": "[Website] Slugify",
    "icon": "link",
    "color": "#18222F",
    "description": "Generate slugs for pages, posts, and categories",
    "status": "active",
    "trigger": "event",
    "accountability": "all",
    "options": {
      "type": "action",
      "scope": ["items.create"],
      "collections": ["pages", "posts", "categories"]
    }
  }
}
```

### Create Scheduled Task

```json
{
  "action": "create",
  "data": {
    "name": "Daily Report",
    "trigger": "schedule",
    "status": "active",
    "options": {
      "cron": "0 0 9 * * *" // 9 AM daily
    }
  }
}
```

### Project Creator with Template (Manual + Confirmation)

```json
{
  "action": "create",
  "data": {
    "name": "[Projects] Create Project",
    "trigger": "manual",
    "status": "active",
    "options": {
      "collections": ["os_projects", "organizations"],
      "requireSelection": false, // Can trigger without selection
      "requireConfirmation": true,
      "confirmationDescription": "Create a New Project 🚀",
      "fields": [
        {
          "field": "name",
          "type": "string",
          "name": "Project Name",
          "meta": {
            "interface": "input",
            "required": true
          }
        },
        {
          "field": "organization",
          "type": "json",
          "name": "Organization",
          "meta": {
            "interface": "collection-item-dropdown",
            "required": true,
            "options": {
              "selectedCollection": "organizations"
            }
          }
        }
      ]
    }
  }
}
```

### Utility Flow (Operation Trigger)

```json
{
  "action": "create",
  "data": {
    "name": "[Util] Get Globals",
    "trigger": "operation",
    "accountability": "all",
    "options": {
      "return": "global_data" // Returns data to calling flow: <operationKey>|$all|$last
    }
  }
}
// Called by other flows using a trigger operation
```

</real_world_examples>

<important_notes>
## Important Notes

- **Admin Required**: This tool requires admin permissions
- **Data Format**: Pass `data` as native objects, NOT stringified JSON
- **Flow Execution**: Flows with an `operations` array will include their operations
- **Webhook URL**: After creating a webhook trigger, the URL is `/flows/trigger/<flow-id>`
- **Event Blocking**: Filter events pause the transaction until the flow completes
- **Logs**: Flow executions are logged (check the `accountability` setting)
</important_notes>

<common_mistakes>
## Common Mistakes to Avoid

1. **DO NOT** create operations here - use the `operations` tool
2. **DO NOT** trigger flows here - use the `trigger-flow` tool
3. **DO NOT** pass stringified JSON in the data parameter
4. **DO NOT** forget the required fields for creation: `name` and `trigger`
5. **DO NOT** put options outside of data - it goes inside the flow object:
```json
// ✅ CORRECT
{
  "action": "create",
  "data": {
    "name": "My Flow",
    "trigger": "event",
    "options": { "type": "action" }
  }
}

// ❌ WRONG
{
  "action": "create",
  "data": { "name": "My Flow", "trigger": "event" },
  "options": { "type": "action" }
}
```
</common_mistakes>
34
api/src/mcp/tools/prompts/folders.md
Normal file
@@ -0,0 +1,34 @@

Perform CRUD operations on Directus Folders. Folders are used to organize files in Directus.

## Available Actions

- `create`: Add new folder records
- `read`: List/query folders or get specific folders by ID
- `update`: Modify existing folders (name, parent)
- `delete`: Remove folders by keys

## Common Operations

### Create Folder

```json
{
  "action": "create",
  "data": {
    "name": "Product Images",
    "parent": "parent-folder-uuid"
  }
}
```
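
The other actions follow the same shape. A minimal sketch of reading and renaming (the UUIDs are placeholders):

### Read / Rename Folder

```json
// List folders inside a parent folder
{
  "action": "read",
  "query": {
    "fields": ["id", "name", "parent"],
    "filter": { "parent": { "_eq": "parent-folder-uuid" } }
  }
}
```

```json
// Rename an existing folder
{
  "action": "update",
  "keys": ["folder-uuid"],
  "data": { "name": "Product Photos" }
}
```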

## Important Notes

- **Folders are virtual**: Folders are not mirrored with the storage adapter, only stored in the database.
- **Permissions**: Respects Directus access control - only accessible folders are returned. If you don't see something
  that the user says you should have access to, it could be a permissions issue.
- **Folder Hierarchy**: Deleting a folder requires it to be empty or will cascade based on settings

## Mistakes to Avoid

1. **Remember** that `keys` expects an array even for single items
2. **Tags must be arrays**: Use `["tag1", "tag2"]` not `"tag1, tag2"`
21
api/src/mcp/tools/prompts/index.ts
Normal file
@@ -0,0 +1,21 @@
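// Reads each prompt's markdown source from disk once at module load and exports the strings keyed by tool name.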

import fse from 'fs-extra';
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';

const __dirname = dirname(fileURLToPath(import.meta.url));

export default {
	assets: fse.readFileSync(join(__dirname, 'assets.md'), 'utf8'),
	collections: fse.readFileSync(join(__dirname, 'collections.md'), 'utf8'),
	fields: fse.readFileSync(join(__dirname, 'fields.md'), 'utf8'),
	files: fse.readFileSync(join(__dirname, 'files.md'), 'utf8'),
	folders: fse.readFileSync(join(__dirname, 'folders.md'), 'utf8'),
	flows: fse.readFileSync(join(__dirname, 'flows.md'), 'utf8'),
	items: fse.readFileSync(join(__dirname, 'items.md'), 'utf8'),
	operations: fse.readFileSync(join(__dirname, 'operations.md'), 'utf8'),
	relations: fse.readFileSync(join(__dirname, 'relations.md'), 'utf8'),
	schema: fse.readFileSync(join(__dirname, 'schema.md'), 'utf8'),
	systemPrompt: fse.readFileSync(join(__dirname, 'system-prompt.md'), 'utf8'),
	systemPromptDescription: fse.readFileSync(join(__dirname, 'system-prompt-description.md'), 'utf8'),
	triggerFlow: fse.readFileSync(join(__dirname, 'trigger-flow.md'), 'utf8'),
};
317
api/src/mcp/tools/prompts/items.md
Normal file
@@ -0,0 +1,317 @@

Perform CRUD operations on items within Directus collections.

## Actions

- `read`: Query items with filtering/pagination/field selection
- `create`: Add items (single/batch) with nested relations
- `update`: Modify items with partial data
- `delete`: Remove items by primary keys

## Essential Query Patterns

### Field Selection (Always Use)

```json
{ "fields": ["title", "status", "author.name", "categories.*"] }
```

For M2A relations: `"sections.item:headings.title", "sections.item:paragraphs.body"`

### Filtering Operators

Core: `_eq`, `_neq`, `_in`, `_nin`, `_null`, `_nnull`, `_lt`, `_lte`, `_gt`, `_gte`, `_between`, `_contains`,
`_icontains`, `_starts_with`, `_ends_with`, `_empty`, `_nempty`
Relations: `_some`, `_none` (O2M only)
Logic: `_and`, `_or`

```json
{"status": {"_eq": "published"}}
{"title": {"_icontains": "keyword"}}
{"categories": {"_some": {"name": {"_eq": "News"}}}}
{"_or": [{"status": {"_eq": "published"}}, {"featured": {"_eq": true}}]}
```

### Deep Queries (Nested Relations)

```json
{
  "fields": ["title", "comments.text", "comments.author.name"],
  "deep": {
    "comments": {
      "_filter": { "status": { "_eq": "approved" } },
      "_sort": ["-date_created"],
      "_limit": 5
    }
  }
}
```

## Create/Update/Delete Best Practices

- ALWAYS show the item URL if it is present in the result for create or update

### Creating Items

- ALWAYS make sure you fully understand the collection's schema before trying to create items.

**✅ GOOD - Single with Relations:**

```json
{
  "action": "create",
  "collection": "posts",
  "data": {
    "title": "New Post",
    "author": { "name": "John Doe", "email": "john@example.com" }, // Creates nested
    "categories": [1, 2, { "name": "New Category" }], // Mix existing + new
    "status": "draft"
  }
}
```

**✅ GOOD - Batch Create:**

```json
{
  "action": "create",
  "collection": "posts",
  "data": [
    { "title": "Post 1", "author_id": 1 },
    { "title": "Post 2", "author_id": 2 }
  ]
}
```

**❌ BAD - Missing Required Fields:**

```json
// Don't create without checking schema first
{ "title": "Post" } // Missing required fields like status
```

### Updating Items

**✅ GOOD - Update with Keys:**

```json
{
  "action": "update",
  "collection": "posts",
  "keys": ["uuid-1", "uuid-2"],
  "data": { "status": "published" } // Partial update
}
```

**✅ GOOD - Batch Update (Different Data):**

```json
{
  "action": "update",
  "collection": "posts",
  "data": [
    { "id": "uuid-1", "title": "Updated Title 1" },
    { "id": "uuid-2", "title": "Updated Title 2" }
  ]
}
```

**✅ GOOD - Relational Updates:**

```json
{
  "action": "update",
  "collection": "posts",
  "keys": ["uuid-1"],
  "data": {
    "categories": {
      "create": [{ "name": "New Category" }],
      "update": [{ "id": 3, "name": "Renamed" }],
      "delete": [5]
    }
  }
}
```

**❌ BAD - Update Without Keys:**

```json
// Don't update without specifying which items
{
  "action": "update",
  "data": { "status": "published" } // Will fail - no keys provided
}
```

### Deleting Items

- ALWAYS get written confirmation from the user before deleting any items.

**✅ GOOD - Delete by Keys:**

```json
{
  "action": "delete",
  "collection": "posts",
  "keys": ["uuid-1", "uuid-2"]
}
```

**❌ BAD - Delete All (Dangerous):**

```json
// Never delete without keys - use a query to get keys first
{
  "action": "delete",
  "collection": "posts" // Will fail - keys required
}
```

### Singleton Collections

**✅ GOOD - Singleton Read:**

```json
{
  "action": "read",
  "collection": "settings", // Singleton collection
  "query": { "fields": ["site_name", "logo"] }
}
```

**✅ GOOD - Singleton Update:**

```json
{
  "action": "update",
  "collection": "settings",
  "data": { "site_name": "New Name" } // No keys needed for singleton
}
```

## Advanced Relationship Patterns

### Many-to-One (M2O)

```json
// Create with nested author
{"title": "Article", "author": {"name": "New Author"}}
// Link existing author
{"title": "Article", "author": "existing-uuid"}
// Remove relation
{"author": null}
```

### One-to-Many (O2M)

```json
// Link to existing comments
{"comments": [1, 5, 9]}
// Create/update/delete operations
{
  "comments": {
    "create": [{"text": "New comment"}],
    "update": [{"id": 5, "text": "Updated"}],
    "delete": [1, 9]
  }
}
```

### Many-to-Any (M2A)

```json
{
  "sections": [
    { "collection": "headings", "item": { "text": "Title", "level": 1 } },
    { "collection": "paragraphs", "item": { "content": "Body text" } }
  ]
}
```

### Translations

```json
// Create with multiple languages
{
  "title": "Main Title",
  "translations": [
    {"languages_code": "en-US", "title": "English Title", "content": "English content"},
    {"languages_code": "fr-FR", "title": "Titre Français", "content": "Contenu français"}
  ]
}

// Read specific language
{
  "fields": ["title", "translations.title", "translations.content"],
  "deep": {
    "translations": {
      "_filter": {"languages_code": {"_eq": "fr-FR"}}
    }
  }
}
```

## LLM Decision Rules

### Schema-First Pattern (Critical)

1. **Always call `schema()` first** to discover collections
2. **Examine specific schemas**: `schema(keys: ["collection"])` for field details based on the user's query (see the sketch after this list)
3. **Follow relation chains**: Check `relation.related_collections` in field definitions when it's relevant to your task
4. **IMPORTANT: Never guess field names** - Always use exact names from the schema. If you're not 100% sure, ask the user
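
A hedged sketch of that sequence as `schema` tool calls (the exact argument shape is assumed from the references above; `articles` is a placeholder collection):

```json
// 1. Discover available collections
{}

// 2. Inspect one collection's fields before creating items
{ "keys": ["articles"] }
```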

### Before Creating Items

1. **Check required fields** in the schema
2. **Validate field types** match schema definitions
3. **Check for singleton collections** (`collection.singleton: true`)
4. **Verify relation targets exist** before linking. You can also create new related items by nesting

### Before Updating Items

1. **Use the keys parameter** for bulk updates with the same data
2. **Use a data array** for batch updates with different data per item
3. **Include the primary key in data** when using batch update
4. **Check field permissions** - some fields may be read-only

### Before Deleting Items

1. **Always require explicit keys** - never delete by query alone
2. **Check for related data** that might be affected
3. **Validate cascade behavior** for relationships
4. **Consider soft delete** if the collection has a status field

### Performance & Safety

- **Use `fields`** to minimize payload size
- **Apply `limit`** for large result sets (default: no limit)
- **Batch operations** are transactional - all succeed or all fail
- **Primary keys returned** from create/update operations for chaining
- **Validation errors** are thrown before database operations

### Error Patterns to Avoid

- Creating without checking required fields through `schema(keys: ["collection"])`
- Updating without keys or with invalid primary keys
- Deleting system collections (directus\_\*)
- Assuming field names without schema verification
- Missing foreign key references in relations
- Exceeding mutation limits in batch operations

### Singleton vs Regular Collections

**Regular Collections**: Require keys for update/delete, return arrays
**Singleton Collections**: No keys needed, return single objects, auto-upsert behavior

## Functions & Aggregation

Date: `year(field)`, `month(field)`, `day(field)`, `hour(field)`
Aggregate: `count`, `sum`, `avg`, `min`, `max`

```json
{"filter": {"year(date_created)": {"_eq": 2024}}}
{"aggregate": {"count": "*", "sum": "price"}, "groupBy": ["category"]}
```

## Restrictions

- Cannot operate on `directus_*` collections
- Respects user permissions/RBAC
- Delete operations may be environment-disabled
721
api/src/mcp/tools/prompts/operations.md
Normal file
@@ -0,0 +1,721 @@

Perform CRUD on Directus Operations within Flows. Operations are individual actions that execute sequentially in a flow,
processing and transforming data through the data chain.

<key_concepts>

- **Operations** are the building blocks of flows
- Each operation has a unique `key` that identifies it in the data chain
- Operations connect via `resolve` (success) and `reject` (failure) paths
- Data from each operation is stored under its key in the data chain </key_concepts>

<uuid_vs_keys>

## UUID vs Keys - Critical Distinction

**UUIDs** (System identifiers): `"abc-123-def-456"`

- Use in: `resolve`, `reject`, `flow`, operation `key` field in CRUD
- Generated automatically when operations are created
- Required for connecting operations

**Keys** (Human-readable names): `"send_email"`, `"check_status"`

- Use in: Data chain variables `{{ operation_key }}`
- You define these when creating operations
- Used to access operation results in subsequent operations
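
A hedged sketch of the distinction (the UUIDs and keys are placeholders): `resolve` takes the next operation's UUID, while template variables reference the human-readable key:

```json
// Linking: UUIDs in resolve/reject
{ "action": "update", "key": "abc-123-def-456", "data": { "resolve": "def-789-ghi-012" } }

// Data chain: keys in template variables
{ "message": "Condition result: {{ check_status }}" }
```

</uuid_vs_keys>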
|
||||
|
||||
<critical_syntax>
|
||||
|
||||
## Critical Syntax Rules - MEMORIZE THESE
|
||||
|
||||
**Condition Filters**: Use nested objects, NEVER dot notation
|
||||
|
||||
- ❌ `"$trigger.payload.status"`
|
||||
- ✅ `{"$trigger": {"payload": {"status": {"_eq": "published"}}}}`
|
||||
|
||||
**Request Headers**: Array of objects, NOT simple objects
|
||||
|
||||
- ❌ `{"Authorization": "Bearer token"}`
|
||||
- ✅ `[{"header": "Authorization", "value": "Bearer token"}]`
|
||||
|
||||
**Request Body**: Stringified JSON, NOT native objects
|
||||
|
||||
- ❌ `"body": {"data": "value"}`
|
||||
- ✅ `"body": "{\"data\": \"value\"}"`
|
||||
|
||||
**Data Chain Variables**: Use operation keys, avoid `$last`
|
||||
|
||||
- ❌ `{{ $last }}` (breaks when reordered)
|
||||
- ✅ `{{ operation_key }}` (reliable) </critical_syntax>
|
||||
|
||||
<required_fields>
|
||||
|
||||
## Required Fields Summary
|
||||
|
||||
**All Operations:**
|
||||
|
||||
- `flow` - UUID of parent flow
|
||||
- `key` - Unique operation identifier
|
||||
- `type` - Operation type
|
||||
- `position_x`, `position_y` - Grid coordinates
|
||||
- `resolve`, `reject` - Next operation UUIDs (null initially) </required_fields>
|
||||
|
||||
<available_operations> Core operations available in Directus:
|
||||
|
||||
- **condition** - Evaluate filter rules to determine execution path
|
||||
- **exec** - Execute custom JavaScript/TypeScript code in sandboxed environment
|
||||
- **item-create** - Create items in a collection
|
||||
- **item-read** - Retrieve items from a collection
|
||||
- **item-update** - Update existing items in a collection
|
||||
- **item-delete** - Remove items from a collection
|
||||
- **json-web-token** - Sign, verify, or decode JWT tokens
|
||||
- **log** - Output debug messages to console
|
||||
- **mail** - Send email notifications with templates
|
||||
- **notification** - Send in-app notifications to users
|
||||
- **request** - Make HTTP requests to external services
|
||||
- **sleep** - Delay execution for specified time
|
||||
- **throw-error** - Throw custom errors to stop flow execution
|
||||
- **transform** - Create custom JSON payloads
|
||||
- **trigger** - Execute another flow with iteration modes
|
||||
|
||||
If user has installed extensions from the Directus Marketplace, there may be more operations available than this. You
|
||||
can read existing operations to see if they are using extensions operations. </available_operations>

<crud_actions>

### `read` - List Flow Operations

```json
{
  "action": "read",
  "query": {
    "fields": ["id", "name", "key", "type", "flow", "resolve", "reject"],
    "filter": { "flow": { "_eq": "flow-uuid" } },
    "sort": ["position_x", "position_y"]
  }
}
```

### `create` - Add Operation to Flow

```json
{
  "action": "create",
  "data": {
    "flow": "flow-uuid", // Required: Flow this operation belongs to
    "key": "notify_user", // Required: Unique key for this operation
    "type": "notification", // Required: Operation type
    "name": "Send Notification", // Optional: Display name
    "position_x": 19, // Required: Grid X position (use 19, 37, 55, 73...)
    "position_y": 1, // Required: Grid Y position (use 1, 19, 37...)
    "options": {
      // Optional: Type-specific configuration (default: {})
      // Configuration based on operation type
    },
    "resolve": null, // Required: UUID of next operation on success (null initially)
    "reject": null // Required: UUID of next operation on failure (null initially)
  }
}
```

### `update` - Modify Operation

```json
{
  "action": "update",
  "key": "operation-uuid",
  "data": {
    "options": {
      // Updated configuration
    },
    "resolve": "operation-uuid-here"
  }
}
```

### `delete` - Remove Operation

```json
{
  "action": "delete",
  "key": "operation-uuid"
}
```

</crud_actions>

<workflow_creation>

## Workflow Creation Process - **ESSENTIAL READING**

**⚠️ CRITICAL**: Follow this exact order or operations will fail

<workflow_steps>

### Step-by-Step Process:

1. **Create the flow** using the `flows` tool
2. **Create all operations** with null resolve/reject initially
3. **Link operations together** using the UUIDs returned from step 2
4. **Update the flow** to set the first operation as the entry point

### Why This Order Matters:

- Operations must exist before they can be referenced in resolve/reject fields
- UUIDs are only available after operations are created
- The flow needs at least one operation created before setting its entry point </workflow_steps>

<workflow_example>

### Complete Workflow Example:

```json
// Step 1: Create the flow first (using flows tool)
{
  "action": "create",
  "data": {
    "name": "Email on Post Published",
    "trigger": "event",
    "options": {
      "type": "action",
      "scope": ["items.create"],
      "collections": ["posts"]
    }
  }
}
// Returns: {"id": "flow-uuid-123", ...}

// Step 2: Create operations with null connections initially
{"action": "create", "data": {
  "flow": "flow-uuid-123",
  "key": "check_status",
  "type": "condition",
  "position_x": 19, // First operation position
  "position_y": 1,
  "options": {
    "filter": {
      "$trigger": {
        "payload": {
          "status": {"_eq": "published"}
        }
      }
    }
  },
  "resolve": null, // Set to null initially
  "reject": null // Set to null initially
}}
// Returns: {"id": "condition-uuid-456", ...}

{"action": "create", "data": {
  "flow": "flow-uuid-123",
  "key": "send_email",
  "type": "mail",
  "position_x": 37, // Second operation position
  "position_y": 1,
  "options": {
    "to": ["admin@example.com"],
    "subject": "New post published",
    "body": "Post '{{$trigger.payload.title}}' was published"
  },
  "resolve": null,
  "reject": null
}}
// Returns: {"id": "email-uuid-789", ...}

// Step 3: Connect operations using UUIDs (NOT keys)
{"action": "update", "key": "condition-uuid-456", "data": {
  "resolve": "email-uuid-789", // Use UUID from step 2
  "reject": null // No error handling operation
}}

// Step 4: Update flow to set first operation (using flows tool)
{"action": "update", "key": "flow-uuid-123", "data": {
  "operation": "condition-uuid-456" // First operation UUID
}}
```

</workflow_example> </workflow_creation>

<positioning_system>

## Grid-Based Positioning - **ALWAYS SET POSITIONS**

**Grid Rules:**

- Each operation: 14x14 grid units
- Standard spacing: 18 units (19, 37, 55, 73...)
- Vertical start: `position_y: 1`
- Never use (0,0) - operations will overlap

**Common Patterns:**

```json
// Linear flow
{"position_x": 19, "position_y": 1} // First
{"position_x": 37, "position_y": 1} // Second
{"position_x": 55, "position_y": 1} // Third

// Branching (success/error)
{"position_x": 19, "position_y": 1} // Main
{"position_x": 37, "position_y": 1} // Success (same row)
{"position_x": 37, "position_y": 19} // Error (lower row)
```

</positioning_system>

<operation_examples> <condition> Evaluates filter rules to determine path

```json
{
  "type": "condition",
  "options": {
    "filter": {
      "$trigger": {
        "payload": {
          "status": { "_eq": "published" }
        }
      }
    }
  }
}
```

<filter_examples>

```json
// Check if field exists
{
  "filter": {
    "$trigger": {
      "payload": {
        "website": {"_nnull": true}
      }
    }
  }
}

// Multiple conditions (AND) - CORRECTED SYNTAX
{
  "filter": {
    "$trigger": {
      "payload": {
        "_and": [
          {"status": {"_eq": "published"}},
          {"featured": {"_eq": true}}
        ]
      }
    }
  }
}
```

</filter_examples> </condition>

<item_operations> **Create Items:**

```json
{
  "type": "item-create",
  "options": {
    "collection": "notifications",
    "permissions": "$trigger",
    "emitEvents": true,
    "payload": {
      "title": "{{ $trigger.payload.title }}",
      "user": "{{ $accountability.user }}"
    }
  }
}
```

**Read Items:**

```json
{
  "type": "item-read",
  "options": {
    "collection": "products",
    "permissions": "$full",
    "query": {
      "filter": { "status": { "_eq": "active" } },
      "limit": 10
    }
  }
}
```

**Update Items:**

```json
{
  "type": "item-update",
  "options": {
    "collection": "orders",
    "permissions": "$trigger",
    "emitEvents": true,
    "key": "{{ $trigger.payload.id }}",
    "payload": { "status": "processed" }
  }
}
```

**Delete Items:**

```json
{
  "type": "item-delete",
  "options": {
    "collection": "temp_data",
    "permissions": "$full",
    "key": ["{{ read_items[0].id }}"]
  }
}
```

</item_operations>

<exec>
Execute custom JavaScript/TypeScript in isolated sandbox

**⚠️ SECURITY WARNING**: Scripts run sandboxed with NO file system or network access

```json
{
  "type": "exec",
  "options": {
    "code": "module.exports = async function(data) {\n // Validate input\n if (!data.$trigger.payload.value) {\n throw new Error('Missing required value');\n }\n \n // Process data\n const result = data.$trigger.payload.value * 2;\n \n // Return must be valid JSON\n return {\n result: result,\n processed: true\n };\n}"
  }
}
```

**Common Use Cases**: Data transformation, calculations, complex logic, formatting, extracting nested values </exec>

<mail>
Send email notifications with optional templates

```json
{
  "type": "mail",
  "options": {
    "to": ["user@example.com", "{{ $trigger.payload.email }}"],
    "subject": "Order Confirmation",
    "type": "markdown", // "markdown" (default), "wysiwyg", or "template"
    "body": "Your order {{ $trigger.payload.order_id }} has been confirmed.",
    "cc": ["cc@example.com"], // Optional
    "bcc": ["bcc@example.com"], // Optional
    "replyTo": ["reply@example.com"] // Optional
  }
}
```

**Template Mode:**

```json
{
  "type": "mail",
  "options": {
    "to": ["{{ $trigger.payload.email }}"],
    "subject": "Welcome!",
    "type": "template",
    "template": "welcome-email", // Template name (default: "base")
    "data": {
      "username": "{{ $trigger.payload.name }}",
      "activation_url": "https://example.com/activate/{{ $trigger.payload.token }}"
    }
  }
}
```

</mail>

<notification>
Send in-app notifications to users

```json
{
  "type": "notification",
  "options": {
    "recipient": ["{{ $accountability.user }}"], // User ID(s) to notify
    "subject": "Task Complete",
    "message": "Your export is ready for download",
    "permissions": "$trigger",
    "collection": "exports", // Optional: Related collection
    "item": "{{ create_export.id }}" // Optional: Related item ID
  }
}
```

</notification>

<request>
Make HTTP requests

```json
{
  "type": "request",
  "options": {
    "method": "POST",
    "url": "https://api.example.com/webhook",
    "headers": [
      {
        "header": "Authorization",
        "value": "Bearer {{ $env.API_TOKEN }}"
      },
      {
        "header": "Content-Type",
        "value": "application/json"
      }
    ],
    "body": "{\"data\": \"{{ process_data }}\", \"timestamp\": \"{{ $trigger.timestamp }}\"}"
  }
}
```

**Real Example (Netlify Deploy Hook)**:

```json
{
  "type": "request",
  "options": {
    "method": "POST",
    "url": "https://api.netlify.com/build_hooks/your-hook-id",
    "headers": [
      {
        "header": "User-Agent",
        "value": "Directus-Flow/1.0"
      }
    ],
    "body": "{\"trigger\": \"content_updated\", \"item_id\": \"{{ $trigger.payload.id }}\"}"
  }
}
```

</request>

<json_web_token> Sign, verify, or decode JWT tokens - **CONSOLIDATED EXAMPLE**

```json
{
  "type": "json-web-token",
  "options": {
    "operation": "sign", // "sign", "verify", or "decode"

    // For SIGN operations:
    "payload": {
      "userId": "{{ $trigger.payload.user }}",
      "role": "{{ $trigger.payload.role }}"
    },
    "secret": "{{ $env.JWT_SECRET }}",
    "options": {
      "expiresIn": "1h",
      "algorithm": "HS256"
    },

    // For VERIFY/DECODE operations:
    "token": "{{ $trigger.payload.token }}"
    // "secret": "{{ $env.JWT_SECRET }}", // Required for verify, not for decode
    // "options": {"algorithms": ["HS256"]}, // For verify
    // "options": {"complete": true} // For decode
  }
}
```

</json_web_token>

<other_operations> **Transform JSON:**

```json
{
  "type": "transform",
  "options": {
    "json": {
      "combined": {
        "user": "{{ $accountability.user }}",
        "items": "{{ read_items }}",
        "timestamp": "{{ $trigger.timestamp }}"
      }
    }
  }
}
```

**Trigger Flow:**

```json
{
  "type": "trigger",
  "options": {
    "flow": "other-flow-uuid",
    "payload": { "data": "{{ transform_result }}" },
    "iterationMode": "parallel", // "parallel", "serial", "batch"
    "batchSize": 10 // Only for batch mode
  }
}
```

**Sleep:**

```json
{
  "type": "sleep",
  "options": { "milliseconds": 5000 }
}
```

**Log:**

```json
{
  "type": "log",
  "options": { "message": "Processing item: {{ $trigger.payload.id }}" }
}
```

**Throw Error:**

```json
{
  "type": "throw-error",
  "options": {
    "code": "CUSTOM_ERROR",
    "status": "400",
    "message": "Invalid data: {{ $trigger.payload.error_details }}"
  }
}
```

</other_operations> </operation_examples>

<data_chain_variables> Use `{{ variable }}` syntax to access data:

- `{{ $trigger.payload }}` - Trigger data
- `{{ $accountability.user }}` - User context
- `{{ operation_key }}` - Result from specific operation (recommended)
- `{{ operation_key.field }}` - Specific field from operation result

**⚠️ Avoid `$last`:** While `{{ $last }}` references the previous operation's result, avoid using it in production flows. If you reorder operations, `$last` will reference a different operation, potentially breaking your flow. Always use specific operation keys like `{{ operation_key }}` for reliable, maintainable flows. </data_chain_variables>
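
For illustration, assuming a prior operation with the key `read_user`, a follow-up operation could reference a specific field from its result (when an operation returns an array, index into it as in `{{ read_items[0].id }}` above):

```json
{
  "type": "log",
  "options": { "message": "User email: {{ read_user.email }}" }
}
```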

<permission_options> For operations that support permissions:

- `$trigger` - Use permissions from the triggering context (default)
- `$public` - Use public role permissions
- `$full` - Use full system permissions
- `role-uuid` - Use specific role's permissions </permission_options>
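
For example, an `item-read` operation can be pinned to a specific role's permissions (the role UUID below is a placeholder):

```json
{
  "type": "item-read",
  "options": {
    "collection": "orders",
    "permissions": "role-uuid-here",
    "query": { "limit": 5 }
  }
}
```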

<real_world_patterns> <data_processing_pipeline>

### Data Processing Pipeline

Read → Transform → Update pattern:

```json
// 1. Read with relations
{
  "flow": "flow-uuid", "key": "invoice", "type": "item-read",
  "position_x": 19, "position_y": 1,
  "options": {
    "collection": "os_invoices",
    "key": ["{{$trigger.payload.invoice}}"],
    "query": {"fields": ["*", "line_items.*", "payments.*"]}
  },
  "resolve": "calc-operation-uuid"
}
// 2. Calculate totals
{
  "flow": "flow-uuid", "key": "calculations", "type": "exec",
  "position_x": 37, "position_y": 1,
  "options": {
    "code": "module.exports = async function(data) {\n const invoice = data.invoice;\n const subtotal = invoice.line_items.reduce((sum, item) => sum + (item.price * item.quantity), 0);\n const tax = subtotal * 0.08;\n return { subtotal, tax, total: subtotal + tax };\n}"
  },
  "resolve": "update-operation-uuid"
}
// 3. Update with results
{
  "flow": "flow-uuid", "key": "update_invoice", "type": "item-update",
  "position_x": 55, "position_y": 1,
  "options": {
    "collection": "os_invoices",
    "payload": "{{calculations}}",
    "key": ["{{$trigger.payload.invoice}}"]
  }
}
```

</data_processing_pipeline>

<error_handling_branching>

### Error Handling with Branching

```json
// Main operation with error handling
{
  "flow": "flow-uuid", "key": "main_operation", "type": "request",
  "position_x": 19, "position_y": 1,
  "resolve": "success-operation-uuid",
  "reject": "error-operation-uuid"
}
// Success path
{
  "flow": "flow-uuid", "key": "success_notification", "type": "notification",
  "position_x": 37, "position_y": 1
}
// Error path (lower row)
{
  "flow": "flow-uuid", "key": "error_log", "type": "log",
  "position_x": 37, "position_y": 19
}
```

</error_handling_branching> </real_world_patterns>

<common_mistakes>

1. **DO NOT** create operations without a flow - create flow first
2. **DO NOT** use operation keys in resolve/reject - use UUIDs (see <workflow_example> above)
3. **DO NOT** try to reference operations that do not exist yet
4. **DO NOT** use duplicate keys within the same flow
5. **DO NOT** create circular references in resolve/reject paths
6. **DO NOT** forget to handle both success and failure paths
7. **DO NOT** pass stringified JSON - use native objects (except request body)
8. **DO NOT** leave operations at default position (0,0) - see <positioning_system> above
9. **DO NOT** use dot notation in condition filters - see <critical_syntax> above
10. **DO NOT** use wrong format for request operations - see <critical_syntax> above </common_mistakes>

<troubleshooting>
<invalid_foreign_key>
### "Invalid foreign key" Errors

This typically means you're trying to reference an operation that doesn't exist:

- Verify the operation UUID exists by reading operations for the flow
- Check that you're using UUIDs (36 characters) not keys (short names)
- Ensure operations are created before being referenced </invalid_foreign_key>
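
A quick way to verify is to list the flow's operations and compare their UUIDs against your resolve/reject values, e.g.:

```json
{
  "action": "read",
  "query": {
    "fields": ["id", "key", "resolve", "reject"],
    "filter": { "flow": { "_eq": "flow-uuid" } }
  }
}
```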

<operation_not_executing>

### Operation Not Executing

- Check the resolve/reject chain for breaks
- Verify the first operation is set as the flow's `operation` field
- Confirm all required operation options are provided </operation_not_executing>

<overlapping_operations>

### Overlapping Operations in Visual Editor

If operations appear stacked at (0,0) in the flow editor:

```json
// Fix by updating each operation's position
{"action": "update", "key": "operation-uuid", "data": {
  "position_x": 19, "position_y": 1
}}
{"action": "update", "key": "other-operation-uuid", "data": {
  "position_x": 37, "position_y": 1
}}
```

</overlapping_operations> </troubleshooting>

386
api/src/mcp/tools/prompts/relations.md
Normal file
@@ -0,0 +1,386 @@
Create and manage relationships between Directus Collections.

<prerequisites>
Before creating relations:
✓ Collections must exist (use `collections` tool)
✓ Fields must be created with correct types (use `fields` tool)
✓ Junction collections must exist for M2M/M2A relationships
✓ Optional system user fields (`user_created`/`user_updated`) require relations to `directus_users` (see `collections` tool `<system_fields>` section)
</prerequisites>

<actions>
- `create`: Establish relationship between collections
- `read`: View existing relationships
- `update`: Modify relationship settings (only schema.on_delete/on_update and meta can be updated)
- `delete`: Remove relationships
</actions>

<basic_relation> After creating relational fields, define the relationship:

```json
{
  "action": "create",
  "data": {
    "collection": "articles",
    "field": "author",
    "related_collection": "directus_users",
    "meta": { "sort_field": null },
    "schema": { "on_delete": "SET NULL" }
  }
}
```

</basic_relation>

<relationship_types> <m2o_workflow>

### M2O (Many-to-One)

Multiple items in one collection relate to one item in another.

**Example**: Many articles → one author

**Complete M2O Workflow**:

1. **Add M2O field** to the "many" collection → Use `fields` tool with `type: "uuid"` and `interface: "select-dropdown-m2o"` (see `fields` tool `<relationship_fields>` M2O example)

2. **Create relation** (use `relations` tool):

```json
{
  "action": "create",
  "collection": "articles",
  "field": "author",
  "data": {
    "collection": "articles",
    "field": "author",
    "related_collection": "directus_users",
    "schema": { "on_delete": "SET NULL" }
  }
}
```

</m2o_workflow>

<o2m_workflow>

### O2M (One-to-Many)

One item in a collection relates to many items in another collection.

**Example**: One author → many articles

**Complete O2M Workflow**:

1. **Add O2M field** to the "one" collection → Use `fields` tool with `type: "alias"`, `special: ["o2m"]`, and `interface: "list-o2m"`

2. **Create relation** connecting to existing M2O field (use `relations` tool):

```json
{
  "action": "create",
  "collection": "articles",
  "field": "author",
  "data": {
    "collection": "articles",
    "field": "author",
    "related_collection": "authors",
    "meta": {
      "one_field": "articles",
      "sort_field": null
    },
    "schema": {
      "on_delete": "SET NULL"
    }
  }
}
```

</o2m_workflow>

<m2m_workflow>

### M2M (Many-to-Many)

Items in both collections can relate to multiple items in the other.

**Example**: Articles ↔ Tags

**Complete M2M Workflow**:

1. **Create junction collection** → Use `collections` tool to create `article_tags` with UUID primary key

2. **Add alias fields** to both collections → Use `fields` tool with `type: "alias"`, `special: ["m2m"]`, and `interface: "list-m2m"` (see `fields` tool `<relationship_fields>` M2M example)

3. **Add junction fields** → Use `fields` tool to add `article_id` (UUID), `tag_id` (UUID), and optional `sort` (integer) fields to junction collection

4. **Create bidirectional relations** (use `relations` tool with CASCADE):

```json
// First relation
{
  "action": "create",
  "collection": "article_tags",
  "field": "article_id",
  "data": {
    "collection": "article_tags",
    "field": "article_id",
    "related_collection": "articles",
    "meta": {
      "one_field": "tags",
      "junction_field": "tag_id",
      "sort_field": "sort"
    },
    "schema": {"on_delete": "CASCADE"}
  }
}

// Second relation
{
  "action": "create",
  "collection": "article_tags",
  "field": "tag_id",
  "data": {
    "collection": "article_tags",
    "field": "tag_id",
    "related_collection": "tags",
    "meta": {
      "one_field": "articles",
      "junction_field": "article_id"
    },
    "schema": {"on_delete": "CASCADE"}
  }
}
```

</m2m_workflow>

<m2a_workflow>

### M2A (Many-to-Any)

Items can relate to items from multiple different collections.

**Example**: Page blocks (hero, text, gallery)

**Complete M2A Workflow**:

1. **Create block collections** → Use `collections` tool to create each block type (e.g., `block_hero`, `block_text`, `block_gallery`) with UUID primary keys and specific fields

2. **Create junction collection** → Use `collections` tool to create `page_blocks` junction (hidden collection with UUID primary key)

3. **Add M2A field** → Use `fields` tool with `type: "alias"`, `special: ["m2a"]`, and `interface: "list-m2a"`

4. **Add junction fields** → Use `fields` tool to add `page_id` (UUID), `item` (string), `collection` (string), and `sort` (integer) fields to junction

5. **Create relations** (use `relations` tool):

```json
// Item relation (polymorphic)
{
  "action": "create",
  "collection": "page_blocks",
  "field": "item",
  "data": {
    "collection": "page_blocks",
    "field": "item",
    "related_collection": null,
    "meta": {
      "one_allowed_collections": ["block_hero", "block_text", "block_gallery"],
      "one_collection_field": "collection",
      "junction_field": "page_id"
    }
  }
}

// Page relation
{
  "action": "create",
  "collection": "page_blocks",
  "field": "page_id",
  "data": {
    "collection": "page_blocks",
    "field": "page_id",
    "related_collection": "pages",
    "meta": {
      "one_field": "blocks",
      "junction_field": "item",
      "sort_field": "sort"
    },
    "schema": {"on_delete": "CASCADE"}
  }
}
```

</m2a_workflow>

<file_relationships>

### File/Files Relationships

**Single File (M2O)**:

1. **Add file field** → Use `fields` tool with `type: "uuid"`, `special: ["file"]`, and `interface: "file"`

2. **Create relation** (use `relations` tool):

```json
{
  "action": "create",
  "collection": "articles",
  "field": "cover_image",
  "data": {
    "collection": "articles",
    "field": "cover_image",
    "related_collection": "directus_files",
    "schema": { "on_delete": "SET NULL" }
  }
}
```

**Multiple Files (M2M)**:

**Complete Files Workflow**:

1. **Create junction collection** → Use `collections` tool to create junction with UUID primary key

2. **Add files field** to main collection → Use `fields` tool with `type: "alias"`, `special: ["files"]`, and `interface: "files"` (see `fields` tool `<relationship_fields>` Files example)

3. **Add junction fields** → Use `fields` tool to add hidden `article_id` and `directus_files_id` (both UUID) fields to junction

4. **Create relations** (use `relations` tool):

```json
// Article relation
{
  "action": "create",
  "collection": "article_images",
  "field": "article_id",
  "data": {
    "collection": "article_images",
    "field": "article_id",
    "related_collection": "articles",
    "meta": {
      "one_field": "images",
      "junction_field": "directus_files_id"
    },
    "schema": {"on_delete": "CASCADE"}
  }
}

// File relation
{
  "action": "create",
  "collection": "article_images",
  "field": "directus_files_id",
  "data": {
    "collection": "article_images",
    "field": "directus_files_id",
    "related_collection": "directus_files",
    "meta": {
      "junction_field": "article_id"
    },
    "schema": {"on_delete": "CASCADE"}
  }
}
```

</file_relationships>

<translations_workflow>

### Translations

Special M2M relationship with the `languages` collection.

**Complete Translations Workflow**:

1. **Ensure languages collection exists** (use `schema` tool to check)

2. **Create translations junction** → Use `collections` tool to create junction with UUID primary key

3. **Add translations field** → Use `fields` tool with `type: "alias"`, `special: ["translations"]`, and `interface: "translations"` (see `fields` tool `<relationship_fields>` Translations example)

4. **Configure junction fields and relations** → Follow the M2M pattern with the languages collection (see the sketch below)
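
A minimal sketch of step 4, assuming an `article_translations` junction with `articles_id` and `languages_code` fields (all names here are illustrative, following the M2M pattern above):

```json
{
  "action": "create",
  "collection": "article_translations",
  "field": "articles_id",
  "data": {
    "collection": "article_translations",
    "field": "articles_id",
    "related_collection": "articles",
    "meta": {
      "one_field": "translations",
      "junction_field": "languages_code"
    },
    "schema": { "on_delete": "CASCADE" }
  }
}
```

</translations_workflow>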

</relationship_types>

<relation_settings>

## Relation Settings

### Schema Options

- `on_delete`:
  - `CASCADE` (default for M2M) - Delete related items
  - `SET NULL` (default for M2O) - Set field to null
  - `NO ACTION` - Prevent deletion
  - `RESTRICT` - Prevent if related items exist
  - `SET DEFAULT` - Set to default value

- `on_update`:
  - Same options as `on_delete`

### Meta Options

- `one_field`: Field name in related collection (for O2M side)
- `junction_field`: Opposite field in junction table
- `sort_field`: Enable manual sorting (typically an integer field)
- `one_deselect_action`: `nullify` or `delete`
- `one_allowed_collections`: Array of collection names for M2A
- `one_collection_field`: Field that stores collection name in M2A </relation_settings>
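
Since only `schema.on_delete`/`on_update` and `meta` can be changed after creation, an `update` call is a sketch like:

```json
{
  "action": "update",
  "collection": "articles",
  "field": "author",
  "data": {
    "schema": { "on_delete": "CASCADE" }
  }
}
```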

<common_patterns>

## Common Patterns

### Blog System

1. `articles` M2O `directus_users` (author)
2. `articles` M2M `tags`
3. `articles` M2O `directus_files` (cover_image)
4. `articles` M2M `directus_files` (gallery)
5. `articles` O2M `comments`
6. `comments` M2O `directus_users` (author)

### E-commerce

1. `products` M2M `categories`
2. `products` M2O `brands`
3. `products` O2M `reviews`
4. `products` M2M `directus_files` (gallery)
5. `orders` O2M `order_items`
6. `order_items` M2O `products`
7. `orders` M2O `directus_users` (customer)
8. `reviews` M2O `directus_users` (reviewer)

### Page Builder

- `pages` M2A `blocks` field (`page_blocks` junction collection)
- Collections: `block_hero`, `block_text`, `block_gallery` </common_patterns>

<naming_conventions>

## Naming Conventions

- Junction collections: `{singular}_{plural}` (e.g., `product_categories`)
- Junction fields: Singular form of related collection (e.g., `product_id`, `category_id`)
- Alias fields: Plural form for many relations (e.g., `tags`, `categories`)
- M2O fields: Singular form (e.g., `author`, `brand`) </naming_conventions>

<related_tools>

## Related Tools

- `collections`: Create collections and junctions first
- `fields`: Add relational fields before creating relations
- `schema`: View complete relationship structure </related_tools>

130
api/src/mcp/tools/prompts/schema.md
Normal file
@@ -0,0 +1,130 @@
Retrieve essential Directus schema information to understand the data structure - collections, fields, and relationships. This is a **READ-ONLY discovery tool** designed to help you explore and comprehend existing schema. It is for schema exploration and understanding only. For schema modifications, use the dedicated `collections`, `fields`, and `relations` tools.

**Note**: This tool provides an extremely curated response optimized for LLM understanding, not the raw Directus API schema response.

## Operation Modes

### Discovery Mode (Default)

**Usage**: Call without parameters or with empty `keys` array

```json
{}
```

**Returns**: Lightweight schema overview

- `collections`: Alphabetically sorted array of real collection names (database tables)
- `collection_folders`: Alphabetically sorted array of folder names (UI-only, not real tables). Distinct from file folders. They are used for grouping different collections together in the UI.
- `notes`: Descriptions for both collections and folders (where available)

**Important**: Folders share the same namespace as collections. Before creating a new collection, check the `collection_folders` array to avoid naming conflicts (e.g., you can't create a 'website' collection if a 'website' folder exists).

**Sample Response**:

```json
{
  "collections": ["categories", "contacts", "organizations", "pages", "posts", "products"],
  "collection_folders": ["content", "marketing", "website"],
  "notes": {
    "contacts": "People at the organizations you work with",
    "organizations": "Your clients and customers",
    "pages": "Static pages with page builder blocks",
    "posts": "Blog posts and articles",
    "content": "Content management folder"
  }
}
```

**Use case**: Initial exploration, getting oriented with available data structures

### Detailed Mode

**Usage**: Specify collections to examine

```json
{ "keys": ["products", "categories", "users"] }
```

**Returns**: Object of collections with field and relation details

- Field definitions (type, validation, defaults)
- Relationship mappings (foreign keys, junction tables)
- Interface configurations and display options
- Field metadata and constraints
- **Nested field structures** for JSON fields with repeaters/lists (includes recursive nesting)

**Sample Response**:

```json
{
  "posts": {
    "id": {
      "type": "uuid",
      "primary_key": true,
      "readonly": true
    },
    "title": {
      "type": "string",
      "required": true
    },
    "status": {
      "type": "string",
      "interface": {
        "type": "select-dropdown",
        "choices": ["draft", "published", "archived"]
      }
    },
    "category": {
      "type": "uuid",
      "relation": {
        "type": "m2o",
        "related_collections": ["categories"]
      }
    }
  },
  "block_faqs": {
    "headline": {
      "type": "text",
      "interface": {
        "type": "input-rich-text-html"
      }
    },
    "faqs": {
      "type": "json",
      "interface": {
        "type": "list"
      },
      "fields": {
        "title": {
          "type": "text",
          "interface": {
            "type": "input-multiline"
          }
        },
        "answer": {
          "type": "text",
          "interface": {
            "type": "input-multiline"
          }
        }
      }
    }
  }
}
```

**Use case**: Deep-dive analysis of specific collections before working with data or schema.

## Recommended Workflow

1. **Discover**: Call without `keys` to see all available collections
2. **Analyze**: Based on user requirements, identify relevant collections
3. **Detail**: Call with specific collection names to understand field structures
4. **Implement**: Use appropriate CRUD tools with schema knowledge

1
api/src/mcp/tools/prompts/system-prompt-description.md
Normal file
@@ -0,0 +1 @@
IMPORTANT! Always call this tool first. It will retrieve important information about your role.

44
api/src/mcp/tools/prompts/system-prompt.md
Normal file
@@ -0,0 +1,44 @@
You are **Directus Assistant**, an expert in Directus CMS with direct access to a Directus instance through specialized tools.

## Core Expertise

- **Content Specialist**: Content management, editing, and optimization
- **Schema Architect**: Database design, relationships, and data modeling
- **Automation Expert**: Flows, webhooks, and workflow configuration
- **API Integration**: REST/GraphQL patterns and system integration

## Communication Style

- **Be concise**: Users prefer short, direct responses. One-line confirmations: "Created collection 'products'"
- **Match the audience**: Technical for developers, plain language for content editors
- **NEVER guess**: If not at least 99% certain about field values or user intent, ask for clarification

## Critical Operations

### Schema & Data Changes

- **Confirm before modifying**: Collections, fields, and relations always need approval from the user.
- **Check namespace conflicts**: Collection folders and regular collections share a namespace. Collection folders are distinct from file folders. Collection folders are just collection entries without a corresponding table in the database, used for grouping.
- **Respect workflows**: Check draft/published states before modifications

### Safety Rules

- **Deletions require confirmation**: ALWAYS ask before deleting anything
- **Warn on bulk operations**: Alert when affecting many items ("This updates 500 items")
- **Avoid duplicates**: Never create duplicates if you can't modify existing items
- **Use semantic HTML**: No classes, IDs, or inline styles in content fields (unless explicitly asked for by the user)

### Error Recovery

- **Auto-fix clear errors**: Retry once for obvious issues like "field X required"
- **Stop after 2 attempts**: Consult user if errors persist or are unclear
- **Optimize queries**: Use the `fields` param to minimize overfetching, and use pagination for large datasets

## Workflow

1. Start with `schema()` to discover collections
2. Use `schema(keys: ["collection_name"])` for field details relevant to the user task
3. Perform operations based on user needs and permissions

214
api/src/mcp/tools/prompts/trigger-flow.md
Normal file
@@ -0,0 +1,214 @@
# Directus Trigger Flow Tool

Execute flows programmatically. This tool allows you to trigger manual flows, pass data to flows, and chain flows together for complex automation.

## 🔑 Key Concepts

- **Prerequisite**: ALWAYS read the flow first using the `flows` tool to get the full definition
- **Manual Flows**: Flows with `trigger: "manual"` are designed to be triggered via UI or this tool
- **Flow Chaining**: Any flow can be triggered, receiving data in `$trigger.body`
- **Validation**: Tool validates collection support, required fields, and selection requirements

## Required Parameters

```json
{
  "flowDefinition": {}, // FULL flow object from flows.read
  "flowId": "uuid", // Flow ID to trigger
  "collection": "name", // Collection context
  "keys": ["id1"], // Item IDs (required if flow needs selection)
  "method": "POST", // GET or POST (default: GET)
  "data": {}, // Optional payload data
  "query": {}, // Optional query parameters
  "headers": {} // Optional headers
}
```

## 📋 Flow Types & Requirements

### Manual Flow with Selection

For flows with `requireSelection: true` or undefined:

```json
{
  "flowDefinition": {
    "id": "abc-123",
    "trigger": "manual",
    "options": {
      "collections": ["products", "orders"],
      "requireSelection": true,
      "fields": [
        {
          "field": "reason",
          "name": "Reason",
          "meta": { "required": true }
        }
      ]
    }
  },
  "flowId": "abc-123",
  "collection": "products",
  "keys": ["prod-1", "prod-2"], // REQUIRED
  "data": {
    "reason": "Bulk update" // Required field
  }
}
```

### Manual Flow without Selection

For flows with `requireSelection: false`:

```json
{
  "flowDefinition": {
    "id": "xyz-456",
    "trigger": "manual",
    "options": {
      "collections": ["reports"],
      "requireSelection": false
    }
  },
  "flowId": "xyz-456",
  "collection": "reports",
  "keys": [], // Optional when requireSelection: false
  "data": {
    "type": "monthly"
  }
}
```

### Webhook/Operation Flows

Flows with webhook or operation triggers:

```json
{
  "flowDefinition": {
    "id": "webhook-flow",
    "trigger": "webhook",
    "options": {
      "collections": ["*"] // or specific collections
    }
  },
  "flowId": "webhook-flow",
  "collection": "any_collection",
  "method": "POST",
  "data": {
    "custom": "payload"
  },
  "headers": {
    "X-Custom-Header": "value"
  }
}
```

## 🔄 Validation Rules

The tool validates:

1. **Flow Definition**: Must provide complete flow object
2. **Flow ID Match**: Definition ID must match flowId parameter
3. **Collection Support**: Collection must be in flow's collections array
4. **Selection Required**: Keys required when `requireSelection !== false`
5. **Required Fields**: All required fields must be present in data
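
For instance, this call would fail rule 4 because `keys` is omitted while the flow requires selection (IDs are placeholders):

```json
{
  "flowDefinition": { "id": "abc-123", "trigger": "manual", "options": { "collections": ["products"] } },
  "flowId": "abc-123",
  "collection": "products",
  "data": { "reason": "Bulk update" }
}
```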

## ⚡ Common Workflows

### 1. Export Selected Items

```json
// Step 1: Get flow definition
flows.read({ filter: { name: { _eq: "Export Items" }}})

// Step 2: Trigger with selection
{
  "flowDefinition": { /* from step 1 */ },
  "flowId": "export-flow-id",
  "collection": "products",
  "keys": ["1", "2", "3"],
  "data": {
    "format": "csv",
    "email": "user@example.com"
  }
}
```

### 2. Process Batch Without Selection

```json
// For flows that process all items
{
  "flowDefinition": {
    /* flow with requireSelection: false */
  },
  "flowId": "batch-process",
  "collection": "orders",
  "keys": [], // Empty when not required
  "data": {
    "status": "pending",
    "date_range": "last_30_days"
  }
}
```

### 3. Chain Flows Together

```json
// Trigger a flow from another flow
{
  "flowDefinition": {
    /* operation trigger flow */
  },
  "flowId": "child-flow",
  "collection": "notifications",
  "data": {
    "parent_result": "{{ $last }}", // Data from parent flow
    "step": 2
  }
}
```

## 📊 Data Access in Triggered Flow

The triggered flow receives:

- `$trigger.body` - The `data` parameter you send
- `$trigger.query` - The `query` parameter
- `$trigger.headers` - The `headers` parameter
- `$trigger.collection` - The collection context
- `$trigger.keys` - The selected item IDs
- `$accountability` - User/permission context
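
For example, an operation inside the triggered flow could read a payload field sent via `data` like this (the `format` field name is illustrative, echoing the export example above):

```json
{
  "type": "log",
  "options": { "message": "Requested format: {{ $trigger.body.format }}" }
}
```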

## ⚠️ Important Notes

- **Read First**: ALWAYS read the flow definition before triggering
- **Collection Wildcard**: `"*"` in collections means any collection is accepted
- **Required Fields**: Check `options.fields` for required inputs
- **For-Each**: If you pass an array, the flow runs once per array item
- **Response**: Flow can return data via its trigger's `return` option
- **Permissions**: Respects the flow's `accountability` setting

## 🚨 Common Mistakes to Avoid

1. **Don't** trigger without reading the flow definition first
2. **Don't** omit keys when the flow requires selection
3. **Don't** ignore required fields in the flow configuration
4. **Don't** use a collection that isn't in the flow's collections list
5. **Don't** assume `requireSelection` - check explicitly
6. **Don't** pass stringified JSON - use native objects

## Decision Tree

```
1. Read flow definition using flows tool
2. Check trigger type:
   - manual → Check requireSelection
   - webhook/operation → Keys optional
3. Validate collection in flow.options.collections
4. If requireSelection !== false → keys required
5. Check flow.options.fields for required data fields
6. Trigger with all validated parameters
```

209
api/src/mcp/tools/relation.test.ts
Normal file
@@ -0,0 +1,209 @@
import type { Accountability, Relation, SchemaOverview } from '@directus/types';
import { afterEach, beforeEach, describe, expect, test, vi, type MockedFunction } from 'vitest';
import { RelationsService } from '../../services/relations.js';
import { relations } from './relations.js';

vi.mock('../../services/relations.js');

vi.mock('../../utils/get-snapshot.js', () => ({
	getSnapshot: vi.fn(),
}));

describe('relations tool', () => {
	const mockSchema = { collections: {}, fields: {}, relations: {} } as unknown as SchemaOverview;
	const mockAccountability = { user: 'test-user', admin: true } as Accountability;
	const mockSanitizedQuery = { fields: ['*'] };

	afterEach(() => {
		vi.clearAllMocks();
	});

	describe('file operations', () => {
		let mockRelationsService: {
			createOne: MockedFunction<any>;
			readOne: MockedFunction<any>;
			readAll: MockedFunction<any>;
			updateOne: MockedFunction<any>;
			deleteOne: MockedFunction<any>;
		};

		beforeEach(() => {
			mockRelationsService = {
				createOne: vi.fn(),
				readOne: vi.fn(),
				readAll: vi.fn(),
				updateOne: vi.fn(),
				deleteOne: vi.fn(),
			};

			vi.mocked(RelationsService).mockImplementation(() => mockRelationsService as unknown as RelationsService);
		});

		describe('CREATE action', () => {
			test('should create a relation', async () => {
				const relationData = {
					collection: 'articles',
					field: 'category_id',
					related_collection: 'categories',
				} as Relation;

				mockRelationsService.createOne.mockResolvedValue([1]);
				mockRelationsService.readOne.mockResolvedValue([relationData]);

				const result = await relations.handler({
					args: {
						action: 'create',
						collection: 'articles',
						data: relationData,
					},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(RelationsService).toHaveBeenCalledWith({
					schema: mockSchema,
					accountability: mockAccountability,
				});

				expect(result).toEqual({ type: 'text', data: [relationData] });
			});
		});

		describe('READ action', () => {
			test('should read relation by field', async () => {
				const collection = 'articles';
				const field = 'category_id';
				const expectedRelations = { collection, field, related_collection: 'categories' };
				mockRelationsService.readOne.mockResolvedValue(expectedRelations);

				const result = await relations.handler({
					args: {
						collection,
						field,
						action: 'read',
					},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockRelationsService.readOne).toHaveBeenCalledWith(collection, field);
				expect(mockRelationsService.readAll).not.toHaveBeenCalled();
				expect(result).toEqual({ type: 'text', data: expectedRelations });
			});

			test('should read relations', async () => {
				const expectedRelations = [
					{ collection: 'articles', field: 'category_id', related_collection: 'categories' },
					{ collection: 'articles', field: 'author_id', related_collection: 'users' },
				];

				mockRelationsService.readAll.mockResolvedValue(expectedRelations);

				const result = await relations.handler({
					args: {
						collection: 'articles',
						action: 'read',
					},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockRelationsService.readAll).toHaveBeenCalled();
				expect(result).toEqual({ type: 'text', data: expectedRelations });
			});
		});

		describe('UPDATE action', () => {
			test('should update relation by field', async () => {
				const collection = 'articles';
				const field = 'category_id';

				const updateData = {
					meta: { one_field: 'updated_field' },
				} as Relation;

				const expectedResult = {
					collection,
					field,
					related_collection: 'categories',
					meta: { one_field: 'updated_field' },
				};

				mockRelationsService.readOne.mockResolvedValue(expectedResult);

				const result = await relations.handler({
					args: {
						collection,
						field,
						action: 'update',
						data: updateData,
					},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockRelationsService.updateOne).toHaveBeenCalledWith(collection, field, updateData);
				expect(result).toEqual({ type: 'text', data: expectedResult });
			});
		});

		describe('DELETE action', () => {
			test('should delete relation by collection + field', async () => {
				const collection = 'articles';
				const field = 'category_id';

				const result = await relations.handler({
					args: {
						collection,
						field,
						action: 'delete',
					},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(mockRelationsService.deleteOne).toHaveBeenCalledWith(collection, field);
				expect(result).toEqual({ type: 'text', data: { collection, field } });
			});
		});
	});

	describe('error handling', () => {
		test('should throw error for invalid action', async () => {
			await expect(
				relations.handler({
					args: {
						action: 'invalid' as any,
					},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				}),
			).rejects.toThrow('Invalid action.');
		});
	});

	describe('tool configuration', () => {
		test('should have correct tool name', () => {
			expect(relations.name).toBe('relations');
		});

		test('should be admin tool', () => {
			expect(relations.admin).toBe(true);
		});

		test('should have description', () => {
			expect(relations.description).toBeDefined();
		});

		test('should have input and validation schemas', () => {
			expect(relations.inputSchema).toBeDefined();
			expect(relations.validateSchema).toBeDefined();
		});
	});
});

111
api/src/mcp/tools/relations.ts
Normal file
@@ -0,0 +1,111 @@
import { InvalidPayloadError } from '@directus/errors';
import type { Relation } from '@directus/types';
import { z } from 'zod';
import { RelationsService } from '../../services/relations.js';
import { defineTool } from '../define.js';
import {
	RelationItemInputSchema,
	RelationItemValidateCreateSchema,
	RelationItemValidateUpdateSchema,
} from '../schema.js';
import prompts from './prompts/index.js';

const RelationsValidateSchema = z.discriminatedUnion('action', [
	z.object({
		action: z.literal('create'),
		collection: z.string(),
		field: z.string().optional(),
		data: RelationItemValidateCreateSchema,
	}),
	z.object({
		action: z.literal('read'),
		collection: z.string().optional(),
		field: z.string().optional(),
	}),
	z.object({
		action: z.literal('update'),
		collection: z.string(),
		field: z.string(),
		data: RelationItemValidateUpdateSchema,
	}),
	z.object({
		action: z.literal('delete'),
		collection: z.string(),
		field: z.string(),
	}),
]);

const RelationsInputSchema = z.object({
	action: z.enum(['create', 'read', 'update', 'delete']).describe('The operation to perform'),
	collection: z.string().describe('The name of the collection (required for create, update, delete)').optional(),
	field: z.string().describe('The name of the field (required for create, update, delete)').optional(),
	data: RelationItemInputSchema.optional().describe('The relation data (required for create, update)'),
});

export const relations = defineTool<z.infer<typeof RelationsValidateSchema>>({
	name: 'relations',
	admin: true,
	description: prompts.relations,
	annotations: {
		title: 'Directus - Relations',
	},
	inputSchema: RelationsInputSchema,
	validateSchema: RelationsValidateSchema,
	async handler({ args, schema, accountability }) {
		const service = new RelationsService({
			schema,
			accountability,
		});

		if (args.action === 'create') {
			await service.createOne(args.data as Partial<Relation>);

			const result = await service.readOne(args.collection, args.field || args.data.field);

			return {
				type: 'text',
				data: result || null,
			};
		}

		if (args.action === 'read') {
			let result = null;

			if (args.field && args.collection) {
				result = await service.readOne(args.collection, args.field);
			} else if (args.collection) {
				result = await service.readAll(args.collection);
			} else {
				result = await service.readAll();
			}

			return {
				type: 'text',
				data: result || null,
			};
		}

		if (args.action === 'update') {
			await service.updateOne(args.collection, args.field, args.data as Partial<Relation>);

			const result = await service.readOne(args.collection, args.field);

			return {
				type: 'text',
				data: result || null,
			};
		}

		if (args.action === 'delete') {
			const { collection, field } = args;
			await service.deleteOne(collection, field);

			return {
				type: 'text',
				data: { collection, field },
			};
		}

		throw new InvalidPayloadError({ reason: 'Invalid action' });
	},
});

951
api/src/mcp/tools/schema.test.ts
Normal file
@@ -0,0 +1,951 @@
|
||||
import type { Accountability, SchemaOverview } from '@directus/types';
|
||||
import { afterEach, beforeEach, describe, expect, test, vi, type MockedFunction } from 'vitest';
|
||||
import { CollectionsService } from '../../services/collections.js';
|
||||
import { FieldsService } from '../../services/fields.js';
|
||||
import { RelationsService } from '../../services/relations.js';
|
||||
import { schema } from './schema.js';
|
||||
|
||||
vi.mock('../../services/collections.js');
|
||||
vi.mock('../../services/fields.js');
|
||||
vi.mock('../../services/relations.js');
|
||||
|
||||
describe('schema tool', () => {
|
||||
const mockSchema = { collections: {}, fields: {}, relations: {} } as unknown as SchemaOverview;
|
||||
const mockAccountability = { user: 'test-user', admin: true } as Accountability;
|
||||
const mockSanitizedQuery = { fields: ['*'] };
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('overview', () => {
|
||||
let mockRelationsService: {
|
||||
readAll: MockedFunction<any>;
|
||||
};
|
||||
|
||||
let mockFieldsService: {
|
||||
readAll: MockedFunction<any>;
|
||||
};
|
||||
|
||||
let mockCollectionsService: {
|
||||
readByQuery: MockedFunction<any>;
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
mockRelationsService = {
|
||||
readAll: vi.fn(),
|
||||
};
|
||||
|
||||
mockFieldsService = {
|
||||
readAll: vi.fn(),
|
||||
};
|
||||
|
||||
mockCollectionsService = {
|
||||
readByQuery: vi.fn(),
|
||||
};
|
||||
|
||||
vi.mocked(RelationsService).mockImplementation(() => mockRelationsService as unknown as RelationsService);
|
||||
vi.mocked(FieldsService).mockImplementation(() => mockFieldsService as unknown as FieldsService);
|
||||
vi.mocked(CollectionsService).mockImplementation(() => mockCollectionsService as unknown as CollectionsService);
|
||||
});
|
||||
|
||||
		describe('LIGHTWEIGHT', () => {
			test.each([undefined, []])('should return collections and folders when no keys provided', async (keys) => {
				mockCollectionsService.readByQuery.mockResolvedValue([
					{
						collection: 'users',
						schema: { name: 'users' },
						meta: { note: 'User data' },
					},
					{
						collection: 'posts',
						schema: { name: 'posts' },
						meta: null,
					},
					{
						collection: 'folder1',
						schema: null,
						meta: { note: 'A folder' },
					},
				]);

				mockFieldsService.readAll.mockResolvedValue([]);
				mockRelationsService.readAll.mockResolvedValue([]);

				const result = await schema.handler({
					args: { keys },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(result).toEqual({
					type: 'text',
					data: {
						collections: ['users', 'posts'],
						collection_folders: ['folder1'],
						notes: {
							users: 'User data',
							folder1: 'A folder',
						},
					},
				});
			});

			test('should handle collections without notes', async () => {
				mockCollectionsService.readByQuery.mockResolvedValue([
					{
						collection: 'users',
						schema: { name: 'users' },
						meta: null,
					},
				]);

				mockFieldsService.readAll.mockResolvedValue([]);
				mockRelationsService.readAll.mockResolvedValue([]);

				const result = await schema.handler({
					args: {},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(result).toEqual({
					type: 'text',
					data: {
						collections: ['users'],
						collection_folders: [],
						notes: {},
					},
				});
			});

			test('should handle folders', async () => {
				mockCollectionsService.readByQuery.mockResolvedValue([
					{
						collection: 'my_folder',
						schema: null,
						meta: null,
					},
				]);

				mockFieldsService.readAll.mockResolvedValue([]);
				mockRelationsService.readAll.mockResolvedValue([]);

				const result = await schema.handler({
					args: {},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(result).toEqual({
					type: 'text',
					data: {
						collections: [],
						collection_folders: ['my_folder'],
						notes: {},
					},
				});
			});

			test('should handle empty collections array', async () => {
				mockCollectionsService.readByQuery.mockResolvedValue([]);
				mockFieldsService.readAll.mockResolvedValue([]);
				mockRelationsService.readAll.mockResolvedValue([]);

				const result = await schema.handler({
					args: {},
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(result).toEqual({
					type: 'text',
					data: {
						collections: [],
						collection_folders: [],
						notes: {},
					},
				});
			});
		});

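		// With keys, the tool expands each requested collection into per-field detail.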
		describe('DETAILED', () => {
			test('should return detailed field information for requested collections', async () => {
				mockCollectionsService.readByQuery.mockResolvedValue([]);

				mockFieldsService.readAll.mockResolvedValue([
					{
						collection: 'users',
						field: 'id',
						type: 'integer',
						schema: { is_primary_key: true },
						meta: {
							required: true,
							readonly: false,
							note: 'Primary key',
							interface: 'input',
							options: null,
							special: null,
						},
					},
					{
						collection: 'users',
						field: 'name',
						type: 'string',
						schema: { is_primary_key: false },
						meta: {
							required: false,
							readonly: false,
							note: null,
							interface: 'input',
							options: null,
							special: null,
						},
					},
					{
						collection: 'posts',
						field: 'title',
						type: 'string',
						schema: { is_primary_key: false },
						meta: {
							required: true,
							readonly: false,
							note: null,
							interface: 'input',
							options: null,
							special: null,
						},
					},
				]);

				mockRelationsService.readAll.mockResolvedValue([]);

				const result = await schema.handler({
					args: { keys: ['users'] },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(result).toEqual({
					type: 'text',
					data: {
						users: {
							id: {
								type: 'integer',
								primary_key: true,
								required: true,
								note: 'Primary key',
								interface: {
									type: 'input',
								},
							},
							name: {
								type: 'string',
								interface: {
									type: 'input',
								},
							},
						},
					},
				});
			});

			test('should skip UI-only alias fields', async () => {
				mockCollectionsService.readByQuery.mockResolvedValue([]);

				mockFieldsService.readAll.mockResolvedValue([
					{
						collection: 'users',
						field: 'id',
						type: 'integer',
						schema: { is_primary_key: true },
						meta: {
							required: true,
							readonly: false,
							note: null,
							interface: 'input',
							options: null,
							special: null,
						},
					},
					{
						collection: 'users',
						field: 'ui_field',
						type: 'alias',
						schema: null,
						meta: {
							required: false,
							readonly: false,
							note: null,
							interface: 'presentation-divider',
							options: null,
							special: ['no-data'],
						},
					},
				]);

				mockRelationsService.readAll.mockResolvedValue([]);

				const result = await schema.handler({
					args: { keys: ['users'] },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(result).toEqual({
					type: 'text',
					data: {
						users: {
							id: {
								type: 'integer',
								primary_key: true,
								required: true,
								interface: {
									type: 'input',
								},
							},
						},
					},
				});
			});

			test('should handle fields with choices', async () => {
				mockCollectionsService.readByQuery.mockResolvedValue([]);

				mockFieldsService.readAll.mockResolvedValue([
					{
						collection: 'users',
						field: 'status',
						type: 'string',
						schema: { is_primary_key: false },
						meta: {
							required: false,
							readonly: false,
							note: null,
							interface: 'select-dropdown',
							options: {
								choices: [
									{ value: 'active', text: 'Active' },
									{ value: 'inactive', text: 'Inactive' },
								],
							},
							special: null,
						},
					},
				]);

				mockRelationsService.readAll.mockResolvedValue([]);

				const result = await schema.handler({
					args: { keys: ['users'] },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(result).toEqual({
					type: 'text',
					data: {
						users: {
							status: {
								type: 'string',
								interface: {
									type: 'select-dropdown',
									choices: ['active', 'inactive'],
								},
							},
						},
					},
				});
			});

			test('should process nested fields in JSON fields', async () => {
				mockCollectionsService.readByQuery.mockResolvedValue([]);

				mockFieldsService.readAll.mockResolvedValue([
					{
						collection: 'users',
						field: 'metadata',
						type: 'json',
						schema: { is_primary_key: false },
						meta: {
							required: false,
							readonly: false,
							note: null,
							interface: 'list',
							options: {
								fields: [
									{
										field: 'name',
										type: 'string',
										meta: {
											required: true,
											interface: 'input',
										},
									},
									{
										field: 'value',
										type: 'string',
										meta: {
											interface: 'textarea',
										},
									},
								],
							},
							special: null,
						},
					},
				]);

				mockRelationsService.readAll.mockResolvedValue([]);

				const result = await schema.handler({
					args: { keys: ['users'] },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(result).toEqual({
					type: 'text',
					data: {
						users: {
							metadata: {
								type: 'json',
								interface: {
									type: 'list',
								},
								fields: {
									name: {
										type: 'string',
										required: true,
										interface: {
											type: 'input',
										},
									},
									value: {
										type: 'string',
										interface: {
											type: 'textarea',
										},
									},
								},
							},
						},
					},
				});
			});

			test('should handle deeply nested fields up to max depth', async () => {
				const createNestedField = (depth: number): any => ({
					field: `level${depth}`,
					type: 'json',
					meta: {
						interface: 'list',
						options: {
							fields: depth < 5 ? [createNestedField(depth + 1)] : [],
						},
					},
				});

				mockCollectionsService.readByQuery.mockResolvedValue([]);

				mockFieldsService.readAll.mockResolvedValue([
					{
						collection: 'users',
						field: 'deep_metadata',
						type: 'json',
						schema: { is_primary_key: false },
						meta: {
							required: false,
							readonly: false,
							note: null,
							interface: 'list',
							options: {
								fields: [createNestedField(1)],
							},
							special: null,
						},
					},
				]);

				mockRelationsService.readAll.mockResolvedValue([]);

				const result = await schema.handler({
					args: { keys: ['users'] },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				let current = (result?.data as any).users.deep_metadata.fields.level1;

				for (let i = 2; i < 6; i++) {
					expect(current).toBeDefined();
					current = current.fields?.[`level${i}`];
				}

				expect(Object.keys(current?.fields ?? {}).length).eq(0);
			});

			test('should handle collection-item-dropdown interface', async () => {
				mockCollectionsService.readByQuery.mockResolvedValue([]);

				mockFieldsService.readAll.mockResolvedValue([
					{
						collection: 'users',
						field: 'favorite_post',
						type: 'json',
						schema: { is_primary_key: false },
						meta: {
							required: false,
							readonly: false,
							note: null,
							interface: 'collection-item-dropdown',
							options: {
								selectedCollection: 'posts',
							},
							special: null,
						},
					},
					{
						collection: 'posts',
						field: 'id',
						type: 'uuid',
						schema: { is_primary_key: true },
						meta: {
							required: true,
							readonly: false,
							note: null,
							interface: 'input',
							options: null,
							special: null,
						},
					},
				]);

				mockRelationsService.readAll.mockResolvedValue([]);

				const result = await schema.handler({
					args: { keys: ['users'] },
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: mockSanitizedQuery,
				});

				expect(result).toEqual({
					type: 'text',
					data: {
						users: {
							favorite_post: {
								fields: {
									collection: {
										value: 'posts',
										type: 'string',
									},
									key: {
										type: 'uuid',
									},
								},
								interface: {
									type: 'collection-item-dropdown',
								},
								type: 'json',
							},
						},
					},
				});
			});

			describe('relationships', () => {
				describe('Many-to-One (M2O)', () => {
					test('should build M2O relation info', async () => {
						mockCollectionsService.readByQuery.mockResolvedValue([]);

						mockFieldsService.readAll.mockResolvedValue([
							{
								collection: 'posts',
								field: 'author',
								type: 'uuid',
								schema: { foreign_key_table: 'users' },
								meta: {
									required: false,
									readonly: false,
									note: null,
									interface: 'select-dropdown-m2o',
									options: null,
									special: ['m2o'],
								},
							},
						]);

						mockRelationsService.readAll.mockResolvedValue([]);

						const result = await schema.handler({
							args: { keys: ['posts'] },
							schema: mockSchema,
							accountability: mockAccountability,
							sanitizedQuery: mockSanitizedQuery,
						});

						expect(result).toEqual({
							type: 'text',
							data: {
								posts: {
									author: {
										interface: {
											type: 'select-dropdown-m2o',
										},
										relation: {
											type: 'm2o',
											collection: 'users',
										},
										type: 'uuid',
									},
								},
							},
						});
					});
				});

				describe('One-to-Many (O2M)', () => {
					test('should build O2M relation info', async () => {
						mockCollectionsService.readByQuery.mockResolvedValue([]);

						mockFieldsService.readAll.mockResolvedValue([
							{
								collection: 'users',
								field: 'posts',
								type: 'alias',
								schema: null,
								meta: {
									required: false,
									readonly: false,
									note: null,
									interface: 'list-o2m',
									options: null,
									special: ['o2m'],
								},
							},
						]);

						mockRelationsService.readAll.mockResolvedValue([
							{
								collection: 'posts',
								field: 'author',
								related_collection: 'users',
								meta: {
									one_field: 'posts',
									one_collection: 'users',
								},
								schema: {
									table: 'posts',
									column: 'author',
									foreign_key_table: 'users',
									foreign_key_column: 'id',
								},
							},
						]);

						const result = await schema.handler({
							args: { keys: ['users'] },
							schema: mockSchema,
							accountability: mockAccountability,
							sanitizedQuery: mockSanitizedQuery,
						});

						expect(result).toEqual({
							type: 'text',
							data: {
								users: {
									posts: {
										interface: {
											type: 'list-o2m',
										},
										relation: {
											type: 'o2m',
											collection: 'posts',
											many_field: 'author',
										},
										type: 'alias',
									},
								},
							},
						});
					});
				});

				describe('Many-to-Many (M2M)', () => {
					test('should build M2M relation info', async () => {
						mockCollectionsService.readByQuery.mockResolvedValue([]);

						mockFieldsService.readAll.mockResolvedValue([
							{
								collection: 'users',
								field: 'roles',
								type: 'alias',
								schema: null,
								meta: {
									required: false,
									readonly: false,
									note: null,
									interface: 'list-m2m',
									options: null,
									special: ['m2m'],
								},
							},
						]);

						mockRelationsService.readAll.mockResolvedValue([
							{
								collection: 'users_roles',
								field: 'users_id',
								related_collection: 'users',
								meta: {
									one_field: 'roles',
									one_collection: 'users',
									junction_field: 'roles_id',
									sort_field: 'sort',
								},
								schema: {
									table: 'users_roles',
									column: 'users_id',
									foreign_key_table: 'users',
									foreign_key_column: 'id',
								},
							},
							{
								collection: 'users_roles',
								field: 'roles_id',
								related_collection: 'roles',
								meta: null,
								schema: {
									table: 'users_roles',
									column: 'roles_id',
									foreign_key_table: 'roles',
									foreign_key_column: 'id',
								},
							},
						]);

						const result = await schema.handler({
							args: { keys: ['users'] },
							schema: mockSchema,
							accountability: mockAccountability,
							sanitizedQuery: mockSanitizedQuery,
						});

						expect(result).toEqual({
							type: 'text',
							data: {
								users: {
									roles: {
										interface: {
											type: 'list-m2m',
										},
										relation: {
											type: 'm2m',
											collection: 'roles',
											junction: {
												collection: 'users_roles',
												many_field: 'users_id',
												junction_field: 'roles_id',
												sort_field: 'sort',
											},
										},
										type: 'alias',
									},
								},
							},
						});
					});

					test('should handle M2M with files', async () => {
						mockCollectionsService.readByQuery.mockResolvedValue([]);

						mockFieldsService.readAll.mockResolvedValue([
							{
								collection: 'posts',
								field: 'images',
								type: 'alias',
								schema: null,
								meta: {
									required: false,
									readonly: false,
									note: null,
									interface: 'files',
									options: null,
									special: ['files'],
								},
							},
						]);

						mockRelationsService.readAll.mockResolvedValue([
							{
								collection: 'posts_files',
								field: 'posts_id',
								related_collection: 'posts',
								meta: {
									one_field: 'images',
									one_collection: 'posts',
									junction_field: 'directus_files_id',
								},
								schema: {
									table: 'posts_files',
									column: 'posts_id',
									foreign_key_table: 'posts',
									foreign_key_column: 'id',
								},
							},
						]);

						const result = await schema.handler({
							args: { keys: ['posts'] },
							schema: mockSchema,
							accountability: mockAccountability,
							sanitizedQuery: mockSanitizedQuery,
						});

						expect(result).toEqual({
							type: 'text',
							data: {
								posts: {
									images: {
										interface: {
											type: 'files',
										},
										relation: {
											type: 'm2m',
											collection: 'directus_files',
											junction: {
												collection: 'posts_files',
												many_field: 'posts_id',
												junction_field: 'directus_files_id',
											},
										},
										type: 'alias',
									},
								},
							},
						});
					});
				});

				describe('Many-to-Any (M2A)', () => {
					test('should build M2A relation info', async () => {
						mockCollectionsService.readByQuery.mockResolvedValue([]);

						mockFieldsService.readAll.mockResolvedValue([
							{
								collection: 'comments',
								field: 'commentable',
								type: 'alias',
								schema: null,
								meta: {
									required: false,
									readonly: false,
									note: null,
									interface: 'list-m2a',
									options: null,
									special: ['m2a'],
								},
							},
						]);

						mockRelationsService.readAll.mockResolvedValue([
							{
								collection: 'comments_relations',
								field: 'comments_id',
								related_collection: 'comments',
								meta: {
									one_field: 'commentable',
									one_collection: 'comments',
								},
								schema: {
									table: 'comments_relations',
									column: 'comments_id',
									foreign_key_table: 'comments',
									foreign_key_column: 'id',
								},
							},
							{
								collection: 'comments_relations',
								field: 'item',
								related_collection: null,
								meta: {
									one_allowed_collections: ['posts', 'pages'],
									one_collection_field: 'collection',
									sort_field: 'sort',
								},
								schema: {
									table: 'comments_relations',
									column: 'item',
									foreign_key_table: null,
									foreign_key_column: 'id',
								},
							},
						]);

						const result = await schema.handler({
							args: { keys: ['comments'] },
							schema: mockSchema,
							accountability: mockAccountability,
							sanitizedQuery: mockSanitizedQuery,
						});

						expect(result).toEqual({
							type: 'text',
							data: {
								comments: {
									commentable: {
										interface: {
											type: 'list-m2a',
										},
										relation: {
											type: 'm2a',
											one_allowed_collections: ['posts', 'pages'],
											junction: {
												collection: 'comments_relations',
												many_field: 'comments_id',
												junction_field: 'item',
												one_collection_field: 'collection',
												sort_field: 'sort',
											},
										},
										type: 'alias',
									},
								},
							},
						});
					});
				});
			});
		});
	});

	describe('tool configuration', () => {
		test('should have correct tool name', () => {
			expect(schema.name).toBe('schema');
		});

		test('should not be admin tool', () => {
			expect(schema.admin).toBeUndefined();
		});

		test('should have description', () => {
			expect(schema.description).toBeDefined();
		});

		test('should have input and validation schemas', () => {
			expect(schema.inputSchema).toBeDefined();
			expect(schema.validateSchema).toBeDefined();
		});
	});
});
454
api/src/mcp/tools/schema.ts
Normal file
@@ -0,0 +1,454 @@
import type { Field, Relation } from '@directus/types';
import { z } from 'zod';
import { CollectionsService } from '../../services/collections.js';
import { FieldsService } from '../../services/fields.js';
import { RelationsService } from '../../services/relations.js';
import type { Collection } from '../../types/collection.js';
import { defineTool } from '../define.js';
import prompts from './prompts/index.js';

export interface fieldOverviewOutput {
	type: string;
	primary_key?: boolean;
	required?: boolean;
	readonly?: boolean;
	note?: string;
	interface?: {
		type: string;
		choices?: Array<string | number>;
	};
	relation?: {
		type: string;
		related_collection?: string;
		many_collection?: string;
		many_field?: string;
		one_allowed_collections?: string[];
		junction?: {
			collection: string;
			many_field: string;
			junction_field: string;
			one_collection_field?: string;
			sort_field?: string;
		};
	};
	fields?: Record<string, fieldOverviewOutput>;
	value?: string;
}

export interface OverviewOutput {
	[collection: string]: {
		[field: string]: fieldOverviewOutput;
	};
}

export interface LightweightOverview {
	collections: string[];
	collection_folders: string[];
	notes: Record<string, string>;
}

export interface SchemaToolSnapshot {
	collections: Collection[];
	fields: Field[];
	relations: Relation[];
}

export const SchemaValidateSchema = z.strictObject({
	keys: z.array(z.string()).optional(),
});

export const SchemaInputSchema = z.object({
	keys: z
		.array(z.string())
		.optional()
		.describe(
			'Collection names to get detailed schema for. If omitted, returns a lightweight list of all collections.',
		),
});

export const schema = defineTool<z.infer<typeof SchemaValidateSchema>>({
	name: 'schema',
	description: prompts.schema,
	annotations: {
		title: 'Directus - Schema',
	},
	inputSchema: SchemaInputSchema,
	validateSchema: SchemaValidateSchema,
	async handler({ args, accountability, schema }) {
		const serviceOptions = {
			schema,
			accountability,
		};

		const collectionsService = new CollectionsService(serviceOptions);

		const collections = await collectionsService.readByQuery();

		// If no keys provided, return lightweight collection list
		if (!args.keys || args.keys.length === 0) {
			const lightweightOverview: LightweightOverview = {
				collections: [],
				collection_folders: [],
				notes: {},
			};

			collections.forEach((collection) => {
				// Separate folders from real collections
				if (!collection.schema) {
					lightweightOverview.collection_folders.push(collection.collection);
				} else {
					lightweightOverview.collections.push(collection.collection);
				}

				// Extract note if exists (for both collections and folders)
				if (collection.meta?.note && !collection.meta.note.startsWith('$t')) {
					lightweightOverview.notes[collection.collection] = collection.meta.note;
				}
			});

			return {
				type: 'text',
				data: lightweightOverview,
			};
		}

		// If keys provided, return detailed schema for requested collections
		const overview: OverviewOutput = {};

		const fieldsService = new FieldsService(serviceOptions);

		const fields = await fieldsService.readAll();

		const relationsService = new RelationsService(serviceOptions);

		const relations = await relationsService.readAll();

		const snapshot = {
			collections,
			fields,
			relations,
		};

		fields.forEach((field) => {
			// Skip collections not requested
			if (!args.keys?.includes(field.collection)) return;

			// Skip UI-only fields
			if (field.type === 'alias' && field.meta?.special?.includes('no-data')) return;

			if (!overview[field.collection]) {
				overview[field.collection] = {};
			}

			const fieldOverview: fieldOverviewOutput = {
				type: field.type,
			};

			if (field.schema?.is_primary_key) {
				fieldOverview.primary_key = field.schema?.is_primary_key;
			}

			if (field.meta?.required) {
				fieldOverview.required = field.meta.required;
			}

			if (field.meta?.readonly) {
				fieldOverview.readonly = field.meta.readonly;
			}

			if (field.meta?.note) {
				fieldOverview.note = field.meta.note;
			}

			if (field.meta?.interface) {
				fieldOverview.interface = {
					type: field.meta.interface,
				};

				if (field.meta.options?.['choices']) {
					fieldOverview.interface.choices = field.meta.options['choices'].map(
						// Only return the value of the choice to reduce size and potential for confusion.
						(choice: { value: string }) => choice.value,
					);
				}
			}

			// Process nested fields for JSON fields with options.fields (like repeaters)
			if (field.type === 'json' && field.meta?.options?.['fields']) {
				const nestedFields = field.meta.options['fields'] as any[];

				fieldOverview.fields = processNestedFields({
					fields: nestedFields,
					maxDepth: 5,
					currentDepth: 0,
					snapshot,
				});
			}

			// Handle collection-item-dropdown interface
			if (field.type === 'json' && field.meta?.interface === 'collection-item-dropdown') {
				fieldOverview.fields = processCollectionItemDropdown({
					field,
					snapshot,
				});
			}

			// Handle relationships
			if (field.meta?.special) {
				const relationshipType = getRelationType(field.meta.special);

				if (relationshipType) {
					fieldOverview.relation = buildRelationInfo(field, relationshipType, snapshot);
				}
			}

			overview[field.collection]![field.field] = fieldOverview;
		});

		return {
			type: 'text',
			data: overview,
		};
	},
});

// Helpers
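// Walks repeater-style `options.fields` definitions recursively, stopping at
// maxDepth so a self-referencing field config cannot recurse without bound.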
function processNestedFields(options: {
	fields: any[];
	maxDepth?: number;
	currentDepth?: number;
	snapshot?: SchemaToolSnapshot | undefined;
}): Record<string, fieldOverviewOutput> {
	const { fields, maxDepth = 5, currentDepth = 0, snapshot } = options;
	const result: Record<string, fieldOverviewOutput> = {};

	if (currentDepth >= maxDepth) {
		return result;
	}

	if (!Array.isArray(fields)) {
		return result;
	}

	for (const field of fields) {
		const fieldKey = field.field || field.name;
		if (!fieldKey) continue;

		const fieldOverview: fieldOverviewOutput = {
			type: field.type ?? 'any',
		};

		if (field.meta) {
			const { required, readonly, note, interface: interfaceConfig, options } = field.meta;

			if (required) fieldOverview.required = required;
			if (readonly) fieldOverview.readonly = readonly;
			if (note) fieldOverview.note = note;

			if (interfaceConfig) {
				fieldOverview.interface = { type: interfaceConfig };

				if (options?.choices) {
					fieldOverview.interface.choices = options.choices;
				}
			}
		}

		// Handle nested fields recursively
		const nestedFields = field.meta?.options?.fields || field.options?.fields;

		if (field.type === 'json' && nestedFields) {
			fieldOverview.fields = processNestedFields({
				fields: nestedFields,
				maxDepth,
				currentDepth: currentDepth + 1,
				snapshot,
			});
		}

		// Handle collection-item-dropdown interface
		if (field.type === 'json' && field.meta?.interface === 'collection-item-dropdown') {
			fieldOverview.fields = processCollectionItemDropdown({
				field,
				snapshot,
			});
		}

		result[fieldKey] = fieldOverview;
	}

	return result;
}

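// Describes the `{ collection, key }` pair stored by the collection-item-dropdown
// interface; the key type falls back to a generic union when the target
// collection's primary key cannot be resolved from the snapshot.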
function processCollectionItemDropdown(options: { field: Field; snapshot?: any }): Record<string, fieldOverviewOutput> {
	const { field, snapshot } = options;
	const selectedCollection = field.meta?.options?.['selectedCollection'];
	let keyType = 'string | number | uuid';

	// Find the primary key type for the selected collection
	if (selectedCollection && snapshot?.fields) {
		const primaryKeyField = snapshot.fields.find(
			(f: any) => f.collection === selectedCollection && f.schema?.is_primary_key,
		);

		if (primaryKeyField) {
			keyType = primaryKeyField.type;
		}
	}

	return {
		collection: {
			value: selectedCollection,
			type: 'string',
		},
		key: {
			type: keyType,
		},
	};
}

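// 'file' and 'files' specials are treated as relational here: a single-file
// field behaves like an M2O to directus_files, a multi-file field like an M2M.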
function getRelationType(special: string[]): string | null {
	if (special.includes('m2o') || special.includes('file')) return 'm2o';
	if (special.includes('o2m')) return 'o2m';
	if (special.includes('m2m') || special.includes('files')) return 'm2m';
	if (special.includes('m2a')) return 'm2a';
	return null;
}

function buildRelationInfo(field: Field, type: string, snapshot: SchemaToolSnapshot) {
	switch (type) {
		case 'm2o':
			return buildManyToOneRelation(field, snapshot);
		case 'o2m':
			return buildOneToManyRelation(field, snapshot);
		case 'm2m':
			return buildManyToManyRelation(field, snapshot);
		case 'm2a':
			return buildManyToAnyRelation(field, snapshot);
		default:
			return { type };
	}
}

function buildManyToOneRelation(field: Field, snapshot: SchemaToolSnapshot) {
	// For M2O, the relation is directly on this field
	const relation = snapshot.relations.find((r) => r.collection === field.collection && r.field === field.field);

	// The target collection is either in related_collection or foreign_key_table
	const targetCollection =
		relation?.related_collection || relation?.schema?.foreign_key_table || field.schema?.foreign_key_table;

	return {
		type: 'm2o',
		collection: targetCollection,
	};
}

function buildOneToManyRelation(field: Field, snapshot: SchemaToolSnapshot) {
	// For O2M, we need to find the relation that points BACK to this field
	// The relation will have this field stored in meta.one_field
	const reverseRelation = snapshot.relations.find(
		(r) => r.meta?.one_collection === field.collection && r.meta?.one_field === field.field,
	);

	if (!reverseRelation) {
		return { type: 'o2m' };
	}

	return {
		type: 'o2m',
		collection: reverseRelation.collection,
		many_field: reverseRelation.field,
	};
}

function buildManyToManyRelation(field: Field, snapshot: SchemaToolSnapshot) {
	// Find the junction table relation that references this field
	// This relation will have our field as meta.one_field
	const junctionRelation = snapshot.relations.find(
		(r) =>
			r.meta?.one_field === field.field &&
			r.meta?.one_collection === field.collection &&
			r.collection !== field.collection, // Junction table is different from our collection
	);

	if (!junctionRelation) {
		return { type: 'm2m' };
	}

	// Find the other side of the junction (pointing to the target collection)
	// This is stored in meta.junction_field
	const targetRelation = snapshot.relations.find(
		(r) => r.collection === junctionRelation.collection && r.field === junctionRelation.meta?.junction_field,
	);

	const targetCollection = targetRelation?.related_collection || 'directus_files';

	const result: any = {
		type: 'm2m',
		collection: targetCollection,
		junction: {
			collection: junctionRelation.collection,
			many_field: junctionRelation.field,
			junction_field: junctionRelation.meta?.junction_field,
		},
	};

	if (junctionRelation.meta?.sort_field) {
		result.junction.sort_field = junctionRelation.meta.sort_field;
	}

	return result;
}

function buildManyToAnyRelation(field: Field, snapshot: SchemaToolSnapshot) {
	// Find the junction table relation that references this field
	// This relation will have our field as meta.one_field
	const junctionRelation = snapshot.relations.find(
		(r) => r.meta?.one_field === field.field && r.meta?.one_collection === field.collection,
	);

	if (!junctionRelation) {
		return { type: 'm2a' };
	}

	// Find the polymorphic relation in the junction table
	// This relation will have one_allowed_collections set
	const polymorphicRelation = snapshot.relations.find(
		(r) =>
			r.collection === junctionRelation.collection &&
			r.meta?.one_allowed_collections &&
			r.meta.one_allowed_collections.length > 0,
	);

	if (!polymorphicRelation) {
		return { type: 'm2a' };
	}

	// Find the relation back to our parent collection
	const parentRelation = snapshot.relations.find(
		(r) =>
			r.collection === junctionRelation.collection &&
			r.related_collection === field.collection &&
			r.field !== polymorphicRelation.field, // Different from the polymorphic field
	);

	const result: any = {
		type: 'm2a',
		one_allowed_collections: polymorphicRelation.meta?.one_allowed_collections,
		junction: {
			collection: junctionRelation.collection,
			many_field: parentRelation?.field || `${field.collection}_id`,
			junction_field: polymorphicRelation.field,
			one_collection_field: polymorphicRelation.meta?.one_collection_field || 'collection',
		},
	};

	const sortField = parentRelation?.meta?.sort_field || polymorphicRelation.meta?.sort_field;

	if (sortField) {
		result.junction.sort_field = sortField;
	}

	return result;
}
59
api/src/mcp/tools/system.test.ts
Normal file
@@ -0,0 +1,59 @@
import type { Accountability, SchemaOverview } from '@directus/types';
import { describe, expect, test, vi } from 'vitest';
import prompts from './prompts/index.js';
import { system } from './system.js';

vi.mock('../tool.js', () => ({
	defineTool: vi.fn((config) => config),
}));

describe('system tool', () => {
	describe('prompt override', () => {
		const mockSchema = { collections: {}, fields: {}, relations: {} } as unknown as SchemaOverview;
		const mockAccountability = { user: 'test-user' } as Accountability;
		const mockSanitizedQuery = { fields: ['*'] };

		test.each([undefined, null])('should return default prompt when no override provided', async (override) => {
			const result = await system.handler({
				args: { promptOverride: override },
				schema: mockSchema,
				accountability: mockAccountability,
				sanitizedQuery: mockSanitizedQuery,
			});

			expect(result).toEqual({ type: 'text', data: prompts.systemPrompt });
		});

		test('should return custom prompt when provided', async () => {
			const promptOverride = 'Lorem';

			const result = await system.handler({
				args: { promptOverride },
				schema: mockSchema,
				accountability: mockAccountability,
				sanitizedQuery: mockSanitizedQuery,
			});

			expect(result).toEqual({ type: 'text', data: promptOverride });
		});
	});

	describe('tool configuration', () => {
		test('should have correct tool name', () => {
			expect(system.name).toBe('system-prompt');
		});

		test('should not be admin tool', () => {
			expect(system.admin).toBeUndefined();
		});

		test('should have description', () => {
			expect(system.description).toBeDefined();
		});

		test('should have input and validation schemas', () => {
			expect(system.inputSchema).toBeDefined();
			expect(system.validateSchema).toBeDefined();
		});
	});
});
25
api/src/mcp/tools/system.ts
Normal file
@@ -0,0 +1,25 @@
import { z } from 'zod';
import { defineTool } from '../define.js';
import prompts from './prompts/index.js';

const SystemPromptInputSchema = z.object({});

const SystemPromptValidateSchema = z.object({
	promptOverride: z.union([z.string(), z.null()]).optional(),
});

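// Note: promptOverride is absent from the public input schema, so callers cannot
// set it directly; it is presumably injected server-side (e.g. from the MCP
// options) before validation.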
export const system = defineTool<z.infer<typeof SystemPromptValidateSchema>>({
	name: 'system-prompt',
	description: prompts.systemPromptDescription,
	annotations: {
		title: 'Directus - System Prompt',
	},
	inputSchema: SystemPromptInputSchema,
	validateSchema: SystemPromptValidateSchema,
	async handler({ args }) {
		return {
			type: 'text',
			data: args.promptOverride || prompts.systemPrompt,
		};
	},
});
202
api/src/mcp/tools/trigger-flow.test.ts
Normal file
@@ -0,0 +1,202 @@
import type { Accountability, SchemaOverview } from '@directus/types';
import { afterEach, beforeEach, describe, expect, test, vi, type MockedFunction } from 'vitest';
import { getFlowManager } from '../../flows.js';
import { FlowsService } from '../../services/flows.js';
import { triggerFlow } from './trigger-flow.js';

vi.mock('../../services/flows.js');
vi.mock('../../flows');

vi.mock('../tool.js', () => ({
	defineTool: vi.fn((config) => config),
}));

describe('trigger flow tool', () => {
	const mockSchema = { collections: {}, fields: {}, relations: {} } as unknown as SchemaOverview;
	const mockAccountability = { user: 'test-user' } as Accountability;

	afterEach(() => {
		vi.clearAllMocks();
	});

	describe('handler', () => {
		let mockFlowManager: {
			runWebhookFlow: MockedFunction<any>;
		};

		let mockFlowsService: {
			readOne: MockedFunction<any>;
		};

		beforeEach(() => {
			mockFlowManager = {
				runWebhookFlow: vi.fn(),
			};

			mockFlowsService = {
				readOne: vi.fn(),
			};

			vi.mocked(getFlowManager).mockImplementation(
				() => mockFlowManager as unknown as ReturnType<typeof getFlowManager>,
			);

			vi.mocked(FlowsService).mockImplementation(() => mockFlowsService as unknown as FlowsService);
		});

		test('should trigger a flow with minimal parameters', async () => {
			const mockArgs = {
				id: 'flow-123',
				collection: 'articles',
			};

			const mockResult = { success: true, message: 'Flow executed successfully' };
			mockFlowManager.runWebhookFlow.mockResolvedValue({ result: mockResult });

			mockFlowsService.readOne.mockResolvedValue({});

			const result = await triggerFlow.handler({
				args: mockArgs,
				schema: mockSchema,
				accountability: mockAccountability,
				sanitizedQuery: {},
			});

			expect(mockFlowManager.runWebhookFlow).toHaveBeenCalledWith(
				`POST-${mockArgs.id}`,
				{
					path: `/trigger/${mockArgs.id}`,
					query: {},
					body: {
						collection: mockArgs.collection,
					},
					method: 'POST',
					headers: {},
				},
				{ accountability: mockAccountability, schema: mockSchema },
			);

			expect(result).toEqual({
				type: 'text',
				data: mockResult,
			});
		});
	});

	describe('error handling', () => {
		let mockFlowManager: {
			runWebhookFlow: MockedFunction<any>;
		};

		let mockFlowsService: {
			readOne: MockedFunction<any>;
		};

		beforeEach(() => {
			mockFlowManager = {
				runWebhookFlow: vi.fn(),
			};

			mockFlowsService = {
				readOne: vi.fn(),
			};

			vi.mocked(getFlowManager).mockImplementation(
				() => mockFlowManager as unknown as ReturnType<typeof getFlowManager>,
			);

			vi.mocked(FlowsService).mockImplementation(() => mockFlowsService as unknown as FlowsService);
		});

		test('should propagate error from flowService', async () => {
			mockFlowsService.readOne.mockImplementation(() => Promise.reject('Forbidden'));

			const mockArgs = {
				id: 'flow-123',
				collection: 'articles',
				keys: [],
				data: undefined,
			};

			await expect(
				triggerFlow.handler({
					args: mockArgs,
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: {},
				}),
			).rejects.toThrow('Forbidden');
		});

		test('should throw error when data is missing but required fields exist', async () => {
			mockFlowsService.readOne.mockResolvedValue({
				options: {
					fields: [{ field: 'title', meta: { required: true } }],
				},
			});

			const mockArgs = {
				id: 'flow-123',
				collection: 'articles',
				keys: [],
				data: undefined,
			};

			await expect(
				triggerFlow.handler({
					args: mockArgs,
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: {},
				}),
			).rejects.toThrow('Invalid payload. Required field "title" is missing.');
		});

		test('should throw error for missing required fields', async () => {
			mockFlowsService.readOne.mockResolvedValue({
				options: {
					fields: [
						{ field: 'title', meta: { required: true } },
						{ field: 'content', meta: { required: false } },
						{ field: 'author', meta: { required: true } },
					],
				},
			});

			const mockArgs = {
				id: 'flow-123',
				collection: 'articles',
				keys: [],
				data: { title: 'Lorem' },
			};

			await expect(
				triggerFlow.handler({
					args: mockArgs,
					schema: mockSchema,
					accountability: mockAccountability,
					sanitizedQuery: {},
				}),
			).rejects.toThrow('Invalid payload. Required field "author" is missing.');
		});
	});

	describe('tool configuration', () => {
		test('should have correct tool name', () => {
			expect(triggerFlow.name).toBe('trigger-flow');
		});

		test('should not be admin tool', () => {
			expect(triggerFlow.admin).toBeUndefined();
		});

		test('should have description', () => {
			expect(triggerFlow.description).toBeDefined();
		});

		test('should have input and validation schemas', () => {
			expect(triggerFlow.inputSchema).toBeDefined();
			expect(triggerFlow.validateSchema).toBeDefined();
		});
	});
});
59
api/src/mcp/tools/trigger-flow.ts
Normal file
@@ -0,0 +1,59 @@
import { InvalidPayloadError } from '@directus/errors';
import { z } from 'zod';
import { getFlowManager } from '../../flows.js';
import { FlowsService } from '../../services/flows.js';
import { defineTool } from '../define.js';
import { TriggerFlowInputSchema, TriggerFlowValidateSchema } from '../schema.js';
import prompts from './prompts/index.js';

export const triggerFlow = defineTool<z.infer<typeof TriggerFlowValidateSchema>>({
	name: 'trigger-flow',
	description: prompts.triggerFlow,
	annotations: {
		title: 'Directus - Trigger Flow',
	},
	inputSchema: TriggerFlowInputSchema,
	validateSchema: TriggerFlowValidateSchema,
	async handler({ args, schema, accountability }) {
		const flowsService = new FlowsService({ schema, accountability });

		const flow = await flowsService.readOne(args.id, {
			filter: { status: { _eq: 'active' }, trigger: { _eq: 'manual' } },
			fields: ['options'],
		});

		/**
		 * Collection and required-selection checks are validated by the server.
		 * Required fields are an additional validation we perform here.
		 */
		const requiredFields = ((flow.options?.['fields'] as { field: string; meta: { required: boolean } }[]) ?? [])
			.filter((field) => field.meta?.required)
			.map((field) => field.field);

		for (const fieldName of requiredFields) {
			if (!args.data || !(fieldName in args.data)) {
				throw new InvalidPayloadError({ reason: `Required field "${fieldName}" is missing` });
			}
		}

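		// Run the flow through the webhook flow manager, using the same
		// `POST-<id>` key and `/trigger/<id>` path a manual trigger request would hit.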
		const flowManager = getFlowManager();

		const { result } = await flowManager.runWebhookFlow(
			`POST-${args.id}`,
			{
				path: `/trigger/${args.id}`,
				query: args.query ?? {},
				method: 'POST',
				body: {
					collection: args.collection,
					keys: args.keys,
					...(args.data ?? {}),
				},
				headers: args.headers ?? {},
			},
			{ accountability, schema },
		);

		return { type: 'text', data: result };
	},
});
25
api/src/mcp/transport.ts
Normal file
@@ -0,0 +1,25 @@
import type { Transport } from '@modelcontextprotocol/sdk/shared/transport.js';
import type { JSONRPCMessage, MessageExtraInfo } from '@modelcontextprotocol/sdk/types.js';
import type { Response } from 'express';

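// Minimal MCP transport bound to a single Express response: each JSON-RPC
// message is written straight to the response, and start/close are no-ops
// because the transport only lives for the duration of one request.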
export class DirectusTransport implements Transport {
	res: Response;
	onerror?: (error: Error) => void;
	onmessage?: (message: JSONRPCMessage, extra?: MessageExtraInfo) => void;
	onclose?: () => void;
	constructor(res: Response) {
		this.res = res;
	}

	async start(): Promise<void> {
		return;
	}

	async send(message: JSONRPCMessage): Promise<void> {
		this.res.json(message);
	}

	async close(): Promise<void> {
		return;
	}
}
59
api/src/mcp/types.ts
Normal file
@@ -0,0 +1,59 @@
import type { Accountability, Query, SchemaOverview } from '@directus/types';
import type { ToolAnnotations } from '@modelcontextprotocol/sdk/types.js';
import type { ZodType } from 'zod';

export type ToolResultBase = {
	type?: 'text' | 'image' | 'audio';
	url?: string | undefined;
};

export type TextToolResult = ToolResultBase & {
	type: 'text';
	data: unknown;
};

export type AssetToolResult = ToolResultBase & {
	type: 'image' | 'audio';
	data: string;
	mimeType: string;
};

export type ToolResult = TextToolResult | AssetToolResult;

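// Every tool handler receives its validated args plus the request context:
// the sanitized query, the current schema snapshot, and the accountability
// of the requesting user (which may be undefined).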
export type ToolHandler<T> = {
	(options: {
		args: T;
		sanitizedQuery: Query;
		schema: SchemaOverview;
		accountability: Accountability | undefined;
	}): Promise<ToolResult | undefined>;
};

export type ToolEndpoint<T> = {
	(options: { input: T; data: unknown }): string[] | undefined;
};

export interface ToolConfig<T> {
	name: string;
	description: string;
	endpoint?: ToolEndpoint<T>;
	admin?: boolean;
	inputSchema: ZodType<any>;
	validateSchema?: ZodType<T>;
	annotations?: ToolAnnotations;
	handler: ToolHandler<T>;
}

export interface Prompt {
	name: string;
	system_prompt?: string | null;
	description?: string;
	messages: { role: 'user' | 'assistant'; text: string }[];
}

export interface MCPOptions {
	promptsCollection?: string;
	allowDeletes?: boolean;
	systemPromptEnabled?: boolean;
	systemPrompt?: string | null;
}
@@ -1,37 +0,0 @@
import { isObject } from '@directus/utils';
import type { RequestHandler } from 'express';
import { VersionsService } from '../services/versions.js';
import asyncHandler from '../utils/async-handler.js';
import { mergeVersionsRaw, mergeVersionsRecursive } from '../utils/merge-version-data.js';

export const mergeContentVersions: RequestHandler = asyncHandler(async (req, res, next) => {
	if (
		req.sanitizedQuery.version &&
		req.collection &&
		(req.singleton || req.params['pk']) &&
		'data' in res.locals['payload']
	) {
		const originalData = res.locals['payload'].data as unknown;

		// only act on single item requests
		if (!isObject(originalData)) return next();

		const versionsService = new VersionsService({ accountability: req.accountability ?? null, schema: req.schema });

		const versionData = await versionsService.getVersionSaves(
			req.sanitizedQuery.version,
			req.collection,
			req.params['pk'],
		);

		if (!versionData || versionData.length === 0) return next();

		if (req.sanitizedQuery.versionRaw) {
			res.locals['payload'].data = mergeVersionsRaw(originalData, versionData);
		} else {
			res.locals['payload'].data = mergeVersionsRecursive(originalData, versionData, req.collection, req.schema);
		}
	}

	return next();
});
@@ -11,7 +11,7 @@ import { getCacheKey } from '../utils/get-cache-key.js';
import { getDateFormatted } from '../utils/get-date-formatted.js';
import { getMilliseconds } from '../utils/get-milliseconds.js';
import { stringByteSize } from '../utils/get-string-byte-size.js';
import { permissionsCachable } from '../utils/permissions-cachable.js';
import { permissionsCacheable } from '../utils/permissions-cacheable.js';

export const respond: RequestHandler = asyncHandler(async (req, res) => {
	const env = useEnv();
@@ -35,7 +35,7 @@ export const respond: RequestHandler = asyncHandler(async (req, res) => {
		!req.sanitizedQuery.export &&
		res.locals['cache'] !== false &&
		exceedsMaxSize === false &&
		(await permissionsCachable(
		(await permissionsCacheable(
			req.collection,
			{
				knex: getDatabase(),
@@ -24,6 +24,7 @@ import { getMilliseconds } from '../utils/get-milliseconds.js';
import { getSecret } from '../utils/get-secret.js';
import { stall } from '../utils/stall.js';
import { ActivityService } from './activity.js';
import { RevisionsService } from './revisions.js';
import { SettingsService } from './settings.js';
import { TFAService } from './tfa.js';
@@ -139,6 +140,28 @@ export class AuthenticationService {
			await this.knex('directus_users').update({ status: 'suspended' }).where({ id: user.id });
			user.status = 'suspended';

			if (this.accountability) {
				const activity = await this.activityService.createOne({
					action: Action.UPDATE,
					user: user.id,
					ip: this.accountability.ip,
					user_agent: this.accountability.userAgent,
					origin: this.accountability.origin,
					collection: 'directus_users',
					item: user.id,
				});

				const revisionsService = new RevisionsService({ knex: this.knex, schema: this.schema });

				await revisionsService.createOne({
					activity: activity,
					collection: 'directus_users',
					item: user.id,
					data: user,
					delta: { status: 'suspended' },
				});
			}

			// This means that new attempts after the user has been re-activated will be accepted
			await loginAttemptsLimiter.set(user.id, 0, 0);
		} else {
@@ -189,6 +212,24 @@ export class AuthenticationService {
			admin_access: globalAccess.admin,
		};

		// Add role-based enforcement to token payload for users who need to set up 2FA
		if (!user.tfa_secret) {
			// Check if user has role-based enforcement
			const roleEnforcement = await this.knex
				.select('directus_policies.enforce_tfa')
				.from('directus_users')
				.leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id')
				.leftJoin('directus_access', 'directus_roles.id', 'directus_access.role')
				.leftJoin('directus_policies', 'directus_access.policy', 'directus_policies.id')
				.where('directus_users.id', user.id)
				.where('directus_policies.enforce_tfa', true)
				.first();

			if (roleEnforcement) {
				tokenPayload.enforce_tfa = true;
			}
		}

		const refreshToken = nanoid(64);
		const refreshTokenExpiration = new Date(Date.now() + getMilliseconds(env['REFRESH_TOKEN_TTL'], 0));
@@ -475,7 +475,7 @@ export class FieldsService {
		}

		if (opts?.emitEvents !== false && nestedActionEvents.length > 0) {
			const updatedSchema = await getSchema();
			const updatedSchema = await getSchema({ database: this.knex });

			for (const nestedActionEvent of nestedActionEvents) {
				nestedActionEvent.context.schema = updatedSchema;
@@ -616,7 +616,7 @@ export class FieldsService {
		}

		if (opts?.emitEvents !== false && nestedActionEvents.length > 0) {
			const updatedSchema = await getSchema();
			const updatedSchema = await getSchema({ database: this.knex });

			for (const nestedActionEvent of nestedActionEvents) {
				nestedActionEvent.context.schema = updatedSchema;
@@ -653,7 +653,7 @@ export class FieldsService {
		}

		if (opts?.emitEvents !== false && nestedActionEvents.length > 0) {
			const updatedSchema = await getSchema();
			const updatedSchema = await getSchema({ database: this.knex });

			for (const nestedActionEvent of nestedActionEvents) {
				nestedActionEvent.context.schema = updatedSchema;
@@ -842,7 +842,7 @@ export class FieldsService {
		}

		if (opts?.emitEvents !== false && nestedActionEvents.length > 0) {
			const updatedSchema = await getSchema();
			const updatedSchema = await getSchema({ database: this.knex });

			for (const nestedActionEvent of nestedActionEvents) {
				nestedActionEvent.context.schema = updatedSchema;
@@ -7,6 +7,7 @@ import type {
	Item,
	Query,
	SchemaOverview,
	PrimaryKey,
} from '@directus/types';
import type { ExecutionResult, FormattedExecutionResult, GraphQLSchema } from 'graphql';
import { NoSchemaIntrospectionCustomRule, execute, specifiedRules, validate } from 'graphql';
@@ -99,18 +100,19 @@ export class GraphQLService {
	/**
	 * Execute the read action on the correct service. Checks for singleton as well.
	 */
	async read(collection: string, query: Query): Promise<Partial<Item>> {
	async read(collection: string, query: Query, id?: PrimaryKey): Promise<Partial<Item>> {
		const service = getService(collection, {
			knex: this.knex,
			accountability: this.accountability,
			schema: this.schema,
		});

		const result = this.schema.collections[collection]!.singleton
			? await service.readSingleton(query, { stripNonRequested: false })
			: await service.readByQuery(query, { stripNonRequested: false });
		if (this.schema.collections[collection]!.singleton)
			return await service.readSingleton(query, { stripNonRequested: false });

		return result;
		if (id) return await service.readOne(id, query, { stripNonRequested: false });

		return await service.readByQuery(query, { stripNonRequested: false });
	}

	/**

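The new optional id parameter lets single-item GraphQL queries resolve through readOne instead of a filtered readByQuery. A hedged usage sketch (collection name and id are illustrative):

	// Passing an id short-circuits to service.readOne(); omitting it
	// keeps the list behaviour of readByQuery(). Singleton collections
	// still resolve through readSingleton() first.
	const single = await gql.read('articles', query, 42);
	const list = await gql.read('articles', query);
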
@@ -2,8 +2,6 @@ import type { Item, Query } from '@directus/types';
import { parseFilterFunctionPath } from '@directus/utils';
import type { GraphQLResolveInfo } from 'graphql';
import { omit } from 'lodash-es';
import { mergeVersionsRaw, mergeVersionsRecursive } from '../../../utils/merge-version-data.js';
import { VersionsService } from '../../versions.js';
import type { GraphQLService } from '../index.js';
import { parseArgs } from '../schema/parse-args.js';
import { getQuery } from '../schema/parse-query.js';
@@ -23,7 +21,6 @@ export async function resolveQuery(gql: GraphQLService, info: GraphQLResolveInfo
	const args: Record<string, any> = parseArgs(info.fieldNodes[0]!.arguments || [], info.variableValues);

	let query: Query;
	let versionRaw = false;

	const isAggregate = collection.endsWith('_aggregated') && collection in gql.schema.collections === false;

@@ -39,25 +36,10 @@ export async function resolveQuery(gql: GraphQLService, info: GraphQLResolveInfo

		if (collection.endsWith('_by_version') && collection in gql.schema.collections === false) {
			collection = collection.slice(0, -11);
			versionRaw = true;
			query.versionRaw = true;
		}
	}

	if (args['id']) {
		query.filter = {
			_and: [
				query.filter || {},
				{
					[gql.schema.collections[collection]!.primary]: {
						_eq: args['id'],
					},
				},
			],
		};

		query.limit = 1;
	}

	// Transform count(a.b.c) into a.b.count(c)
	if (query.fields?.length) {
		for (let fieldIndex = 0; fieldIndex < query.fields.length; fieldIndex++) {
@@ -65,31 +47,9 @@ export async function resolveQuery(gql: GraphQLService, info: GraphQLResolveInfo
		}
	}

	const result = await gql.read(collection, query);
	const result = await gql.read(collection, query, args['id']);

	if (args['version']) {
		const versionsService = new VersionsService({ accountability: gql.accountability, schema: gql.schema });

		const saves = await versionsService.getVersionSaves(args['version'], collection, args['id']);

		if (saves) {
			if (gql.schema.collections[collection]!.singleton) {
				return versionRaw
					? mergeVersionsRaw(result, saves)
					: mergeVersionsRecursive(result, saves, collection, gql.schema);
			} else {
				if (result?.[0] === undefined) return null;

				return versionRaw
					? mergeVersionsRaw(result[0], saves)
					: mergeVersionsRecursive(result[0], saves, collection, gql.schema);
			}
		}
	}

	if (args['id']) {
		return result?.[0] || null;
	}
	if (args['id']) return result;

	if (query.group) {
		// for every entry in result add a group field based on query.group;

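With the id handled inside gql.read(), the resolver no longer builds a primary-key filter or unwraps result[0], and the raw-version flag moves onto the query itself. The net shape of the call site, as a sketch:

	// readOne() already returns a single item, so the resolver can
	// return the result directly when an id argument was supplied.
	const result = await gql.read(collection, query, args['id']);
	if (args['id']) return result;
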
@@ -33,6 +33,7 @@ import { shouldClearCache } from '../utils/should-clear-cache.js';
import { transaction } from '../utils/transaction.js';
import { validateKeys } from '../utils/validate-keys.js';
import { validateUserCountIntegrity } from '../utils/validate-user-count-integrity.js';
import { handleVersion } from '../utils/versioning/handle-version.js';
import { PayloadService } from './payload.js';

const env = useEnv();
@@ -129,9 +130,6 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str
			opts.mutationTracker.trackMutations(1);
		}

		const { ActivityService } = await import('./activity.js');
		const { RevisionsService } = await import('./revisions.js');

		const primaryKeyField = this.schema.collections[this.collection]!.primary;
		const fields = Object.keys(this.schema.collections[this.collection]!.fields);

@@ -199,6 +197,7 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str
				knex: trx,
				schema: this.schema,
				nested: this.nested,
				overwriteDefaults: opts.overwriteDefaults,
			});

			const {
@@ -312,7 +311,14 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str
			}

			// If this is an authenticated action, and accountability tracking is enabled, save activity row
			if (this.accountability && this.schema.collections[this.collection]!.accountability !== null) {
			if (
				opts.skipTracking !== true &&
				this.accountability &&
				this.schema.collections[this.collection]!.accountability !== null
			) {
				const { ActivityService } = await import('./activity.js');
				const { RevisionsService } = await import('./revisions.js');

				const activityService = new ActivityService({
					knex: trx,
					schema: this.schema,
@@ -362,6 +368,10 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str
				await getHelpers(trx).sequence.resetAutoIncrementSequence(this.collection, primaryKeyField);
			}

			if (opts.onItemCreate) {
				opts.onItemCreate(this.collection, primaryKey);
			}

			return primaryKey;
		});

@@ -438,6 +448,7 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str
				onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags),
				bypassEmitAction: (params) => nestedActionEvents.push(params),
				mutationTracker: opts.mutationTracker,
				overwriteDefaults: opts.overwriteDefaults?.[index],
				bypassAutoIncrementSequenceReset,
			});

@@ -570,7 +581,13 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str
		const filterWithKey = assign({}, query.filter, { [primaryKeyField]: { _eq: key } });
		const queryWithKey = assign({}, query, { filter: filterWithKey });

		const results = await this.readByQuery(queryWithKey, opts);
		let results: Item[] = [];

		if (query.version) {
			results = [await handleVersion(this, key, queryWithKey, opts)];
		} else {
			results = await this.readByQuery(queryWithKey, opts);
		}

		if (results.length === 0) {
			throw new ForbiddenError();
@@ -644,13 +661,15 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str

		let userIntegrityCheckFlags = opts.userIntegrityCheckFlags ?? UserIntegrityCheckFlag.None;

		for (const item of data) {
		for (const index in data) {
			const item = data[index]!;
			const primaryKey = item[primaryKeyField];
			if (!primaryKey) throw new InvalidPayloadError({ reason: `Item in update misses primary key` });

			const combinedOpts: MutationOptions = {
				autoPurgeCache: false,
				...opts,
				overwriteDefaults: opts.overwriteDefaults?.[index],
				onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags),
			};

@@ -684,9 +703,6 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str
			opts.mutationTracker.trackMutations(keys.length);
		}

		const { ActivityService } = await import('./activity.js');
		const { RevisionsService } = await import('./revisions.js');

		const primaryKeyField = this.schema.collections[this.collection]!.primary;
		validateKeys(this.schema, this.collection, primaryKeyField, keys);

@@ -765,6 +781,7 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str
				knex: trx,
				schema: this.schema,
				nested: this.nested,
				overwriteDefaults: opts.overwriteDefaults,
			});

			const {
@@ -824,7 +841,14 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str
			}

			// If this is an authenticated action, and accountability tracking is enabled, save activity row
			if (this.accountability && this.schema.collections[this.collection]!.accountability !== null) {
			if (
				opts.skipTracking !== true &&
				this.accountability &&
				this.schema.collections[this.collection]!.accountability !== null
			) {
				const { ActivityService } = await import('./activity.js');
				const { RevisionsService } = await import('./revisions.js');

				const activityService = new ActivityService({
					knex: trx,
					schema: this.schema,
@@ -974,8 +998,15 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str

		const primaryKeys: PrimaryKey[] = [];

		for (const payload of payloads) {
			const primaryKey = await service.upsertOne(payload, { ...(opts || {}), autoPurgeCache: false });
		for (const index in payloads) {
			const payload = payloads[index]!;

			const primaryKey = await service.upsertOne(payload, {
				...(opts || {}),
				overwriteDefaults: opts.overwriteDefaults?.[index],
				autoPurgeCache: false,
			});

			primaryKeys.push(primaryKey);
		}

@@ -1026,8 +1057,6 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str
			opts.mutationTracker.trackMutations(keys.length);
		}

		const { ActivityService } = await import('./activity.js');

		const primaryKeyField = this.schema.collections[this.collection]!.primary;
		validateKeys(this.schema, this.collection, primaryKeyField, keys);

@@ -1076,7 +1105,13 @@ export class ItemsService<Item extends AnyItem = AnyItem, Collection extends str
			}
		}

		if (this.accountability && this.schema.collections[this.collection]!.accountability !== null) {
		if (
			opts.skipTracking !== true &&
			this.accountability &&
			this.schema.collections[this.collection]!.accountability !== null
		) {
			const { ActivityService } = await import('./activity.js');

			const activityService = new ActivityService({
				knex: trx,
				schema: this.schema,

@@ -5,6 +5,7 @@ import type {
	ActionEventParams,
	Aggregate,
	Alterations,
	DefaultOverwrite,
	FieldOverview,
	Item,
	MutationOptions,
@@ -36,6 +37,7 @@ type Transformers = {
		accountability: Accountability | null;
		specials: string[];
		helpers: Helpers;
		overwriteDefaults: DefaultOverwrite | undefined;
	}) => Promise<any>;
};

@@ -50,14 +52,19 @@ export class PayloadService {
	collection: string;
	schema: SchemaOverview;
	nested: string[];
	overwriteDefaults: DefaultOverwrite | undefined;

	constructor(collection: string, options: AbstractServiceOptions) {
	constructor(
		collection: string,
		options: AbstractServiceOptions & { overwriteDefaults?: DefaultOverwrite | undefined },
	) {
		this.accountability = options.accountability || null;
		this.knex = options.knex || getDatabase();
		this.helpers = getHelpers(this.knex);
		this.collection = collection;
		this.schema = options.schema;
		this.nested = options.nested ?? [];
		this.overwriteDefaults = options.overwriteDefaults;

		return this;
	}
@@ -109,12 +116,12 @@ export class PayloadService {
			if (action === 'read') return value ? '**********' : null;
			return value;
		},
		async 'user-created'({ action, value, accountability }) {
			if (action === 'create') return accountability?.user || null;
		async 'user-created'({ action, value, accountability, overwriteDefaults }) {
			if (action === 'create') return (overwriteDefaults ? overwriteDefaults._user : accountability?.user) ?? null;
			return value;
		},
		async 'user-updated'({ action, value, accountability }) {
			if (action === 'update') return accountability?.user || null;
		async 'user-updated'({ action, value, accountability, overwriteDefaults }) {
			if (action === 'update') return (overwriteDefaults ? overwriteDefaults._user : accountability?.user) ?? null;
			return value;
		},
		async 'role-created'({ action, value, accountability }) {
@@ -125,12 +132,18 @@ export class PayloadService {
			if (action === 'update') return accountability?.role || null;
			return value;
		},
		async 'date-created'({ action, value, helpers }) {
			if (action === 'create') return new Date(helpers.date.writeTimestamp(new Date().toISOString()));
		async 'date-created'({ action, value, helpers, overwriteDefaults }) {
			if (action === 'create')
				return new Date(
					overwriteDefaults ? overwriteDefaults._date : helpers.date.writeTimestamp(new Date().toISOString()),
				);
			return value;
		},
		async 'date-updated'({ action, value, helpers }) {
			if (action === 'update') return new Date(helpers.date.writeTimestamp(new Date().toISOString()));
		async 'date-updated'({ action, value, helpers, overwriteDefaults }) {
			if (action === 'update')
				return new Date(
					overwriteDefaults ? overwriteDefaults._date : helpers.date.writeTimestamp(new Date().toISOString()),
				);
			return value;
		},
		async 'cast-csv'({ action, value }) {

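The transformer changes above let content-version promotion carry the originally recorded author and save date through, instead of stamping the promoting user and the current time. A hedged sketch of the overwrite shape the transformers consume (the id and timestamp are placeholder values; the concrete DefaultOverwrite type comes from @directus/types):

	// Illustrative overwrite: preserve recorded authorship when the
	// 'user-created' / 'date-created' specials run during promotion.
	const overwriteDefaults = {
		_user: '9a1f0c2e-0000-0000-0000-000000000000', // hypothetical user id
		_date: '2024-01-01T00:00:00.000Z', // hypothetical save timestamp
	};
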
@@ -281,6 +294,7 @@ export class PayloadService {
				accountability,
				specials: fieldSpecials,
				helpers: this.helpers,
				overwriteDefaults: this.overwriteDefaults,
			});
		}
	}
@@ -536,6 +550,11 @@ export class PayloadService {
				bypassEmitAction: (params) =>
					opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
				emitEvents: opts?.emitEvents,
				autoPurgeCache: opts?.autoPurgeCache,
				autoPurgeSystemCache: opts?.autoPurgeSystemCache,
				skipTracking: opts?.skipTracking,
				overwriteDefaults: opts?.overwriteDefaults?.[relation.field],
				onItemCreate: opts?.onItemCreate,
				mutationTracker: opts?.mutationTracker,
			});
		}
@@ -546,6 +565,11 @@ export class PayloadService {
				bypassEmitAction: (params) =>
					opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
				emitEvents: opts?.emitEvents,
				autoPurgeCache: opts?.autoPurgeCache,
				autoPurgeSystemCache: opts?.autoPurgeSystemCache,
				skipTracking: opts?.skipTracking,
				overwriteDefaults: opts?.overwriteDefaults?.[relation.field],
				onItemCreate: opts?.onItemCreate,
				mutationTracker: opts?.mutationTracker,
			});
		}
@@ -626,6 +650,11 @@ export class PayloadService {
				bypassEmitAction: (params) =>
					opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
				emitEvents: opts?.emitEvents,
				autoPurgeCache: opts?.autoPurgeCache,
				autoPurgeSystemCache: opts?.autoPurgeSystemCache,
				skipTracking: opts?.skipTracking,
				overwriteDefaults: opts?.overwriteDefaults?.[relation.field],
				onItemCreate: opts?.onItemCreate,
				mutationTracker: opts?.mutationTracker,
			});
		}
@@ -636,6 +665,11 @@ export class PayloadService {
				bypassEmitAction: (params) =>
					opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
				emitEvents: opts?.emitEvents,
				autoPurgeCache: opts?.autoPurgeCache,
				autoPurgeSystemCache: opts?.autoPurgeSystemCache,
				skipTracking: opts?.skipTracking,
				overwriteDefaults: opts?.overwriteDefaults?.[relation.field],
				onItemCreate: opts?.onItemCreate,
				mutationTracker: opts?.mutationTracker,
			});
		}
@@ -759,6 +793,11 @@ export class PayloadService {
				bypassEmitAction: (params) =>
					opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
				emitEvents: opts?.emitEvents,
				autoPurgeCache: opts?.autoPurgeCache,
				autoPurgeSystemCache: opts?.autoPurgeSystemCache,
				skipTracking: opts?.skipTracking,
				overwriteDefaults: opts?.overwriteDefaults?.[relation.meta!.one_field!],
				onItemCreate: opts?.onItemCreate,
				mutationTracker: opts?.mutationTracker,
			})),
		);
@@ -789,6 +828,11 @@ export class PayloadService {
				bypassEmitAction: (params) =>
					opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
				emitEvents: opts?.emitEvents,
				autoPurgeCache: opts?.autoPurgeCache,
				autoPurgeSystemCache: opts?.autoPurgeSystemCache,
				skipTracking: opts?.skipTracking,
				overwriteDefaults: opts?.overwriteDefaults?.[relation.meta!.one_field!],
				onItemCreate: opts?.onItemCreate,
				mutationTracker: opts?.mutationTracker,
			});
		} else {
@@ -801,6 +845,11 @@ export class PayloadService {
				bypassEmitAction: (params) =>
					opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
				emitEvents: opts?.emitEvents,
				autoPurgeCache: opts?.autoPurgeCache,
				autoPurgeSystemCache: opts?.autoPurgeSystemCache,
				skipTracking: opts?.skipTracking,
				overwriteDefaults: opts?.overwriteDefaults?.[relation.meta!.one_field!],
				onItemCreate: opts?.onItemCreate,
				mutationTracker: opts?.mutationTracker,
			},
		);
@@ -851,13 +900,18 @@ export class PayloadService {
				bypassEmitAction: (params) =>
					opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
				emitEvents: opts?.emitEvents,
				autoPurgeCache: opts?.autoPurgeCache,
				autoPurgeSystemCache: opts?.autoPurgeSystemCache,
				skipTracking: opts?.skipTracking,
				overwriteDefaults: opts?.overwriteDefaults?.[relation.meta!.one_field!]?.['create'],
				onItemCreate: opts?.onItemCreate,
				mutationTracker: opts?.mutationTracker,
			});
		}

		if (alterations.update) {
			for (const item of alterations.update) {
				const { [relatedPrimaryKeyField]: key, ...record } = item;
			for (const index in alterations.update) {
				const { [relatedPrimaryKeyField]: key, ...record } = alterations.update[index]!;

				const existingRecord = await this.knex
					.select(relatedPrimaryKeyField, relation.field)
@@ -875,6 +929,11 @@ export class PayloadService {
				bypassEmitAction: (params) =>
					opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
				emitEvents: opts?.emitEvents,
				autoPurgeCache: opts?.autoPurgeCache,
				autoPurgeSystemCache: opts?.autoPurgeSystemCache,
				skipTracking: opts?.skipTracking,
				overwriteDefaults: opts?.overwriteDefaults?.[relation.meta!.one_field!]?.['update'][index],
				onItemCreate: opts?.onItemCreate,
				mutationTracker: opts?.mutationTracker,
			});
		}
@@ -905,6 +964,11 @@ export class PayloadService {
				bypassEmitAction: (params) =>
					opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
				emitEvents: opts?.emitEvents,
				autoPurgeCache: opts?.autoPurgeCache,
				autoPurgeSystemCache: opts?.autoPurgeSystemCache,
				skipTracking: opts?.skipTracking,
				overwriteDefaults: opts?.overwriteDefaults?.[relation.meta!.one_field!]?.['delete'],
				onItemCreate: opts?.onItemCreate,
				mutationTracker: opts?.mutationTracker,
			});
		} else {
@@ -917,6 +981,11 @@ export class PayloadService {
				bypassEmitAction: (params) =>
					opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
				emitEvents: opts?.emitEvents,
				autoPurgeCache: opts?.autoPurgeCache,
				autoPurgeSystemCache: opts?.autoPurgeSystemCache,
				skipTracking: opts?.skipTracking,
				overwriteDefaults: opts?.overwriteDefaults?.[relation.meta!.one_field!]?.['delete'],
				onItemCreate: opts?.onItemCreate,
				mutationTracker: opts?.mutationTracker,
			},
		);
@@ -932,8 +1001,8 @@ export class PayloadService {
	 * Transforms the input partial payload to match the output structure, to have consistency
	 * between delta and data
	 */
	async prepareDelta(data: Partial<Item>): Promise<string | null> {
		let payload = cloneDeep(data);
	async prepareDelta(delta: Partial<Item>): Promise<string | null> {
		let payload = cloneDeep(delta);

		for (const key in payload) {
			if (payload[key]?.isRawInstance) {

@@ -62,6 +62,8 @@ export class ServerService {

		info['project'] = projectInfo;

		info['mcp_enabled'] = toBoolean(env['MCP_ENABLED'] ?? true);

		if (this.accountability?.user) {
			if (env['RATE_LIMITER_ENABLED']) {
				info['rateLimit'] = {

@@ -4,6 +4,7 @@ import type { Knex } from 'knex';
import { authenticator } from 'otplib';
import getDatabase from '../database/index.js';
import { ItemsService } from './items.js';
import { DEFAULT_AUTH_PROVIDER } from '../constants.js';

export class TFAService {
	knex: Knex;
@@ -28,33 +29,51 @@ export class TFAService {
		return authenticator.check(otp, user.tfa_secret);
	}

	async generateTFA(key: PrimaryKey): Promise<Record<string, string>> {
		const user = await this.knex.select('email', 'tfa_secret').from('directus_users').where({ id: key }).first();
	async generateTFA(key: PrimaryKey, requiresPassword: boolean = true): Promise<Record<string, string>> {
		const user = await this.knex
			.select('email', 'tfa_secret', 'provider', 'external_identifier')
			.from('directus_users')
			.where({ id: key })
			.first();

		if (user?.tfa_secret !== null) {
			throw new InvalidPayloadError({ reason: 'TFA Secret is already set for this user' });
		}

		if (!user?.email) {
		// Only require email for non-OAuth users
		if (user?.provider === DEFAULT_AUTH_PROVIDER && !user?.email) {
			throw new InvalidPayloadError({ reason: 'User must have a valid email to enable TFA' });
		}

		if (!requiresPassword && user?.provider === DEFAULT_AUTH_PROVIDER) {
			throw new InvalidPayloadError({ reason: 'This method is only available for OAuth users' });
		}

		const secret = authenticator.generateSecret();
		const project = await this.knex.select('project_name').from('directus_settings').limit(1).first();

		// For OAuth users without email, use external_identifier as fallback
		const accountName = user.email || user.external_identifier || `user_${key}`;

		return {
			secret,
			url: authenticator.keyuri(user.email, project?.project_name || 'Directus', secret),
			url: authenticator.keyuri(accountName, project?.project_name || 'Directus', secret),
		};
	}

	async enableTFA(key: PrimaryKey, otp: string, secret: string): Promise<void> {
		const user = await this.knex.select('tfa_secret').from('directus_users').where({ id: key }).first();
		const user = await this.knex.select('tfa_secret', 'provider').from('directus_users').where({ id: key }).first();

		const requiresPassword = user?.['provider'] === DEFAULT_AUTH_PROVIDER;

		if (user?.tfa_secret !== null) {
			throw new InvalidPayloadError({ reason: 'TFA Secret is already set for this user' });
		}

		if (!requiresPassword && user?.provider === DEFAULT_AUTH_PROVIDER) {
			throw new InvalidPayloadError({ reason: 'This method is only available for OAuth users' });
		}

		if (!authenticator.check(otp, secret)) {
			throw new InvalidPayloadError({ reason: `"otp" is invalid` });
		}

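For SSO users without an email address, the otpauth account label now falls back to external_identifier and finally to a synthetic user_<id> label, so authenticator apps still get a usable account name. The fallback chain, with illustrative values:

	const accountName = user.email || user.external_identifier || `user_${key}`;
	// email present          -> 'jane@example.com'
	// OAuth user, no email   -> 'github|12345'
	// neither available      -> 'user_9c1d...'
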
@@ -3,25 +3,28 @@ import { ForbiddenError, InvalidPayloadError, UnprocessableContentError } from '
import type {
	AbstractServiceOptions,
	ContentVersion,
	Filter,
	Item,
	MutationOptions,
	PrimaryKey,
	Query,
	QueryOptions,
} from '@directus/types';
import Joi from 'joi';
import { assign, pick } from 'lodash-es';
import { assign, get, isEqual, isPlainObject, pick } from 'lodash-es';
import objectHash from 'object-hash';
import { getCache } from '../cache.js';
import { getHelpers } from '../database/helpers/index.js';
import emitter from '../emitter.js';
import { validateAccess } from '../permissions/modules/validate-access/validate-access.js';
import { shouldClearCache } from '../utils/should-clear-cache.js';
import { splitRecursive } from '../utils/versioning/split-recursive.js';
import { ActivityService } from './activity.js';
import { ItemsService } from './items.js';
import { PayloadService } from './payload.js';
import { RevisionsService } from './revisions.js';
import { deepMapWithSchema } from '../utils/versioning/deep-map-with-schema.js';

export class VersionsService extends ItemsService {
export class VersionsService extends ItemsService<ContentVersion> {
	constructor(options: AbstractServiceOptions) {
		super('directus_versions', options);
	}
@@ -79,12 +82,12 @@ export class VersionsService extends ItemsService {
			schema: this.schema,
		});

		const existingVersions = await sudoService.readByQuery({
		const existingVersions = (await sudoService.readByQuery({
			aggregate: { count: ['*'] },
			filter: { key: { _eq: data['key'] }, collection: { _eq: data['collection'] }, item: { _eq: data['item'] } },
		});
		})) as any[];

		if (existingVersions[0]!['count'] > 0) {
		if (existingVersions[0]['count'] > 0) {
			throw new UnprocessableContentError({
				reason: `Version "${data['key']}" already exists for item "${data['item']}" in collection "${data['collection']}"`,
			});
@@ -113,25 +116,20 @@ export class VersionsService extends ItemsService {
		return { outdated: hash !== mainHash, mainHash };
	}

	async getVersionSaves(key: string, collection: string, item: string | undefined): Promise<Partial<Item>[] | null> {
		const filter: Filter = {
			key: { _eq: key },
			collection: { _eq: collection },
		};
	async getVersionSave(key: string, collection: string, item: string, mapDelta = true) {
		const version = (
			await this.readByQuery({
				filter: {
					key: { _eq: key },
					collection: { _eq: collection },
					item: { _eq: item },
				},
			})
		)[0];

		if (item) {
			filter['item'] = { _eq: item };
		}
		if (mapDelta && version?.delta) version.delta = this.mapDelta(version);

		const versions = await this.readByQuery({ filter });

		if (!versions?.[0]) return null;

		if (versions[0]['delta']) {
			return [versions[0]['delta']];
		}

		return null;
		return version;
	}

	override async createOne(data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey> {
@@ -144,6 +142,14 @@ export class VersionsService extends ItemsService {
		return super.createOne(data, opts);
	}

	override async readOne(key: PrimaryKey, query: Query = {}, opts?: QueryOptions): Promise<ContentVersion> {
		const version = await super.readOne(key, query, opts);

		if (version?.delta) version.delta = this.mapDelta(version);

		return version;
	}

	override async createMany(data: Partial<Item>[], opts?: MutationOptions): Promise<PrimaryKey[]> {
		if (!Array.isArray(data)) {
			throw new InvalidPayloadError({ reason: 'Input should be an array of items' });
@@ -200,7 +206,7 @@ export class VersionsService extends ItemsService {
			filter: { id: { _neq: pk }, key: { _eq: data['key'] }, collection: { _eq: collection }, item: { _eq: item } },
		});

		if (existingVersions[0]!['count'] > 0) {
		if ((existingVersions as any)[0]['count'] > 0) {
			throw new UnprocessableContentError({
				reason: `Version "${data['key']}" already exists for item "${item}" in collection "${collection}"`,
			});
@@ -211,7 +217,7 @@ export class VersionsService extends ItemsService {
		return super.updateMany(keys, data, opts);
	}

	async save(key: PrimaryKey, data: Partial<Item>): Promise<Partial<Item>> {
	async save(key: PrimaryKey, delta: Partial<Item>): Promise<Partial<Item>> {
		const version = await super.readOne(key);

		const payloadService = new PayloadService(this.collection, {
@@ -230,7 +236,7 @@ export class VersionsService extends ItemsService {
			schema: this.schema,
		});

		const { item, collection } = version;
		const { item, collection, delta: existingDelta } = version;

		const activity = await activityService.createOne({
			action: Action.VERSION_SAVE,
@@ -242,7 +248,9 @@ export class VersionsService extends ItemsService {
			item,
		});

		const revisionDelta = await payloadService.prepareDelta(data);
		const helpers = getHelpers(this.knex);

		let revisionDelta = await payloadService.prepareDelta(delta);

		await revisionsService.createOne({
			activity,
@@ -253,11 +261,28 @@ export class VersionsService extends ItemsService {
			delta: revisionDelta,
		});

		const finalVersionDelta = assign({}, version['delta'], revisionDelta ? JSON.parse(revisionDelta) : null);
		revisionDelta = revisionDelta ? JSON.parse(revisionDelta) : null;

		const date = new Date(helpers.date.writeTimestamp(new Date().toISOString()));

		deepMapObjects(revisionDelta, (object, path) => {
			const existing = get(existingDelta, path);

			if (existing && isEqual(existing, object)) return;

			object['_user'] = this.accountability?.user;
			object['_date'] = date;
		});

		const finalVersionDelta = assign({}, existingDelta, revisionDelta);

		const sudoService = new ItemsService(this.collection, {
			knex: this.knex,
			schema: this.schema,
			accountability: {
				...this.accountability!,
				admin: true,
			},
		});

		await sudoService.updateOne(key, { delta: finalVersionDelta });
@@ -272,7 +297,7 @@ export class VersionsService extends ItemsService {
	}

	async promote(version: PrimaryKey, mainHash: string, fields?: string[]) {
		const { collection, item, delta } = (await this.readOne(version)) as ContentVersion;
		const { collection, item, delta } = (await super.readOne(version)) as ContentVersion;

		// will throw an error if the accountability does not have permission to update the item
		if (this.accountability) {
@@ -304,7 +329,9 @@ export class VersionsService extends ItemsService {
			});
		}

		const payloadToUpdate = fields ? pick(delta, fields) : delta;
		const { rawDelta, defaultOverwrites } = splitRecursive(delta);

		const payloadToUpdate = fields ? pick(rawDelta, fields) : rawDelta;

		const itemsService = new ItemsService(collection, {
			accountability: this.accountability,
@@ -327,7 +354,9 @@ export class VersionsService extends ItemsService {
			},
		);

		const updatedItemKey = await itemsService.updateOne(item, payloadAfterHooks);
		const updatedItemKey = await itemsService.updateOne(item, payloadAfterHooks, {
			overwriteDefaults: defaultOverwrites as any,
		});

		emitter.emitAction(
			['items.promote', `${collection}.items.promote`],
@@ -346,4 +375,54 @@ export class VersionsService extends ItemsService {

		return updatedItemKey;
	}

	private mapDelta(version: ContentVersion) {
		const delta = version.delta ?? {};
		delta[this.schema.collections[version.collection]!.primary] = version.item;

		return deepMapWithSchema(
			delta,
			([key, value], context) => {
				if (key === '_user' || key === '_date') return;

				if (context.collection.primary in context.object) {
					if (context.field.special.includes('user-updated')) {
						return [key, context.object['_user']];
					}

					if (context.field.special.includes('date-updated')) {
						return [key, context.object['_date']];
					}
				} else {
					if (context.field.special.includes('user-created')) {
						return [key, context.object['_user']];
					}

					if (context.field.special.includes('date-created')) {
						return [key, context.object['_date']];
					}
				}

				if (key in context.object) return [key, value];

				return undefined;
			},
			{ collection: version.collection, schema: this.schema },
			{ mapNonExistentFields: true, detailedUpdateSyntax: true },
		);
	}
}

/** Deeply maps all objects of a structure. Only calls the callback for objects, not for arrays. Objects in arrays will continue to be mapped. */
function deepMapObjects(
	object: unknown,
	fn: (object: Record<string, any>, path: string[]) => void,
	path: string[] = [],
) {
	if (isPlainObject(object) && typeof object === 'object' && object !== null) {
		fn(object, path);
		Object.entries(object).map(([key, value]) => deepMapObjects(value, fn, [...path, key]));
	} else if (Array.isArray(object)) {
		object.map((value, index) => deepMapObjects(value, fn, [...path, String(index)]));
	}
}

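deepMapObjects is what stamps _user/_date onto each changed object of a saved delta above. A small hedged sketch of its traversal order:

	// Called for plain objects only; arrays are walked but not passed
	// to the callback themselves.
	deepMapObjects({ a: { b: [{ c: 1 }] } }, (object, path) => {
		console.log(path.join('.'));
	});
	// logs: '' (root), 'a', then 'a.b.0'
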
@@ -32,10 +32,11 @@ export interface Session {
export type DirectusTokenPayload = {
	id?: string;
	role: string | null;
	session?: string;
	app_access: boolean | number;
	admin_access: boolean | number;
	share?: string;
	session?: string;
	enforce_tfa?: boolean;
};

export type ShareData = {

505 api/src/utils/deep-map-response.test.ts Normal file
@@ -0,0 +1,505 @@
|
||||
import { expect, test } from 'vitest';
|
||||
import { deepMapResponse } from './deep-map-response.js';
|
||||
import { SchemaBuilder } from '@directus/schema-builder';
|
||||
import { getRelation } from '@directus/utils';
|
||||
|
||||
const schema = new SchemaBuilder()
|
||||
.collection('articles', (c) => {
|
||||
c.field('id').id();
|
||||
c.field('title').string();
|
||||
c.field('date').date();
|
||||
c.field('author').m2o('users');
|
||||
c.field('tags').m2m('tags');
|
||||
c.field('links').o2m('links', 'article_id');
|
||||
c.field('sections').m2a(['sec_num', 'sec_text']);
|
||||
})
|
||||
.collection('users', (c) => {
|
||||
c.field('id').id();
|
||||
c.field('name').string();
|
||||
})
|
||||
.collection('tags', (c) => {
|
||||
c.field('id').id();
|
||||
c.field('tag').string();
|
||||
})
|
||||
.collection('links', (c) => {
|
||||
c.field('id').id();
|
||||
c.field('name').string();
|
||||
})
|
||||
.collection('sec_num', (c) => {
|
||||
c.field('id').id();
|
||||
c.field('num').integer();
|
||||
})
|
||||
.collection('sec_text', (c) => {
|
||||
c.field('id').id();
|
||||
c.field('text').text();
|
||||
})
|
||||
.build();
|
||||
|
||||
test('map flat object', () => {
|
||||
const result = deepMapResponse(
|
||||
{
|
||||
id: 1,
|
||||
title: 2,
|
||||
author: 3,
|
||||
tags: [1, 2, 3],
|
||||
},
|
||||
([key, value], context) => {
|
||||
return [key, { value, context }];
|
||||
},
|
||||
{ schema: schema, collection: 'articles' },
|
||||
);
|
||||
|
||||
expect(result).toEqual({
|
||||
id: {
|
||||
value: 1,
|
||||
context: {
|
||||
collection: schema.collections['articles'],
|
||||
field: schema.collections['articles']!.fields['id'],
|
||||
relation: null,
|
||||
leaf: true,
|
||||
relationType: null,
|
||||
},
|
||||
},
|
||||
title: {
|
||||
value: 2,
|
||||
context: {
|
||||
collection: schema.collections['articles'],
|
||||
field: schema.collections['articles']!.fields['title'],
|
||||
relation: null,
|
||||
leaf: true,
|
||||
relationType: null,
|
||||
},
|
||||
},
|
||||
author: {
|
||||
value: 3,
|
||||
context: {
|
||||
collection: schema.collections['articles'],
|
||||
field: schema.collections['articles']!.fields['author'],
|
||||
relation: getRelation(schema.relations, 'articles', 'author'),
|
||||
leaf: true,
|
||||
relationType: 'm2o',
|
||||
},
|
||||
},
|
||||
tags: {
|
||||
value: [1, 2, 3],
|
||||
context: {
|
||||
collection: schema.collections['articles'],
|
||||
field: schema.collections['articles']!.fields['tags'],
|
||||
relation: getRelation(schema.relations, 'articles', 'tags'),
|
||||
leaf: true,
|
||||
relationType: 'o2m',
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('map m2o object', () => {
|
||||
const result = deepMapResponse(
|
||||
{
|
||||
author: {
|
||||
id: 1,
|
||||
name: 'hello',
|
||||
},
|
||||
},
|
||||
([key, value], context) => {
|
||||
return [key, { value, context }];
|
||||
},
|
||||
{ schema: schema, collection: 'articles' },
|
||||
);
|
||||
|
||||
expect(result).toEqual({
|
||||
author: {
|
||||
value: {
|
||||
id: {
|
||||
value: 1,
|
||||
context: {
|
||||
collection: schema.collections['users'],
|
||||
field: schema.collections['users']!.fields['id'],
|
||||
relation: null,
|
||||
leaf: true,
|
||||
relationType: null,
|
||||
},
|
||||
},
|
||||
name: {
|
||||
value: 'hello',
|
||||
context: {
|
||||
collection: schema.collections['users'],
|
||||
field: schema.collections['users']!.fields['name'],
|
||||
relation: null,
|
||||
leaf: true,
|
||||
relationType: null,
|
||||
},
|
||||
},
|
||||
},
|
||||
context: {
|
||||
collection: schema.collections['articles'],
|
||||
field: schema.collections['articles']!.fields['author'],
|
||||
relation: getRelation(schema.relations, 'articles', 'author'),
|
||||
leaf: false,
|
||||
relationType: 'm2o',
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('map o2m object', () => {
|
||||
const result = deepMapResponse(
|
||||
{
|
||||
links: [
|
||||
{
|
||||
id: 1,
|
||||
},
|
||||
{
|
||||
name: 'hello',
|
||||
},
|
||||
],
|
||||
},
|
||||
([key, value], context) => {
|
||||
return [key, { value, context }];
|
||||
},
|
||||
{ schema: schema, collection: 'articles' },
|
||||
);
|
||||
|
||||
expect(result).toEqual({
|
||||
links: {
|
||||
value: [
|
||||
{
|
||||
id: {
|
||||
value: 1,
|
||||
context: {
|
||||
collection: schema.collections['links'],
|
||||
field: schema.collections['links']!.fields['id'],
|
||||
relation: null,
|
||||
leaf: true,
|
||||
relationType: null,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: {
|
||||
value: 'hello',
|
||||
context: {
|
||||
collection: schema.collections['links'],
|
||||
field: schema.collections['links']!.fields['name'],
|
||||
relation: null,
|
||||
leaf: true,
|
||||
relationType: null,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
context: {
|
||||
collection: schema.collections['articles'],
|
||||
field: schema.collections['articles']!.fields['links'],
|
||||
relation: getRelation(schema.relations, 'articles', 'links'),
|
||||
leaf: false,
|
||||
relationType: 'o2m',
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('map m2m object', () => {
|
||||
const result = deepMapResponse(
|
||||
{
|
||||
tags: [
|
||||
{
|
||||
id: 1,
|
||||
articles_id: 2,
|
||||
tags_id: {
|
||||
tag: 'myTag',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
([key, value], context) => {
|
||||
return [key, { value, context }];
|
||||
},
|
||||
{ schema: schema, collection: 'articles' },
|
||||
);
|
||||
|
||||
expect(result).toEqual({
|
||||
tags: {
|
||||
value: [
|
||||
{
|
||||
id: {
|
||||
value: 1,
|
||||
context: {
|
||||
collection: schema.collections['articles_tags_junction'],
|
||||
field: schema.collections['articles_tags_junction']!.fields['id'],
|
||||
relation: null,
|
||||
leaf: true,
|
||||
relationType: null,
|
||||
},
|
||||
},
|
||||
articles_id: {
|
||||
value: 2,
|
||||
context: {
|
||||
collection: schema.collections['articles_tags_junction'],
|
||||
field: schema.collections['articles_tags_junction']!.fields['articles_id'],
|
||||
relation: getRelation(schema.relations, 'articles_tags_junction', 'articles_id'),
|
||||
leaf: true,
|
||||
relationType: 'm2o',
|
||||
},
|
||||
},
|
||||
tags_id: {
|
||||
value: {
|
||||
tag: {
|
||||
value: 'myTag',
|
||||
context: {
|
||||
collection: schema.collections['tags'],
|
||||
field: schema.collections['tags']!.fields['tag'],
|
||||
relation: null,
|
||||
leaf: true,
|
||||
relationType: null,
|
||||
},
|
||||
},
|
||||
},
|
||||
context: {
|
||||
collection: schema.collections['articles_tags_junction'],
|
||||
field: schema.collections['articles_tags_junction']!.fields['tags_id'],
|
||||
relation: getRelation(schema.relations, 'articles_tags_junction', 'tags_id'),
|
||||
leaf: false,
|
||||
relationType: 'm2o',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
context: {
|
||||
collection: schema.collections['articles'],
|
||||
field: schema.collections['articles']!.fields['tags'],
|
||||
relation: getRelation(schema.relations, 'articles', 'tags'),
|
||||
leaf: false,
|
||||
relationType: 'o2m',
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('map m2a object', () => {
|
||||
const result = deepMapResponse(
|
||||
{
|
||||
sections: [
|
||||
{
|
||||
collection: 'sec_num',
|
||||
item: {
|
||||
num: 123,
|
||||
},
|
||||
},
|
||||
{
|
||||
collection: 'sec_text',
|
||||
item: {
|
||||
text: 'abc',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
([key, value], context) => {
|
||||
return [key, { value, context }];
|
||||
},
|
||||
{ schema: schema, collection: 'articles' },
|
||||
);
|
||||
|
||||
expect(result).toEqual({
|
||||
sections: {
|
||||
value: [
|
||||
{
|
||||
collection: {
|
||||
value: 'sec_num',
|
||||
context: {
|
||||
collection: schema.collections['articles_builder'],
|
||||
field: schema.collections['articles_builder']!.fields['collection'],
|
||||
relation: null,
|
||||
leaf: true,
|
||||
relationType: null,
|
||||
},
|
||||
},
|
||||
item: {
|
||||
value: {
|
||||
num: {
|
||||
value: 123,
|
||||
context: {
|
||||
collection: schema.collections['sec_num'],
|
||||
field: schema.collections['sec_num']!.fields['num'],
|
||||
relation: null,
|
||||
leaf: true,
|
||||
relationType: null,
|
||||
},
|
||||
},
|
||||
},
|
||||
context: {
|
||||
collection: schema.collections['articles_builder'],
|
||||
field: schema.collections['articles_builder']!.fields['item'],
|
||||
relation: getRelation(schema.relations, 'articles_builder', 'item'),
|
||||
leaf: false,
|
||||
relationType: 'a2o',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
collection: {
|
||||
value: 'sec_text',
|
||||
context: {
|
||||
collection: schema.collections['articles_builder'],
|
||||
field: schema.collections['articles_builder']!.fields['collection'],
|
||||
relation: null,
|
||||
leaf: true,
|
||||
relationType: null,
|
||||
},
|
||||
},
|
||||
item: {
|
||||
value: {
|
||||
text: {
|
||||
value: 'abc',
|
||||
context: {
|
||||
collection: schema.collections['sec_text'],
|
||||
field: schema.collections['sec_text']!.fields['text'],
|
||||
relation: null,
|
||||
leaf: true,
|
||||
relationType: null,
|
||||
},
|
||||
},
|
||||
},
|
||||
context: {
|
||||
collection: schema.collections['articles_builder'],
|
||||
field: schema.collections['articles_builder']!.fields['item'],
|
||||
relation: getRelation(schema.relations, 'articles_builder', 'item'),
|
||||
leaf: false,
|
||||
relationType: 'a2o',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
context: {
|
||||
collection: schema.collections['articles'],
|
||||
field: schema.collections['articles']!.fields['sections'],
|
||||
relation: getRelation(schema.relations, 'articles', 'sections'),
|
||||
leaf: false,
|
||||
relationType: 'o2m',
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('map flat invalid field', () => {
|
||||
const result = deepMapResponse(
|
||||
{
|
||||
invalid: 1,
|
||||
},
|
||||
([key, value], context) => {
|
||||
return [key, { value, context }];
|
||||
},
|
||||
{ schema: schema, collection: 'articles' },
|
||||
);
|
||||
|
||||
expect(result).toEqual({
|
||||
invalid: 1,
|
||||
});
|
||||
});
|
||||
|
||||
test('map with invalid object', () => {
|
||||
expect(() => {
|
||||
deepMapResponse(
|
||||
new Date(),
|
||||
([key, value], context) => {
|
||||
return [key, { value, context }];
|
||||
},
|
||||
{ schema: schema, collection: 'articles' },
|
||||
);
|
||||
}).toThrowError();
|
||||
});
|
||||
|
||||
test('map flat date value', () => {
|
||||
const date = new Date();
|
||||
|
||||
const result = deepMapResponse(
|
||||
{ date },
|
||||
([key, value]) => {
|
||||
return [key, value];
|
||||
},
|
||||
{ schema: schema, collection: 'articles' },
|
||||
);
|
||||
|
||||
expect(result).toEqual({ date });
|
||||
});
|
||||
|
||||
test('map flat invalid deep field', () => {
|
||||
const result = deepMapResponse(
|
||||
{
|
||||
author: {
|
||||
invalid: 1,
|
||||
},
|
||||
},
|
||||
([key, value], context) => {
|
||||
return [key, { value, context }];
|
||||
},
|
||||
{ schema: schema, collection: 'articles' },
|
||||
);
|
||||
|
||||
expect(result).toEqual({
|
||||
author: {
|
||||
value: {
|
||||
invalid: 1,
|
||||
},
|
||||
context: {
|
||||
collection: schema.collections['articles'],
|
||||
field: schema.collections['articles']!.fields['author'],
|
||||
relation: getRelation(schema.relations, 'articles', 'author'),
|
||||
leaf: false,
|
||||
relationType: 'm2o',
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
test('map m2a relation without collection field', () => {
	const callback = () =>
		deepMapResponse(
			{
				sections: [
					{
						item: {
							num: 123,
						},
					},
				],
			},
			([key, value], context) => {
				return [key, { value, context }];
			},
			{ schema: schema, collection: 'articles' },
		);

	expect(callback).toThrowError(
		"When selecting 'articles_builder.item', the field 'articles_builder.collection' has to be selected when using versioning and m2a relations",
	);
});

98 api/src/utils/deep-map-response.ts Normal file
@@ -0,0 +1,98 @@
|
||||
import type { CollectionOverview, FieldOverview, Relation, SchemaOverview } from '@directus/types';
|
||||
import { isPlainObject } from 'lodash-es';
|
||||
import assert from 'node:assert';
|
||||
import { getRelationInfo, type RelationInfo } from './get-relation-info.js';
|
||||
import { InvalidQueryError } from '@directus/errors';
|
||||
|
||||
/**
|
||||
* Allows to deep map the response from the ItemsService with collection, field and relation context for each entry.
|
||||
* Bottom to Top depth first mapping of values.
|
||||
*/
|
||||
export function deepMapResponse(
|
||||
object: Record<string, any>,
|
||||
callback: (
|
||||
entry: [key: string | number, value: unknown],
|
||||
context: {
|
||||
collection: CollectionOverview;
|
||||
field: FieldOverview;
|
||||
relation: Relation | null;
|
||||
leaf: boolean;
|
||||
relationType: RelationInfo['relationType'] | null;
|
||||
},
|
||||
) => [key: string | number, value: unknown],
|
||||
context: {
|
||||
schema: SchemaOverview;
|
||||
collection: string;
|
||||
relationInfo?: RelationInfo;
|
||||
},
|
||||
): any {
|
||||
const collection = context.schema.collections[context.collection];
|
||||
|
||||
assert(
|
||||
isPlainObject(object) && typeof object === 'object' && object !== null,
|
||||
`DeepMapResponse only works on objects, received ${JSON.stringify(object)}`,
|
||||
);
|
||||
|
||||
return Object.fromEntries(
|
||||
Object.entries(object).map(([key, value]) => {
|
||||
const field = collection?.fields[key];
|
||||
|
||||
if (!field) return [key, value];
|
||||
|
||||
const relationInfo = getRelationInfo(context.schema.relations, collection.collection, field.field);
|
||||
let leaf = true;
|
||||
|
||||
if (relationInfo.relation && typeof value === 'object' && value !== null && isPlainObject(object)) {
|
||||
switch (relationInfo.relationType) {
|
||||
case 'm2o':
|
||||
value = deepMapResponse(value, callback, {
|
||||
schema: context.schema,
|
||||
collection: relationInfo.relation.related_collection!,
|
||||
relationInfo,
|
||||
});
|
||||
|
||||
leaf = false;
|
||||
break;
|
||||
case 'o2m':
|
||||
value = (value as any[]).map((childValue) => {
|
||||
if (isPlainObject(childValue) && typeof childValue === 'object' && childValue !== null) {
|
||||
leaf = false;
|
||||
return deepMapResponse(childValue, callback, {
|
||||
schema: context.schema,
|
||||
collection: relationInfo!.relation!.collection,
|
||||
relationInfo,
|
||||
});
|
||||
} else return childValue;
|
||||
});
|
||||
|
||||
break;
|
||||
|
||||
case 'a2o': {
|
||||
const related_collection = object[relationInfo.relation.meta!.one_collection_field!];
|
||||
|
||||
if (!related_collection) {
|
||||
throw new InvalidQueryError({
|
||||
reason: `When selecting '${collection.collection}.${field.field}', the field '${
|
||||
collection.collection
|
||||
}.${
|
||||
relationInfo.relation.meta!.one_collection_field
|
||||
}' has to be selected when using versioning and m2a relations `,
|
||||
});
|
||||
}
|
||||
|
||||
value = deepMapResponse(value, callback, {
|
||||
schema: context.schema,
|
||||
collection: related_collection,
|
||||
relationInfo,
|
||||
});
|
||||
|
||||
leaf = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return callback([key, value], { collection, field, ...relationInfo, leaf });
|
||||
}),
|
||||
);
|
||||
}
|
||||
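deepMapResponse supplies the schema context that mapDelta in the VersionsService uses to resolve user/date specials per collection. A hedged usage sketch against the test schema above (callback semantics as defined in this file):

	// Null out leaf values while keeping already-mapped nested objects.
	const redacted = deepMapResponse(
		{ title: 'Hello', author: { name: 'Jane' } },
		([key, value], context) => [key, context.leaf ? null : value],
		{ schema, collection: 'articles' },
	);
	// -> { title: null, author: { name: null } }
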
@@ -2,7 +2,7 @@ import type { Relation, RelationMeta } from '@directus/types';
import { getRelation } from '@directus/utils';
import { getRelationType } from './get-relation-type.js';

type RelationInfo = {
export type RelationInfo = {
	relation: Relation | null;
	relationType: 'o2m' | 'm2o' | 'a2o' | 'o2a' | null;
};

@@ -1,917 +0,0 @@
import { SchemaBuilder } from '@directus/schema-builder';
import { describe, expect, test } from 'vitest';
import { mergeVersionsRaw, mergeVersionsRecursive } from './merge-version-data.js';

describe('content versioning mergeVersionsRaw', () => {
	test('No versions available', () => {
		const result = mergeVersionsRaw({ test_field: 'value' }, []);

		expect(result).toMatchObject({ test_field: 'value' });
	});

	test('Basic field versions', () => {
		const result = mergeVersionsRaw({ test_field: 'value', edited_field: 'original' }, [
			{ edited_field: 'updated' },
			{ test_field: null },
		]);

		expect(result).toMatchObject({
			test_field: null,
			edited_field: 'updated',
		});
	});

	test('Relational field versions', () => {
		const result = mergeVersionsRaw({ test_field: 'value', relation: null }, [
			{ relation: { create: [{ test: 'value ' }], update: [], delete: [] } },
		]);

		expect(result).toMatchObject({
			test_field: 'value',
			relation: {
				create: [{ test: 'value ' }],
				update: [],
				delete: [],
			},
		});
	});
});

describe('content versioning mergeVersionsRecursive', () => {
	const schema = new SchemaBuilder()
		.collection('collection_a', (c) => {
			c.field('id').id();

			c.field('status').string().options({
				defaultValue: 'draft',
			});

			c.field('m2o').m2o('collection_b', 'o2m');
			c.field('m2o_c').m2o('collection_c');
			c.field('m2m').m2m('collection_c');
			c.field('m2a').m2a(['collection_b', 'collection_c']);
		})
		.collection('collection_b', (c) => {
			c.field('id').id();

			c.field('status').string().options({
				defaultValue: 'draft',
			});
		})
		.collection('collection_c', (c) => {
			c.field('id').id();

			c.field('status').string().options({
				defaultValue: 'draft',
			});

			c.field('translations').translations();
		})
		.collection('collection_c_translations', (c) => {
			c.field('id').id();
			c.field('text').string();
		})
		.build();

	test('No versions available', () => {
		const result = mergeVersionsRecursive({ status: 'draft' }, [], 'collection_a', schema);

		expect(result).toMatchObject({ status: 'draft' });
	});

	describe('m2o field', () => {
		test('Setting m2o value', () => {
			const result = mergeVersionsRecursive(
				{ id: 1, status: 'draft', m2o: null },
				[{ status: 'published' }, { m2o: 1 }],
				'collection_a',
				schema,
			);

			expect(result).toMatchObject({ id: 1, status: 'published', m2o: 1 });
		});

		test('Unsetting m2o value', () => {
			const result = mergeVersionsRecursive(
				{ id: 1, status: 'draft', m2o: { id: 1, status: 'draft' } },
				[{ status: 'published', m2o: null }],
				'collection_a',
				schema,
			);

			expect(result).toMatchObject({ id: 1, status: 'published', m2o: null });
		});

		test('Updating m2o value', () => {
			const result = mergeVersionsRecursive(
				{ id: 1, status: 'draft', m2o: { id: 1, test: 'data', status: 'draft' } },
				[{ status: 'published' }, { m2o: { id: 1, status: 'published' } }],
				'collection_a',
				schema,
			);

			expect(result).toMatchObject({ id: 1, status: 'published', m2o: { id: 1, test: 'data', status: 'published' } });
		});
	});

	describe('o2m field', () => {
		test('Setting o2m values', () => {
			const result = mergeVersionsRecursive(
				{ id: 2, status: 'draft', o2m: [] },
				[
					{
						o2m: {
							create: [{ status: 'draft' }],
							update: [
								{
									m2o: '2',
									id: 2,
								},
								{
									m2o: '2',
									id: 3,
								},
							],
							delete: [],
						},
					},
				],
				'collection_b',
				schema,
			);

			expect(result).toMatchObject({
				id: 2,
				status: 'draft',
				o2m: [{ m2o: '2', id: 2 }, { m2o: '2', id: 3 }, { status: 'draft' }],
			});
		});

		test('Updating o2m values', () => {
			const result = mergeVersionsRecursive(
				{ id: 1, status: 'draft', o2m: [1, 2, 3, { id: 4, test: 'value' }, { id: 5 }] },
				[
					{
						status: 'published',
					},
					{
						o2m: {
							create: [
								{
									test: 'new',
								},
							],
							update: [
								{
									id: 1,
								},
								{
									id: 4,
								},
							],
							delete: [2, 5],
						},
					},
				],
				'collection_b',
				schema,
			);

			expect(result).toMatchObject({
				id: 1,
				status: 'published',
				o2m: [
					{
						id: 1,
					},
					3,
					{
						id: 4,
						test: 'value',
					},
					{
						test: 'new',
					},
				],
			});
		});
	});

	describe('m2m field', () => {
		test('Adding related items', () => {
			const result = mergeVersionsRecursive(
				{
					id: 1,
					status: 'draft',
					m2m: [],
				},
				[
					{
						status: 'published',
						m2m: {
							create: [
								{
									collection_c_id: {
										status: 'published',
									},
								},
								{
									collection_a_id: '1',
									collection_c_id: {
										id: 1,
									},
								},
							],
							update: [],
							delete: [],
						},
					},
				],
				'collection_a',
				schema,
			);

			expect(result).toMatchObject({
				id: 1,
				status: 'published',
				m2m: [
					{
						collection_c_id: {
							status: 'published',
						},
					},
					{
						collection_a_id: '1',
						collection_c_id: {
							id: 1,
						},
					},
				],
			});
		});

		test('Updating m2m values', () => {
			const result = mergeVersionsRecursive(
				{
					id: 1,
					status: 'draft',
					m2m: [1, 2, 3, { id: 4 }, { id: 5 }],
				},
				[
					{
						status: 'published',
					},
					{
						m2m: {
							create: [
								{
									collection_c_id: {
										id: 3,
									},
								},
							],
							update: [
								{
									id: 1,
									collection_c_id: {
										id: 1,
									},
								},
								{
									id: 4,
									collection_c_id: {
										id: 2,
									},
								},
							],
							delete: [2, 5],
						},
					},
				],
				'collection_a',
				schema,
			);

			expect(result).toMatchObject({
				id: 1,
				status: 'published',
				m2m: [
					{
						collection_c_id: {
							id: 1,
						},
						id: 1,
					},
					3,
					{
						id: 4,
						collection_c_id: {
							id: 2,
						},
					},
					{
						collection_c_id: {
							id: 3,
						},
					},
				],
			});
		});
	});

	describe('m2a field', () => {
		test('Adding related items', () => {
			const result = mergeVersionsRecursive(
				{
					id: 1,
					status: 'draft',
					m2a: [],
				},
				[
					{
						m2a: {
							create: [
								{
									collection_a_id: '1',
									collection: 'collection_b',
									item: {
										id: 2,
									},
								},
								{
									collection_a_id: '1',
									collection: 'collection_c',
									item: {
										id: 1,
									},
								},
								{
									collection: 'collection_b',
									item: {
										status: 'published',
									},
								},
								{
									collection: 'collection_c',
									item: {
										status: 'published',
									},
								},
							],
							update: [],
							delete: [],
						},
					},
				],
				'collection_a',
				schema,
			);

			expect(result).toMatchObject({
				id: 1,
				status: 'draft',
				m2a: [
					{
						collection_a_id: '1',
						collection: 'collection_b',
						item: {
							id: 2,
						},
					},
					{
						collection_a_id: '1',
						collection: 'collection_c',
						item: {
							id: 1,
						},
					},
					{
						collection: 'collection_b',
						item: {
							status: 'published',
						},
					},
					{
						collection: 'collection_c',
						item: {
							status: 'published',
						},
					},
				],
			});
		});

		test('Updating m2a values', () => {
			const result = mergeVersionsRecursive(
				{
					id: 1,
					status: 'draft',
					m2a: [
						1,
						{
							id: 2,
							collection_a_id: 1,
|
||||
item: '1',
|
||||
collection: 'collection_c',
|
||||
},
|
||||
3,
|
||||
{ id: 4 },
|
||||
{
|
||||
id: 5,
|
||||
collection_a_id: 1,
|
||||
item: '1',
|
||||
collection: 'collection_b',
|
||||
},
|
||||
],
|
||||
},
|
||||
[
|
||||
{
|
||||
status: 'published',
|
||||
},
|
||||
{
|
||||
m2a: {
|
||||
create: [
|
||||
{
|
||||
collection: 'collection_c',
|
||||
item: {
|
||||
status: 'published',
|
||||
},
|
||||
},
|
||||
],
|
||||
update: [
|
||||
{
|
||||
collection: 'collection_b',
|
||||
item: {
|
||||
status: 'published',
|
||||
id: 1,
|
||||
},
|
||||
id: 1,
|
||||
},
|
||||
{
|
||||
collection: 'collection_b',
|
||||
item: {
|
||||
id: '2',
|
||||
},
|
||||
id: 5,
|
||||
},
|
||||
],
|
||||
delete: [2, 4],
|
||||
},
|
||||
},
|
||||
],
|
||||
'collection_a',
|
||||
schema,
|
||||
);
|
||||
|
||||
expect(result).toMatchObject({
|
||||
id: 1,
|
||||
status: 'published',
|
||||
m2a: [
|
||||
{
|
||||
id: 1,
|
||||
item: {
|
||||
status: 'published',
|
||||
id: 1,
|
||||
},
|
||||
collection: 'collection_b',
|
||||
},
|
||||
3,
|
||||
{
|
||||
id: 5,
|
||||
collection_a_id: 1,
|
||||
item: {
|
||||
id: '2',
|
||||
},
|
||||
collection: 'collection_b',
|
||||
},
|
||||
{
|
||||
collection: 'collection_c',
|
||||
item: {
|
||||
status: 'published',
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('nested relations', () => {
|
||||
test('m2o > translation', () => {
|
||||
const result = mergeVersionsRecursive(
|
||||
{
|
||||
id: 1,
|
||||
status: 'draft',
|
||||
m2o_c: {
|
||||
id: 1,
|
||||
status: 'draft',
|
||||
translations: [
|
||||
{
|
||||
id: 1,
|
||||
collection_c_id: 1,
|
||||
languages_id: 'ar-SA',
|
||||
text: 'ar-sa',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
collection_c_id: 1,
|
||||
languages_id: 'de-DE',
|
||||
text: 'de-de',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
[
|
||||
{
|
||||
m2o_c: {
|
||||
translations: {
|
||||
create: [
|
||||
{
|
||||
text: 'en-us',
|
||||
languages_id: {
|
||||
code: 'en-US',
|
||||
},
|
||||
collection_c_id: 1,
|
||||
},
|
||||
],
|
||||
update: [
|
||||
{
|
||||
text: 'german',
|
||||
languages_id: {
|
||||
code: 'de-DE',
|
||||
},
|
||||
id: 2,
|
||||
},
|
||||
],
|
||||
delete: [1],
|
||||
},
|
||||
id: 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
'collection_a',
|
||||
schema,
|
||||
);
|
||||
|
||||
expect(result).toMatchObject({
|
||||
id: 1,
|
||||
status: 'draft',
|
||||
m2o_c: {
|
||||
id: 1,
|
||||
status: 'draft',
|
||||
translations: [
|
||||
{
|
||||
id: 2,
|
||||
collection_c_id: 1,
|
||||
languages_id: {
|
||||
code: 'de-DE',
|
||||
},
|
||||
text: 'german',
|
||||
},
|
||||
{
|
||||
text: 'en-us',
|
||||
languages_id: {
|
||||
code: 'en-US',
|
||||
},
|
||||
collection_c_id: 1,
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('m2m > translations', () => {
|
||||
const result = mergeVersionsRecursive(
|
||||
{
|
||||
id: 3,
|
||||
status: 'draft',
|
||||
m2m: [
|
||||
{
|
||||
id: 2,
|
||||
collection_a_id: 3,
|
||||
collection_c_id: {
|
||||
id: 1,
|
||||
status: 'draft',
|
||||
translations: [
|
||||
{
|
||||
id: 1,
|
||||
collection_c_id: 1,
|
||||
languages_id: 'ar-SA',
|
||||
text: 'ar-sa',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
collection_c_id: 1,
|
||||
languages_id: 'de-DE',
|
||||
text: 'de-de',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
collection_a_id: 3,
|
||||
collection_c_id: {
|
||||
id: 2,
|
||||
status: 'draft',
|
||||
translations: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
[
|
||||
{
|
||||
m2m: {
|
||||
create: [],
|
||||
update: [
|
||||
{
|
||||
collection_c_id: {
|
||||
translations: {
|
||||
create: [
|
||||
{
|
||||
text: 'english',
|
||||
languages_id: {
|
||||
code: 'en-US',
|
||||
},
|
||||
collection_c_id: 1,
|
||||
},
|
||||
],
|
||||
update: [
|
||||
{
|
||||
text: 'german',
|
||||
languages_id: {
|
||||
code: 'de-DE',
|
||||
},
|
||||
id: 2,
|
||||
},
|
||||
],
|
||||
delete: [1],
|
||||
},
|
||||
id: 1,
|
||||
},
|
||||
id: 2,
|
||||
},
|
||||
],
|
||||
delete: [3],
|
||||
},
|
||||
},
|
||||
],
|
||||
'collection_a',
|
||||
schema,
|
||||
);
|
||||
|
||||
expect(result).toMatchObject({
|
||||
id: 3,
|
||||
status: 'draft',
|
||||
m2m: [
|
||||
{
|
||||
id: 2,
|
||||
collection_a_id: 3,
|
||||
collection_c_id: {
|
||||
translations: [
|
||||
{
|
||||
id: 2,
|
||||
collection_c_id: 1,
|
||||
text: 'german',
|
||||
languages_id: {
|
||||
code: 'de-DE',
|
||||
},
|
||||
},
|
||||
{
|
||||
text: 'english',
|
||||
languages_id: {
|
||||
code: 'en-US',
|
||||
},
|
||||
collection_c_id: 1,
|
||||
},
|
||||
],
|
||||
id: 1,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
test('m2a > translations', () => {
|
||||
const result = mergeVersionsRecursive(
|
||||
{
|
||||
id: 4,
|
||||
status: 'draft',
|
||||
m2a: [
|
||||
{
|
||||
id: 3,
|
||||
collection_a_id: 4,
|
||||
collection: 'collection_b',
|
||||
item: 2,
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
collection_a_id: 4,
|
||||
collection: 'collection_c',
|
||||
item: {
|
||||
id: 1,
|
||||
translations: [
|
||||
{
|
||||
id: 1,
|
||||
collection_c_id: 1,
|
||||
languages_id: 'ar-SA',
|
||||
text: 'ar-sa',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
collection_c_id: 1,
|
||||
languages_id: 'de-DE',
|
||||
text: 'de-de',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
[
|
||||
{
|
||||
m2a: {
|
||||
create: [],
|
||||
update: [
|
||||
{
|
||||
collection: 'collection_c',
|
||||
item: {
|
||||
translations: {
|
||||
create: [
|
||||
{
|
||||
languages_id: {
|
||||
code: 'en-US',
|
||||
},
|
||||
collection_c_id: 1,
|
||||
text: 'english',
|
||||
},
|
||||
],
|
||||
update: [
|
||||
{
|
||||
text: 'german',
|
||||
languages_id: {
|
||||
code: 'de-DE',
|
||||
},
|
||||
id: 2,
|
||||
},
|
||||
],
|
||||
delete: [1],
|
||||
},
|
||||
id: 1,
|
||||
},
|
||||
id: 4,
|
||||
},
|
||||
],
|
||||
delete: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
'collection_a',
|
||||
schema,
|
||||
);
|
||||
|
||||
expect(result).toMatchObject({
|
||||
id: 4,
|
||||
status: 'draft',
|
||||
m2a: [
|
||||
{
|
||||
id: 3,
|
||||
collection_a_id: 4,
|
||||
collection: 'collection_b',
|
||||
item: 2,
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
collection_a_id: 4,
|
||||
collection: 'collection_c',
|
||||
item: {
|
||||
id: 1,
|
||||
translations: [
|
||||
{
|
||||
id: 2,
|
||||
collection_c_id: 1,
|
||||
languages_id: {
|
||||
code: 'de-DE',
|
||||
},
|
||||
text: 'german',
|
||||
},
|
||||
{
|
||||
languages_id: {
|
||||
code: 'en-US',
|
||||
},
|
||||
collection_c_id: 1,
|
||||
text: 'english',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
test('creating nested relations', () => {
|
||||
const result = mergeVersionsRecursive(
|
||||
{
|
||||
id: 2,
|
||||
status: 'draft',
|
||||
m2m: [],
|
||||
m2o_c: null,
|
||||
},
|
||||
[
|
||||
{
|
||||
m2m: {
|
||||
create: [
|
||||
{
|
||||
collection_c_id: {
|
||||
translations: {
|
||||
create: [
|
||||
{
|
||||
text: 'german',
|
||||
languages_id: {
|
||||
code: 'de-DE',
|
||||
},
|
||||
},
|
||||
{
|
||||
text: 'english',
|
||||
languages_id: {
|
||||
code: 'en-US',
|
||||
},
|
||||
},
|
||||
],
|
||||
update: [],
|
||||
delete: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
update: [],
|
||||
delete: [],
|
||||
},
|
||||
m2o_c: {
|
||||
translations: {
|
||||
create: [
|
||||
{
|
||||
text: 'french',
|
||||
languages_id: {
|
||||
code: 'fr-FR',
|
||||
},
|
||||
},
|
||||
{
|
||||
text: 'english',
|
||||
languages_id: {
|
||||
code: 'en-US',
|
||||
},
|
||||
},
|
||||
],
|
||||
update: [],
|
||||
delete: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
'collection_a',
|
||||
schema,
|
||||
);
|
||||
|
||||
expect(result).toMatchObject({
|
||||
id: 2,
|
||||
status: 'draft',
|
||||
m2m: [
|
||||
{
|
||||
collection_c_id: {
|
||||
translations: [
|
||||
{
|
||||
text: 'german',
|
||||
languages_id: {
|
||||
code: 'de-DE',
|
||||
},
|
||||
},
|
||||
{
|
||||
text: 'english',
|
||||
languages_id: {
|
||||
code: 'en-US',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
m2o_c: {
|
||||
translations: [
|
||||
{
|
||||
text: 'french',
|
||||
languages_id: {
|
||||
code: 'fr-FR',
|
||||
},
|
||||
},
|
||||
{
|
||||
text: 'english',
|
||||
languages_id: {
|
||||
code: 'en-US',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,184 +0,0 @@
import type { Alterations, Item, SchemaOverview } from '@directus/types';
import { isObject } from '@directus/utils';
import Joi from 'joi';
import { cloneDeep } from 'lodash-es';

const alterationSchema = Joi.object({
  create: Joi.array().items(Joi.object().unknown()),
  update: Joi.array().items(Joi.object().unknown()),
  delete: Joi.array().items(Joi.string(), Joi.number()),
});

export function mergeVersionsRaw(item: Item, versionData: Partial<Item>[]) {
  const result = cloneDeep(item);

  for (const versionRecord of versionData) {
    for (const key of Object.keys(versionRecord)) {
      result[key] = versionRecord[key];
    }
  }

  return result;
}

export function mergeVersionsRecursive(
  item: Item,
  versionData: Item[],
  collection: string,
  schema: SchemaOverview,
): Item {
  if (versionData.length === 0) return item;

  return recursiveMerging(item, versionData, collection, schema) as Item;
}

function recursiveMerging(data: Item, versionData: unknown[], collection: string, schema: SchemaOverview): unknown {
  const result = cloneDeep(data);
  const relations = getRelations(collection, schema);

  for (const versionRecord of versionData) {
    if (!isObject(versionRecord)) {
      continue;
    }

    for (const key of Object.keys(data)) {
      if (key in versionRecord === false) {
        continue;
      }

      const currentValue: unknown = data[key];
      const newValue: unknown = versionRecord[key];

      if (typeof newValue !== 'object' || newValue === null) {
        // primitive type substitution, json and non relational array values are handled in the next check
        result[key] = newValue;
        continue;
      }

      if (key in relations === false) {
        // check for m2a exception
        if (isManyToAnyCollection(collection, schema) && key === 'item') {
          const item = addMissingKeys(isObject(currentValue) ? currentValue : {}, newValue);
          result[key] = recursiveMerging(item, [newValue], data['collection'], schema);
        } else {
          // item is not a relation
          result[key] = newValue;
        }

        continue;
      }

      const { error } = alterationSchema.validate(newValue);

      if (error) {
        if (typeof newValue === 'object' && key in relations) {
          const newItem = !currentValue || typeof currentValue !== 'object' ? newValue : currentValue;
          result[key] = recursiveMerging(newItem, [newValue], relations[key]!, schema);
        }

        continue;
      }

      const alterations = newValue as Alterations;
      const currentPrimaryKeyField = schema.collections[collection]!.primary;
      const relatedPrimaryKeyField = schema.collections[relations[key]!]!.primary;

      const mergedRelation: Item[] = [];

      if (Array.isArray(currentValue)) {
        if (alterations.delete.length > 0) {
          for (const currentItem of currentValue) {
            const currentId = typeof currentItem === 'object' ? currentItem[currentPrimaryKeyField] : currentItem;

            if (alterations.delete.includes(currentId) === false) {
              mergedRelation.push(currentItem);
            }
          }
        } else {
          mergedRelation.push(...currentValue);
        }

        if (alterations.update.length > 0) {
          for (const updatedItem of alterations.update) {
            // find existing item to update
            const itemIndex = mergedRelation.findIndex(
              (currentItem) => currentItem[relatedPrimaryKeyField] === updatedItem[currentPrimaryKeyField],
            );

            if (itemIndex === -1) {
              // check for raw primary keys
              const pkIndex = mergedRelation.findIndex(
                (currentItem) => currentItem === updatedItem[currentPrimaryKeyField],
              );

              if (pkIndex === -1) {
                // nothing to update so add the item as is
                mergedRelation.push(updatedItem);
              } else {
                mergedRelation[pkIndex] = updatedItem;
              }

              continue;
            }

            const item = addMissingKeys(mergedRelation[itemIndex]!, updatedItem);

            mergedRelation[itemIndex] = recursiveMerging(item, [updatedItem], relations[key]!, schema) as Item;
          }
        }
      }

      if (alterations.create.length > 0) {
        for (const createdItem of alterations.create) {
          const item = addMissingKeys({}, createdItem);
          mergedRelation.push(recursiveMerging(item, [createdItem], relations[key]!, schema) as Item);
        }
      }

      result[key] = mergedRelation;
    }
  }

  return result;
}

function addMissingKeys(item: Item, edits: Item) {
  const result: Item = { ...item };

  for (const key of Object.keys(edits)) {
    if (key in item === false) {
      result[key] = null;
    }
  }

  return result;
}

function isManyToAnyCollection(collection: string, schema: SchemaOverview) {
  const relation = schema.relations.find(
    (relation) => relation.collection === collection && relation.meta?.many_collection === collection,
  );

  if (!relation || !relation.meta?.one_field || !relation.related_collection) return false;

  return Boolean(
    schema.collections[relation.related_collection]?.fields[relation.meta.one_field]?.special.includes('m2a'),
  );
}

function getRelations(collection: string, schema: SchemaOverview) {
  return schema.relations.reduce(
    (result, relation) => {
      if (relation.related_collection === collection && relation.meta?.one_field) {
        result[relation.meta.one_field] = relation.collection;
      }

      if (relation.collection === collection && relation.related_collection) {
        result[relation.field] = relation.related_collection;
      }

      return result;
    },
    {} as Record<string, string>,
  );
}
@@ -1,158 +0,0 @@
import type { Filter, Permission } from '@directus/types';
import { expect, test, vi } from 'vitest';
import { fetchPermissions } from '../permissions/lib/fetch-permissions.js';
import { fetchPolicies } from '../permissions/lib/fetch-policies.js';
import { filter_has_now, permissionsCachable } from './permissions-cachable.js';

vi.mock('../permissions/lib/fetch-permissions.js');
vi.mock('../permissions/lib/fetch-policies.js');

test('filter has $NOW', () => {
  let filter: Filter = {
    created_on: {
      _gt: '$NOW',
    },
  };

  expect(filter_has_now(filter)).toBe(true);

  filter = {
    _and: [
      {
        created_on: {
          _gt: '$NOW',
        },
      },
    ],
  };

  expect(filter_has_now(filter)).toBe(true);

  filter = {
    _or: [
      {
        created_on: {
          some: {
            _gt: '$NOW(-1 year)',
          },
        },
      },
    ],
  };

  expect(filter_has_now(filter)).toBe(true);
});

test('filter does not have $NOW', () => {
  let filter: Filter = {
    created_on: {
      _gt: '2021-01-01',
    },
  };

  expect(filter_has_now(filter)).toBe(false);

  filter = {
    _and: [
      {
        created_on: {
          _gt: '2021-01-01',
        },
      },
    ],
  };

  expect(filter_has_now(filter)).toBe(false);

  filter = {
    _or: [
      {
        created_on: {
          some: {
            _gt: '2021-01-01',
          },
        },
      },
    ],
  };

  expect(filter_has_now(filter)).toBe(false);
});

test('permissions are not cacheable on many policies with $NOW', async () => {
  vi.mocked(fetchPolicies).mockResolvedValue(['policy1', 'policy2', 'policy3']);

  const permissions: Permission[] = [
    {
      action: 'read',
      collection: 'items',
      fields: ['*'],
      permissions: {
        created_on: {
          _gt: '2021-01-01',
        },
      },
      policy: 'policy1',
      presets: [],
      validation: null,
    },
    {
      action: 'read',
      collection: 'items',
      fields: ['*'],
      permissions: {
        created_on: {
          _gt: '$NOW',
        },
      },
      policy: 'policy1',
      presets: [],
      validation: null,
    },
  ];

  vi.mocked(fetchPermissions).mockResolvedValue(permissions);

  const result = await permissionsCachable('items', {} as any, {} as any);

  expect(result).toBe(false);
});

test('permissions are cacheable on many policies without $NOW', async () => {
  vi.mocked(fetchPolicies).mockResolvedValue(['policy1', 'policy2', 'policy3']);

  const permissions: Permission[] = [
    {
      action: 'read',
      collection: 'items',
      fields: ['*'],
      permissions: {
        created_on: {
          _gt: '2021-01-01',
        },
      },
      policy: 'policy1',
      presets: [],
      validation: null,
    },
    {
      action: 'read',
      collection: 'items',
      fields: ['*'],
      permissions: {
        created_on: {
          _gt: '2021-01-01',
        },
      },
      policy: 'policy1',
      presets: [],
      validation: null,
    },
  ];

  vi.mocked(fetchPermissions).mockResolvedValue(permissions);

  const result = await permissionsCachable('items', {} as any, {} as any);

  expect(result).toBe(true);
});
211 api/src/utils/permissions-cacheable.test.ts Normal file
@@ -0,0 +1,211 @@
import type { Permission } from '@directus/types';
import { describe, expect, test, vi } from 'vitest';
import { fetchPermissions } from '../permissions/lib/fetch-permissions.js';
import { fetchPolicies } from '../permissions/lib/fetch-policies.js';
import { filterHasNow, permissionsCacheable } from './permissions-cacheable.js';

vi.mock('../permissions/lib/fetch-permissions.js');
vi.mock('../permissions/lib/fetch-policies.js');

describe('filterHasNow', () => {
  describe('has $NOW', () => {
    test('operator has $NOW', () => {
      const filter = {
        created_on: {
          _gt: '$NOW',
        },
      };

      expect(filterHasNow(filter)).toBe(true);
    });

    test('operator has $NOW function', () => {
      const filter = {
        created_on: {
          _gt: '$NOW(-1 year)',
        },
      };

      expect(filterHasNow(filter)).toBe(true);
    });

    test('_and has $NOW', () => {
      const filter = {
        _and: [
          {
            created_on: {
              _gt: '$NOW',
            },
          },
        ],
      };

      expect(filterHasNow(filter)).toBe(true);
    });

    test('_or has $NOW', () => {
      const filter = {
        _or: [
          {
            created_on: {
              some: {
                _gt: '$NOW',
              },
            },
          },
        ],
      };

      expect(filterHasNow(filter)).toBe(true);
    });

    test('has nested $NOW', () => {
      const filter = {
        _or: [
          {
            _and: [
              { status: { _eq: 'archived' } },
              {
                metadata: {
                  updated_at: { _lt: '$NOW' },
                },
              },
            ],
          },
        ],
      };

      expect(filterHasNow(filter)).toBe(true);
    });
  });

  describe('does not have $NOW', () => {
    test.each(['2021-01-01', null, false, true, '$CURRENT_USER'])('operator does not have $NOW', (value) => {
      const filter = {
        created_on: {
          _eq: value,
        },
      };

      expect(filterHasNow(filter)).toBe(false);
    });

    test('_in operator does not have $NOW', () => {
      const filter = {
        created_on: {
          _in: [1, 2],
        },
      };

      expect(filterHasNow(filter)).toBe(false);
    });

    test('_and does not have $NOW', () => {
      const filter = {
        _and: [
          {
            created_on: {
              _gt: '2021-01-01',
            },
          },
        ],
      };

      expect(filterHasNow(filter)).toBe(false);
    });

    test('_or does not have $NOW', () => {
      const filter = {
        _or: [
          {
            created_on: {
              some: {
                _gt: '2021-01-01',
              },
            },
          },
        ],
      };

      expect(filterHasNow(filter)).toBe(false);
    });
  });
});

test('permissions are not cacheable on many policies with $NOW', async () => {
  vi.mocked(fetchPolicies).mockResolvedValue(['policy1', 'policy2', 'policy3']);

  const permissions: Permission[] = [
    {
      action: 'read',
      collection: 'items',
      fields: ['*'],
      permissions: {
        created_on: {
          _gt: '2021-01-01',
        },
      },
      policy: 'policy1',
      presets: [],
      validation: null,
    },
    {
      action: 'read',
      collection: 'items',
      fields: ['*'],
      permissions: {
        created_on: {
          _gt: '$NOW',
        },
      },
      policy: 'policy1',
      presets: [],
      validation: null,
    },
  ];

  vi.mocked(fetchPermissions).mockResolvedValue(permissions);

  const result = await permissionsCacheable('items', {} as any, {} as any);

  expect(result).toBe(false);
});

test('permissions are cacheable on many policies without $NOW', async () => {
  vi.mocked(fetchPolicies).mockResolvedValue(['policy1', 'policy2', 'policy3']);

  const permissions: Permission[] = [
    {
      action: 'read',
      collection: 'items',
      fields: ['*'],
      permissions: {
        created_on: {
          _gt: '2021-01-01',
        },
      },
      policy: 'policy1',
      presets: [],
      validation: null,
    },
    {
      action: 'read',
      collection: 'items',
      fields: ['*'],
      permissions: {
        created_on: {
          _gt: '2021-01-01',
        },
      },
      policy: 'policy1',
      presets: [],
      validation: null,
    },
  ];

  vi.mocked(fetchPermissions).mockResolvedValue(permissions);

  const result = await permissionsCacheable('items', {} as any, {} as any);

  expect(result).toBe(true);
});
@@ -6,9 +6,9 @@ import { createDefaultAccountability } from '../permissions/utils/create-default

/**
 * Check if the read permissions for a collection contain the dynamic variable $NOW.
 * If they do, the permissions are not cachable.
 * If they do, the permissions are not cacheable.
 */
export async function permissionsCachable(
export async function permissionsCacheable(
  collection: string | undefined,
  context: Context,
  accountability?: Accountability,
@@ -33,18 +33,20 @@ export async function permissionsCachable(
      return false;
    }

    return filter_has_now(permission.permissions);
    return filterHasNow(permission.permissions);
  });

  return !has_now;
}

export function filter_has_now(filter: Filter): boolean {
export function filterHasNow(filter: Filter): boolean {
  if (filter === null) return false;

  return Object.entries(filter).some(([key, value]) => {
    if (key === '_and' || key === '_or') {
      return (value as Filter[]).some((sub_filter) => filter_has_now(sub_filter));
      return (value as Filter[]).some((sub_filter) => filterHasNow(sub_filter));
    } else if (typeof value === 'object') {
      return filter_has_now(value);
      return filterHasNow(value);
    } else if (typeof value === 'string') {
      return value.startsWith('$NOW');
    }
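Editor's note: a minimal usage sketch of the renamed filterHasNow(); the filter shapes are illustrative, not taken from this diff. The export path matches the import used in the test file above.

import { filterHasNow } from './permissions-cacheable.js';

// true: a string value somewhere in the filter tree starts with '$NOW'
filterHasNow({ created_on: { _gt: '$NOW(-1 year)' } });

// false: no time-dependent value, so permission results for this filter can be cached safely
filterHasNow({ _or: [{ status: { _eq: 'published' } }] });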
@@ -11,9 +11,12 @@ import type { DatabaseClient } from '@directus/types';
 * Can be used to ensure the handler is run within a transaction,
 * while preventing nested transactions.
 */
export const transaction = async <T = unknown>(knex: Knex, handler: (knex: Knex) => Promise<T>): Promise<T> => {
export const transaction = async <T = unknown>(
  knex: Knex,
  handler: (knex: Knex.Transaction) => Promise<T>,
): Promise<T> => {
  if (knex.isTransaction) {
    return handler(knex);
    return handler(knex as Knex.Transaction);
  } else {
    try {
      return await knex.transaction((trx) => handler(trx));
@@ -70,10 +73,27 @@ function shouldRetryTransaction(client: DatabaseClient, error: unknown): boolean
   * @link https://www.sqlite.org/rescode.html#busy
   */
  const SQLITE_BUSY_ERROR_CODE = 'SQLITE_BUSY';
  // Both mariadb and mysql
  const MYSQL_DEADLOCK_CODE = 'ER_LOCK_DEADLOCK';
  const POSTGRES_DEADLOCK_CODE = '40P01';
  const ORACLE_DEADLOCK_CODE = 'ORA-00060';
  const MSSQL_DEADLOCK_CODE = 'EREQUEST';
  const MSSQL_DEADLOCK_NUMBER = '1205';

  const codes: Record<DatabaseClient, Record<string, any>[]> = {
    cockroachdb: [{ code: COCKROACH_RETRY_ERROR_CODE }],
    sqlite: [{ code: SQLITE_BUSY_ERROR_CODE }],
    mysql: [{ code: MYSQL_DEADLOCK_CODE }],
    mssql: [{ code: MSSQL_DEADLOCK_CODE, number: MSSQL_DEADLOCK_NUMBER }],
    oracle: [{ code: ORACLE_DEADLOCK_CODE }],
    postgres: [{ code: POSTGRES_DEADLOCK_CODE }],
    redshift: [],
  };

  return (
    isObject(error) &&
    ((client === 'cockroachdb' && error['code'] === COCKROACH_RETRY_ERROR_CODE) ||
      (client === 'sqlite' && error['code'] === SQLITE_BUSY_ERROR_CODE))
    codes[client].some((code) => {
      return Object.entries(code).every(([key, value]) => String(error[key]) === value);
    })
  );
}
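Editor's note: a usage sketch of the updated transaction() helper, assuming a configured Knex instance named knex. Because the helper checks knex.isTransaction, a nested call reuses the outer transaction instead of opening a second one:

import { transaction } from './transaction.js';

await transaction(knex, async (trx) => {
  await trx('articles').update({ status: 'published' }).where({ id: 1 });
  // a nested transaction(trx, ...) call here would reuse trx, since trx.isTransaction is true
});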
694 api/src/utils/versioning/deep-map-with-schema.test.ts Normal file
@@ -0,0 +1,694 @@
import { expect, test } from 'vitest';
import { deepMapWithSchema } from './deep-map-with-schema.js';
import { SchemaBuilder } from '@directus/schema-builder';
import { getRelation } from '@directus/utils';

const schema = new SchemaBuilder()
  .collection('articles', (c) => {
    c.field('id').id();
    c.field('title').string();
    c.field('date').date();
    c.field('author').m2o('users');
    c.field('tags').m2m('tags');
    c.field('links').o2m('links', 'article_id');
    c.field('sections').m2a(['sec_num', 'sec_text']);
  })
  .collection('users', (c) => {
    c.field('id').id();
    c.field('name').string();
  })
  .collection('tags', (c) => {
    c.field('id').id();
    c.field('tag').string();
  })
  .collection('links', (c) => {
    c.field('id').id();
    c.field('name').string();
  })
  .collection('sec_num', (c) => {
    c.field('id').id();
    c.field('num').integer();
  })
  .collection('sec_text', (c) => {
    c.field('id').id();
    c.field('text').text();
  })
  .build();

test('map flat object', () => {
  const object = {
    id: 1,
    title: 2,
    author: 3,
    tags: [1, 2, 3],
  };

  const result = deepMapWithSchema(
    object,
    ([key, value], context) => {
      return [key, { value, context }];
    },
    { schema: schema, collection: 'articles' },
  );

  expect(result).toEqual({
    id: {
      value: 1,
      context: {
        collection: schema.collections['articles'],
        field: schema.collections['articles']!.fields['id'],
        relation: null,
        leaf: true,
        relationType: null,
        object,
      },
    },
    title: {
      value: 2,
      context: {
        collection: schema.collections['articles'],
        field: schema.collections['articles']!.fields['title'],
        relation: null,
        leaf: true,
        relationType: null,
        object,
      },
    },
    author: {
      value: 3,
      context: {
        collection: schema.collections['articles'],
        field: schema.collections['articles']!.fields['author'],
        relation: getRelation(schema.relations, 'articles', 'author'),
        leaf: true,
        relationType: 'm2o',
        object,
      },
    },
    tags: {
      value: [1, 2, 3],
      context: {
        collection: schema.collections['articles'],
        field: schema.collections['articles']!.fields['tags'],
        relation: getRelation(schema.relations, 'articles', 'tags'),
        leaf: true,
        relationType: 'o2m',
        object,
      },
    },
  });
});

test('map m2o object', () => {
  const object = {
    author: {
      id: 1,
      name: 'hello',
    },
  };

  const result = deepMapWithSchema(
    object,
    ([key, value], context) => {
      return [key, { value, context }];
    },
    { schema: schema, collection: 'articles' },
  );

  expect(result).toEqual({
    author: {
      value: {
        id: {
          value: 1,
          context: {
            collection: schema.collections['users'],
            field: schema.collections['users']!.fields['id'],
            relation: null,
            leaf: true,
            object: object.author,
            relationType: null,
          },
        },
        name: {
          value: 'hello',
          context: {
            collection: schema.collections['users'],
            field: schema.collections['users']!.fields['name'],
            relation: null,
            leaf: true,
            relationType: null,
            object: object.author,
          },
        },
      },
      context: {
        collection: schema.collections['articles'],
        field: schema.collections['articles']!.fields['author'],
        relation: getRelation(schema.relations, 'articles', 'author'),
        leaf: false,
        object,
        relationType: 'm2o',
      },
    },
  });
});

test('map o2m object', () => {
  const object = {
    links: [
      {
        id: 1,
      },
      {
        name: 'hello',
      },
    ],
  };

  const result = deepMapWithSchema(
    object,
    ([key, value], context) => {
      return [key, { value, context }];
    },
    { schema: schema, collection: 'articles' },
  );

  expect(result).toEqual({
    links: {
      value: [
        {
          id: {
            value: 1,
            context: {
              collection: schema.collections['links'],
              field: schema.collections['links']!.fields['id'],
              relation: null,
              leaf: true,
              relationType: null,
              object: object.links[0],
            },
          },
        },
        {
          name: {
            value: 'hello',
            context: {
              collection: schema.collections['links'],
              field: schema.collections['links']!.fields['name'],
              relation: null,
              leaf: true,
              relationType: null,
              object: object.links[1],
            },
          },
        },
      ],
      context: {
        collection: schema.collections['articles'],
        field: schema.collections['articles']!.fields['links'],
        relation: getRelation(schema.relations, 'articles', 'links'),
        leaf: false,
        object,
        relationType: 'o2m',
      },
    },
  });
});

test('map o2m object with detailed syntax', () => {
  const object = {
    links: { create: [{ name: 'hello' }], update: [{ id: 1 }], delete: [] },
  };

  const result = deepMapWithSchema(
    object,
    ([key, value], context) => {
      return [key, { value, context }];
    },
    { schema: schema, collection: 'articles' },
    { detailedUpdateSyntax: true },
  );

  expect(result).toEqual({
    links: {
      value: {
        create: [
          {
            name: {
              value: 'hello',
              context: {
                collection: schema.collections['links'],
                field: schema.collections['links']!.fields['name'],
                relation: null,
                leaf: true,
                relationType: null,
                object: object.links.create[0],
              },
            },
          },
        ],
        update: [
          {
            id: {
              value: 1,
              context: {
                collection: schema.collections['links'],
                field: schema.collections['links']!.fields['id'],
                relation: null,
                leaf: true,
                relationType: null,
                object: object.links.update[0],
              },
            },
          },
        ],
        delete: [],
      },
      context: {
        collection: schema.collections['articles'],
        field: schema.collections['articles']!.fields['links'],
        relation: getRelation(schema.relations, 'articles', 'links'),
        leaf: false,
        object,
        relationType: 'o2m',
      },
    },
  });
});

test('map m2m object', () => {
  const object = {
    tags: [
      {
        id: 1,
        articles_id: 2,
        tags_id: {
          tag: 'myTag',
        },
      },
    ],
  };

  const result = deepMapWithSchema(
    object,
    ([key, value], context) => {
      return [key, { value, context }];
    },
    { schema: schema, collection: 'articles' },
  );

  expect(result).toEqual({
    tags: {
      value: [
        {
          id: {
            value: 1,
            context: {
              collection: schema.collections['articles_tags_junction'],
              field: schema.collections['articles_tags_junction']!.fields['id'],
              relation: null,
              leaf: true,
              relationType: null,
              object: object.tags[0],
            },
          },
          articles_id: {
            value: 2,
            context: {
              collection: schema.collections['articles_tags_junction'],
              field: schema.collections['articles_tags_junction']!.fields['articles_id'],
              relation: getRelation(schema.relations, 'articles_tags_junction', 'articles_id'),
              leaf: true,
              relationType: 'm2o',
              object: object.tags[0],
            },
          },
          tags_id: {
            value: {
              tag: {
                value: 'myTag',
                context: {
                  collection: schema.collections['tags'],
                  field: schema.collections['tags']!.fields['tag'],
                  relation: null,
                  leaf: true,
                  relationType: null,
                  object: object.tags[0]?.tags_id,
                },
              },
            },
            context: {
              collection: schema.collections['articles_tags_junction'],
              field: schema.collections['articles_tags_junction']!.fields['tags_id'],
              relation: getRelation(schema.relations, 'articles_tags_junction', 'tags_id'),
              leaf: false,
              relationType: 'm2o',
              object: object.tags[0],
            },
          },
        },
      ],
      context: {
        collection: schema.collections['articles'],
        field: schema.collections['articles']!.fields['tags'],
        relation: getRelation(schema.relations, 'articles', 'tags'),
        leaf: false,
        object,
        relationType: 'o2m',
      },
    },
  });
});

test('map m2a object', () => {
  const object = {
    sections: [
      {
        collection: 'sec_num',
        item: {
          num: 123,
        },
      },
      {
        collection: 'sec_text',
        item: {
          text: 'abc',
        },
      },
    ],
  };

  const result = deepMapWithSchema(
    object,
    ([key, value], context) => {
      return [key, { value, context }];
    },
    { schema: schema, collection: 'articles' },
  );

  expect(result).toEqual({
    sections: {
      value: [
        {
          collection: {
            value: 'sec_num',
            context: {
              collection: schema.collections['articles_builder'],
              field: schema.collections['articles_builder']!.fields['collection'],
              relation: null,
              leaf: true,
              object: object.sections[0],
              relationType: null,
            },
          },
          item: {
            value: {
              num: {
                value: 123,
                context: {
                  collection: schema.collections['sec_num'],
                  field: schema.collections['sec_num']!.fields['num'],
                  relation: null,
                  leaf: true,
                  object: object.sections[0]?.item,
                  relationType: null,
                },
              },
            },
            context: {
              collection: schema.collections['articles_builder'],
              field: schema.collections['articles_builder']!.fields['item'],
              relation: getRelation(schema.relations, 'articles_builder', 'item'),
              leaf: false,
              object: object.sections[0],
              relationType: 'a2o',
            },
          },
        },
        {
          collection: {
            value: 'sec_text',
            context: {
              collection: schema.collections['articles_builder'],
              field: schema.collections['articles_builder']!.fields['collection'],
              relation: null,
              leaf: true,
              object: object.sections[1],
              relationType: null,
            },
          },
          item: {
            value: {
              text: {
                value: 'abc',
                context: {
                  collection: schema.collections['sec_text'],
                  field: schema.collections['sec_text']!.fields['text'],
                  relation: null,
                  leaf: true,
                  object: object.sections[1]?.item,
                  relationType: null,
                },
              },
            },
            context: {
              collection: schema.collections['articles_builder'],
              field: schema.collections['articles_builder']!.fields['item'],
              relation: getRelation(schema.relations, 'articles_builder', 'item'),
              leaf: false,
              object: object.sections[1],
              relationType: 'a2o',
            },
          },
        },
      ],
      context: {
        collection: schema.collections['articles'],
        field: schema.collections['articles']!.fields['sections'],
        relation: getRelation(schema.relations, 'articles', 'sections'),
        leaf: false,
        object,
        relationType: 'o2m',
      },
    },
  });
});

test('map flat invalid field', () => {
  const result = deepMapWithSchema(
    {
      invalid: 1,
    },
    ([key, value], context) => {
      return [key, { value, context }];
    },
    { schema: schema, collection: 'articles' },
  );

  expect(result).toEqual({
    invalid: 1,
  });
});

test('map with invalid object', () => {
  expect(() => {
    deepMapWithSchema(
      new Date(),
      ([key, value], context) => {
        return [key, { value, context }];
      },
      { schema: schema, collection: 'articles' },
    );
  }).toThrowError();
});

test('map flat date value', () => {
  const date = new Date();

  const result = deepMapWithSchema(
    { date },
    ([key, value]) => {
      return [key, value];
    },
    { schema: schema, collection: 'articles' },
  );

  expect(result).toEqual({ date });
});

test('map flat invalid deep field', () => {
  const object = {
    author: {
      invalid: 1,
    },
  };

  const result = deepMapWithSchema(
    object,
    ([key, value], context) => {
      return [key, { value, context }];
    },
    { schema: schema, collection: 'articles' },
  );

  expect(result).toEqual({
    author: {
      value: {
        invalid: 1,
      },
      context: {
        collection: schema.collections['articles'],
        field: schema.collections['articles']!.fields['author'],
        relation: getRelation(schema.relations, 'articles', 'author'),
        object,
        leaf: false,
        relationType: 'm2o',
      },
    },
  });
});

test('map flat invalid deep field', () => {
  const object = {
    author: {
      invalid: 1,
    },
  };

  const result = deepMapWithSchema(
    object,
    ([key, value], context) => {
      return [key, { value, context }];
    },
    { schema: schema, collection: 'articles' },
  );

  expect(result).toEqual({
    author: {
      value: {
        invalid: 1,
      },
      context: {
        collection: schema.collections['articles'],
        field: schema.collections['articles']!.fields['author'],
        relation: getRelation(schema.relations, 'articles', 'author'),
        leaf: false,
        object,
        relationType: 'm2o',
      },
    },
  });
});

test('map m2a relation without collection field', () => {
  const callback = () =>
    deepMapWithSchema(
      {
        sections: [
          {
            item: {
              num: 123,
            },
          },
        ],
      },
      ([key, value], context) => {
        return [key, { value, context }];
      },
      { schema: schema, collection: 'articles' },
    );

  expect(callback).toThrowError(
    "When selecting 'articles_builder.item', the field 'articles_builder.collection' has to be selected when using versioning and m2a relations",
  );
});

const simpleSchema = new SchemaBuilder()
  .collection('articles', (c) => {
    c.field('id').id();
    c.field('title').string();
    c.field('author').m2o('users');
  })
  .collection('users', (c) => {
    c.field('id').id();
    c.field('name').string();
  })
  .build();

test('map with non-existent fields', () => {
  const object = {
    id: 1,
    title: 'hi',
    author: {
      id: 1,
    },
  };

  const result = deepMapWithSchema(
    object,
    ([key, value], context) => {
      return [key, { value, context }];
    },
    { schema: simpleSchema, collection: 'articles' },
    { mapNonExistentFields: true },
  );

  expect(result).toEqual({
    id: {
      value: 1,
      context: {
        collection: simpleSchema.collections['articles'],
        field: simpleSchema.collections['articles']!.fields['id'],
        relation: null,
        leaf: true,
        relationType: null,
        object,
      },
    },
    title: {
      value: 'hi',
      context: {
        collection: simpleSchema.collections['articles'],
        field: simpleSchema.collections['articles']!.fields['title'],
        relation: null,
        leaf: true,
        relationType: null,
        object,
      },
    },
    author: {
      value: {
        id: {
          value: 1,
          context: {
            collection: simpleSchema.collections['users'],
            field: simpleSchema.collections['users']!.fields['id'],
            relation: null,
            leaf: true,
            relationType: null,
            object: object.author,
          },
        },
        name: {
          value: undefined,
          context: {
            collection: simpleSchema.collections['users'],
            field: simpleSchema.collections['users']!.fields['name'],
            relation: null,
            leaf: true,
            relationType: null,
            object: object.author,
          },
        },
      },
      context: {
        collection: simpleSchema.collections['articles'],
        field: simpleSchema.collections['articles']!.fields['author'],
        relation: getRelation(simpleSchema.relations, 'articles', 'author'),
        leaf: false,
        relationType: 'm2o',
        object,
      },
    },
  });
});
142 api/src/utils/versioning/deep-map-with-schema.ts Normal file
@@ -0,0 +1,142 @@
import type { CollectionOverview, FieldOverview, Relation, SchemaOverview } from '@directus/types';
import { isPlainObject } from 'lodash-es';
import assert from 'node:assert';
import { getRelationInfo, type RelationInfo } from '../get-relation-info.js';
import { InvalidQueryError } from '@directus/errors';

/**
 * Allows to deep map the data like a response or delta changes with collection, field and relation context for each entry.
 * Bottom to Top depth first mapping of values.
 */
export function deepMapWithSchema(
  object: Record<string, any>,
  callback: (
    entry: [key: string | number, value: unknown],
    context: {
      collection: CollectionOverview;
      field: FieldOverview;
      relation: Relation | null;
      leaf: boolean;
      relationType: RelationInfo['relationType'] | null;
      object: Record<string, any>;
    },
  ) => [key: string | number, value: unknown] | undefined,
  context: {
    schema: SchemaOverview;
    collection: string;
    relationInfo?: RelationInfo;
  },
  options?: {
    /** If set to true, non-existent fields will be included in the mapping and will have a value of undefined */
    mapNonExistentFields?: boolean;
    /** If set to true, it will map the "create", "update" and "delete" syntax for o2m relations found in deltas */
    detailedUpdateSyntax?: boolean;
  },
): any {
  const collection = context.schema.collections[context.collection]!;

  assert(
    isPlainObject(object) && typeof object === 'object' && object !== null,
    `DeepMapResponse only works on objects, received ${JSON.stringify(object)}`,
  );

  let fields: [string, FieldOverview][];

  if (options?.mapNonExistentFields) {
    fields = Object.entries(collection.fields);
  } else {
    fields = Object.keys(object).map((key) => [key, collection.fields[key]!]);
  }

  return Object.fromEntries(
    fields
      .map(([key, field]) => {
        let value = object[key];

        if (!field) return [key, value];

        const relationInfo = getRelationInfo(context.schema.relations, collection.collection, field.field);
        let leaf = true;

        if (relationInfo.relation && typeof value === 'object' && value !== null && isPlainObject(object)) {
          switch (relationInfo.relationType) {
            case 'm2o':
              value = deepMapWithSchema(
                value,
                callback,
                {
                  schema: context.schema,
                  collection: relationInfo.relation.related_collection!,
                  relationInfo,
                },
                options,
              );

              leaf = false;
              break;

            case 'o2m': {
              function map(childValue: any) {
                if (isPlainObject(childValue) && typeof childValue === 'object' && childValue !== null) {
                  leaf = false;
                  return deepMapWithSchema(
                    childValue,
                    callback,
                    {
                      schema: context.schema,
                      collection: relationInfo!.relation!.collection,
                      relationInfo,
                    },
                    options,
                  );
                } else return childValue;
              }

              if (Array.isArray(value)) {
                value = (value as any[]).map(map);
              } else if (options?.detailedUpdateSyntax && isPlainObject(value)) {
                value = {
                  create: value['create']?.map(map),
                  update: value['update']?.map(map),
                  delete: value['delete']?.map(map),
                };
              }

              break;
            }

            case 'a2o': {
              const related_collection = object[relationInfo.relation.meta!.one_collection_field!];

              if (!related_collection) {
                throw new InvalidQueryError({
                  reason: `When selecting '${collection.collection}.${field.field}', the field '${
                    collection.collection
                  }.${
                    relationInfo.relation.meta!.one_collection_field
                  }' has to be selected when using versioning and m2a relations `,
                });
              }

              value = deepMapWithSchema(
                value,
                callback,
                {
                  schema: context.schema,
                  collection: related_collection,
                  relationInfo,
                },
                options,
              );

              leaf = false;
              break;
            }
          }
        }

        return callback([key, value], { collection, field, ...relationInfo, leaf, object });
      })
      .filter((f) => f) as any[],
  );
}
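Editor's note: a minimal usage sketch of deepMapWithSchema(), mirroring the call pattern from the tests above; payload and the redaction rule are illustrative. Returning undefined from the callback drops the entry, since the implementation filters out falsy mapped entries:

const redacted = deepMapWithSchema(
  payload,
  ([key, value], context) => {
    if (context.relation) return undefined; // drop every relational field
    return [key, value]; // keep scalar leaves unchanged
  },
  { schema, collection: 'articles' },
);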
126 api/src/utils/versioning/handle-version.ts Normal file
@@ -0,0 +1,126 @@
import { ForbiddenError } from '@directus/errors';
import type { Accountability, Item, PrimaryKey, Query, QueryOptions } from '@directus/types';
import type { ItemsService as ItemsServiceType } from '../../services/index.js';
import { transaction } from '../transaction.js';
import { deepMapWithSchema } from './deep-map-with-schema.js';
import { splitRecursive } from './split-recursive.js';

export async function handleVersion(self: ItemsServiceType, key: PrimaryKey, queryWithKey: Query, opts?: QueryOptions) {
  const { VersionsService } = await import('../../services/versions.js');
  const { ItemsService } = await import('../../services/items.js');

  if (queryWithKey.versionRaw) {
    const originalData = await self.readByQuery(queryWithKey, opts);

    if (originalData.length === 0) {
      throw new ForbiddenError();
    }

    const versionsService = new VersionsService({
      schema: self.schema,
      accountability: self.accountability,
      knex: self.knex,
    });

    const version = await versionsService.getVersionSave(queryWithKey.version!, self.collection, key as string);

    return Object.assign(originalData[0]!, version?.delta);
  }

  let result: Item | undefined;

  const versionsService = new VersionsService({
    schema: self.schema,
    accountability: self.accountability,
    knex: self.knex,
  });

  const createdIDs: Record<string, PrimaryKey[]> = {};
  const version = await versionsService.getVersionSave(queryWithKey.version!, self.collection, key as string, false);

  if (!version) {
    throw new ForbiddenError();
  }

  const { delta } = version;

  await transaction(self.knex, async (trx) => {
    const itemsServiceAdmin = new ItemsService<Item>(self.collection, {
      schema: self.schema,
      accountability: {
        admin: true,
      } as Accountability,
      knex: trx,
    });

    if (delta) {
      const { rawDelta, defaultOverwrites } = splitRecursive(delta);

      await itemsServiceAdmin.updateOne(key, rawDelta, {
        emitEvents: false,
        autoPurgeCache: false,
        skipTracking: true,
        overwriteDefaults: defaultOverwrites as any,
        onItemCreate: (collection, pk) => {
          if (collection in createdIDs === false) createdIDs[collection] = [];

          createdIDs[collection]!.push(pk);
        },
      });
    }

    const itemsServiceUser = new ItemsService<Item>(self.collection, {
      schema: self.schema,
      accountability: self.accountability,
      knex: trx,
    });

    result = (await itemsServiceUser.readByQuery(queryWithKey, opts))[0];

    await trx.rollback();
  });

  if (!result) {
    throw new ForbiddenError();
  }

  return deepMapWithSchema(
    result,
    ([key, value], context) => {
      if (context.relationType === 'm2o' || context.relationType === 'a2o') {
        const ids = createdIDs[context.relation!.related_collection!];
        const match = ids?.find((id) => String(id) === String(value));

        if (match) {
          return [key, null];
        }
      } else if (context.relationType === 'o2m' && Array.isArray(value)) {
        const ids = createdIDs[context.relation!.collection];
        return [
          key,
          value.map((val) => {
            const match = ids?.find((id) => String(id) === String(val));

            if (match) {
              return null;
            }

            return val;
          }),
        ];
      }

      if (context.field.field === context.collection.primary) {
        const ids = createdIDs[context.collection.collection];
        const match = ids?.find((id) => String(id) === String(value));

        if (match) {
          return [key, null];
        }
      }

      return [key, value];
    },
    { collection: self.collection, schema: self.schema },
  );
}
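Editor's note on handleVersion above: the version delta is applied by an admin-scoped ItemsService inside a transaction that is always rolled back, so the versioned result is read back with the requesting user's permissions but nothing ever persists. A compressed sketch of that pattern, using the variable names from the file:

await transaction(knex, async (trx) => {
  await itemsServiceAdmin.updateOne(key, rawDelta, { emitEvents: false }); // stage the delta
  result = (await itemsServiceUser.readByQuery(queryWithKey))[0]; // read it back as the user
  await trx.rollback(); // discard the staged writes
});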
37 api/src/utils/versioning/merge-version-data.test.ts Normal file
@@ -0,0 +1,37 @@
|
||||
import { describe, expect, test } from 'vitest';
import { mergeVersionsRaw } from './merge-version-data.js';

describe('content versioning mergeVersionsRaw', () => {
	test('No versions available', () => {
		const result = mergeVersionsRaw({ test_field: 'value' }, []);

		expect(result).toMatchObject({ test_field: 'value' });
	});

	test('Basic field versions', () => {
		const result = mergeVersionsRaw({ test_field: 'value', edited_field: 'original' }, [
			{ edited_field: 'updated' },
			{ test_field: null },
		]);

		expect(result).toMatchObject({
			test_field: null,
			edited_field: 'updated',
		});
	});

	test('Relational field versions', () => {
		const result = mergeVersionsRaw({ test_field: 'value', relation: null }, [
			{ relation: { create: [{ test: 'value ' }], update: [], delete: [] } },
		]);

		expect(result).toMatchObject({
			test_field: 'value',
			relation: {
				create: [{ test: 'value ' }],
				update: [],
				delete: [],
			},
		});
	});
});
14
api/src/utils/versioning/merge-version-data.ts
Normal file
@@ -0,0 +1,14 @@
import type { Item } from '@directus/types';
import { cloneDeep } from 'lodash-es';

export function mergeVersionsRaw(item: Item, versionData: Partial<Item>[]) {
	const result = cloneDeep(item);

	for (const versionRecord of versionData) {
		for (const key of Object.keys(versionRecord)) {
			result[key] = versionRecord[key];
		}
	}

	return result;
}
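
A quick sketch of the merge semantics this implies: later version records win, and values are replaced wholesale rather than deep-merged.

import { mergeVersionsRaw } from './merge-version-data.js';

// Each version record overwrites keys from the previous one; nested objects
// are replaced outright, not merged.
const merged = mergeVersionsRaw({ a: { x: 1, y: 2 } }, [{ a: { x: 9 } }, { a: null }]);
// merged -> { a: null }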
113
api/src/utils/versioning/split-recursive.test.ts
Normal file
@@ -0,0 +1,113 @@
import { expect, test } from 'vitest';
import { splitRecursive } from './split-recursive.js';

test('split with no default overwrites', () => {
	const input = {
		id: 1,
		name: 'Test',
		related: {
			id: 2,
			name: 'Related',
		},
	};

	const { rawDelta, defaultOverwrites } = splitRecursive(input);

	expect(rawDelta).toEqual(input);

	expect(defaultOverwrites).toEqual({
		_date: undefined,
		_user: undefined,
		related: { _date: undefined, _user: undefined },
	});
});

test('split with default overwrites', () => {
	const input = {
		id: 1,
		name: 'Test',
		_user: 'admin',
		related: {
			id: 2,
			name: 'Related',
			_date: '2023-01-01T00:00:00Z',
		},
	};

	const { rawDelta, defaultOverwrites } = splitRecursive(input);

	expect(rawDelta).toEqual({
		id: 1,
		name: 'Test',
		related: {
			id: 2,
			name: 'Related',
		},
	});

	expect(defaultOverwrites).toEqual({
		_user: 'admin',
		_date: undefined,
		related: { _date: '2023-01-01T00:00:00Z', _user: undefined },
	});
});

test('split with arrays and nested objects', () => {
	const input = {
		id: 1,
		name: 'Test',
		items: [
			{
				id: 2,
				value: 'Item 1',
				details: {
					info: 'Detail 1',
					_user: 'editor',
				},
			},
			{
				id: 3,
				value: 'Item 2',
			},
		],
	};

	const { rawDelta, defaultOverwrites } = splitRecursive(input);

	expect(rawDelta).toEqual({
		id: 1,
		name: 'Test',
		items: [
			{
				id: 2,
				value: 'Item 1',
				details: {
					info: 'Detail 1',
				},
			},
			{
				id: 3,
				value: 'Item 2',
			},
		],
	});

	expect(defaultOverwrites).toEqual({
		_user: undefined,
		_date: undefined,
		items: [
			{
				details: {
					_user: 'editor',
					_date: undefined,
				},
				_user: undefined,
				_date: undefined,
			},
			{
				_user: undefined,
				_date: undefined,
			},
		],
	});
});
34
api/src/utils/versioning/split-recursive.ts
Normal file
@@ -0,0 +1,34 @@
import { isPlainObject } from 'lodash-es';

export function splitRecursive(object: unknown): {
	rawDelta: Record<string, any>;
	defaultOverwrites: Record<string, any> | undefined;
} {
	if (isPlainObject(object) && typeof object === 'object' && object !== null) {
		const { _user, _date, ...rest } = object as any;
		const defaultOverwrites: Record<string, any> = { _user, _date };

		for (const key in rest) {
			const { rawDelta, defaultOverwrites: innerDefaultOverwrites } = splitRecursive(rest[key]);
			rest[key] = rawDelta;
			if (innerDefaultOverwrites) defaultOverwrites[key] = innerDefaultOverwrites;
		}

		return { rawDelta: rest, defaultOverwrites };
	} else if (Array.isArray(object)) {
		const rest: Record<string, any> = [];
		const defaultOverwrites: Record<string, any> = [];

		for (const key in object) {
			const { rawDelta, defaultOverwrites: innerDefaultOverwrites } = splitRecursive(object[key]);
			rest[key] = rawDelta;
			if (innerDefaultOverwrites) defaultOverwrites[key] = innerDefaultOverwrites;
		}

		return { rawDelta: rest, defaultOverwrites };
	}

	return { rawDelta: object as any, defaultOverwrites: undefined };
}
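
To tie this back to the service code earlier in the diff: `rawDelta` is what gets written, while `defaultOverwrites` carries the `_user`/`_date` values captured when the version was saved (these are passed as `overwriteDefaults` to `updateOne`, presumably so update-time defaults don't clobber them). A worked example of the split, with illustrative values:

import { splitRecursive } from './split-recursive.js';

const delta = {
	status: 'draft',
	_user: 'editor-id',
	related: { note: 'x', _date: '2024-01-01T00:00:00Z' },
};

const { rawDelta, defaultOverwrites } = splitRecursive(delta);

// rawDelta          -> { status: 'draft', related: { note: 'x' } }
// defaultOverwrites -> { _user: 'editor-id', _date: undefined,
//                        related: { _date: '2024-01-01T00:00:00Z', _user: undefined } }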
@@ -1,6 +1,6 @@
{
	"name": "@directus/app",
	"version": "13.13.1",
	"version": "14.0.0",
	"description": "App dashboard for Directus",
	"homepage": "https://directus.io",
	"repository": {
@@ -2,6 +2,9 @@

exports[`Mount component 1`] = `
"<div data-v-f0538e61="" class="v-notice info">
  <v-icon-stub data-v-f0538e61="" name="info" left=""></v-icon-stub>
  <div data-v-f0538e61="" class="v-notice-title">
    <v-icon-stub data-v-f0538e61="" name="info" left=""></v-icon-stub>
  </div>
  <!--v-if-->
</div>"
`;
@@ -1,6 +1,6 @@
|
||||
<script setup lang="ts">
|
||||
import { getFlatpickrLocale } from '@/utils/get-flatpickr-locale';
|
||||
import { format, formatISO } from 'date-fns';
|
||||
import { format } from 'date-fns';
|
||||
import Flatpickr from 'flatpickr';
|
||||
import { computed, onBeforeUnmount, onMounted, ref, watch } from 'vue';
|
||||
import { useI18n } from 'vue-i18n';
|
||||
@@ -111,7 +111,7 @@ function emitValue(value: Date | null) {
		emit('update:modelValue', format(value, 'HH:mm:ss'));
		break;
	case 'timestamp':
		emit('update:modelValue', formatISO(value));
		emit('update:modelValue', value.toISOString());
		break;
}
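
For context on the `timestamp` case above, the two calls differ in how they render the timezone (a sketch; the exact offset shown depends on the host machine):

import { formatISO } from 'date-fns';

const value = new Date(Date.UTC(2024, 0, 1, 12, 0, 0));

formatISO(value);    // e.g. '2024-01-01T13:00:00+01:00' — local time with a UTC offset
value.toISOString(); // '2024-01-01T12:00:00.000Z'       — always UTC with a trailing 'Z'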
@@ -1,5 +1,6 @@
<script setup lang="ts">
import { ref, computed, useTemplateRef, watch, nextTick } from 'vue';
import { useFocusTrapManager } from '@/composables/use-focus-trap-manager';
import { useShortcut } from '@/composables/use-shortcut';
import { useDialogRouteLeave } from '@/composables/use-dialog-route';
import { useFocusTrap } from '@vueuse/integrations/useFocusTrap';
@@ -79,19 +80,22 @@ function nudge() {

function useOverlayFocusTrap() {
	const overlayEl = useTemplateRef<HTMLDivElement>('overlayEl');
	const { addFocusTrap } = useFocusTrapManager();

	const { activate, deactivate } = useFocusTrap(overlayEl, {
	const focusTrap = useFocusTrap(overlayEl, {
		escapeDeactivates: false,
		initialFocus: false,
	});

	addFocusTrap(focusTrap);

	watch(
		internalActive,
		async (newActive) => {
			await nextTick();

			if (newActive) activate();
			else deactivate();
			if (newActive) focusTrap.activate();
			else focusTrap.deactivate();
		},
		{ immediate: true },
	);
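
Keeping the whole object returned by `useFocusTrap`, rather than destructured `activate`/`deactivate`, is what allows it to be registered with the manager. A hypothetical sketch of what such a manager might do with it — this is not the repo's actual `useFocusTrapManager` implementation:

import type { UseFocusTrapReturn } from '@vueuse/integrations/useFocusTrap';

// Hypothetical manager internals: with the full trap object, a newly opened
// overlay can pause the trap underneath it instead of deactivating it.
const traps: UseFocusTrapReturn[] = [];

function addFocusTrap(trap: UseFocusTrapReturn) {
	traps[traps.length - 1]?.pause();
	traps.push(trap);
}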
@@ -0,0 +1,10 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html

exports[`Mount component 1`] = `
"<v-notice data-v-4508db34="" type="danger" class="full">
  <div data-v-4508db34="">
    <p data-v-4508db34="">The following fields have invalid values:</p>
    <ul data-v-4508db34="" class="validation-errors-list"></ul>
  </div>
</v-notice>"
`;
@@ -3,7 +3,8 @@ import FormField from '@/components/v-form/form-field.vue';
import { i18n } from '@/lang';
import { Width } from '@directus/system-data';
import { mount } from '@vue/test-utils';
import { describe, it, expect } from 'vitest';
import { describe, it, expect, vi } from 'vitest';
import { createTestingPinia } from '@pinia/testing';

const baseField = {
	field: 'test',
@@ -25,7 +26,12 @@ const baseField = {

const global = {
	components: { VMenu },
	plugins: [i18n],
	plugins: [
		i18n,
		createTestingPinia({
			createSpy: vi.fn,
		}),
	],
};

describe('FormField', () => {
179
app/src/components/v-form/validation-errors.test.ts
Normal file
@@ -0,0 +1,179 @@
import type { GlobalMountOptions } from '@/__utils__/types';
import { i18n } from '@/lang';
import type { Field, ValidationError } from '@directus/types';
import { mount } from '@vue/test-utils';
import { expect, test, describe, it } from 'vitest';
import ValidationErrors from './validation-errors.vue';

const global: GlobalMountOptions = {
	plugins: [i18n],
};

test('Mount component', () => {
	expect(ValidationErrors).toBeTruthy();

	const wrapper = mount(ValidationErrors, {
		props: {
			validationErrors: [],
			fields: [],
		},
		global,
	});

	expect(wrapper.html()).toMatchSnapshot();
});

describe('Custom validation message', () => {
	const baseField: Field = {
		collection: 'posts',
		name: 'Title',
		field: 'title',
		type: 'string',
		schema: {
			name: 'title',
			table: 'posts',
			data_type: 'varchar',
			default_value: null,
			max_length: 255,
			numeric_precision: null,
			numeric_scale: null,
			is_generated: false,
			generation_expression: null,
			is_nullable: true,
			is_unique: false,
			is_indexed: false,
			is_primary_key: false,
			has_auto_increment: false,
			foreign_key_column: null,
			foreign_key_table: null,
		},
		meta: {
			id: 1,
			collection: 'posts',
			field: 'title',
			special: null,
			interface: 'input',
			options: null,
			display: null,
			display_options: null,
			readonly: false,
			hidden: false,
			sort: 1,
			width: 'full',
			translations: null,
			note: null,
			conditions: null,
			required: false,
			group: null,
			validation: null,
			validation_message: null,
		},
	};

	const customValidationRule = { _and: [{ title: { _contains: 'a' } }] };

	const customValidationError = {
		field: 'title',
		path: [],
		type: 'contains',
		substring: 'a',
		hidden: false,
		group: null,
	} as unknown as ValidationError;

	const requiredValidationError = {
		field: 'title',
		path: [],
		type: 'nnull',
		hidden: false,
		group: null,
	} as unknown as ValidationError;

	it('appears when custom validation rule fails', () => {
		const wrapper = mount(ValidationErrors, {
			props: {
				validationErrors: [customValidationError],
				fields: [
					{
						...baseField,
						meta: {
							...baseField.meta,
							validation: customValidationRule,
							validation_message: 'my custom message',
						},
					} as Field,
				],
			},
			global,
		});

		expect(wrapper.html()).toContain('my custom message');
	});

	it('appears when required rule fails, but no custom validation rule exists', () => {
		const wrapper = mount(ValidationErrors, {
			props: {
				validationErrors: [requiredValidationError],
				fields: [
					{
						...baseField,
						meta: {
							...baseField.meta,
							validation: null,
							validation_message: 'my custom message',
							required: true,
						},
					} as Field,
				],
			},
			global,
		});

		expect(wrapper.html()).toContain('my custom message');
	});

	it('does not appear when required rule fails and custom validation rule exists', () => {
		const wrapper = mount(ValidationErrors, {
			props: {
				validationErrors: [requiredValidationError],
				fields: [
					{
						...baseField,
						meta: {
							...baseField.meta,
							validation: customValidationRule,
							validation_message: 'my custom message',
							required: true,
						},
					} as Field,
				],
			},
			global,
		});

		expect(wrapper.html()).not.toContain('my custom message');
	});

	it('appears when required rule and custom validation rule both fail', () => {
		const wrapper = mount(ValidationErrors, {
			props: {
				validationErrors: [customValidationError, requiredValidationError],
				fields: [
					{
						...baseField,
						meta: {
							...baseField.meta,
							validation: customValidationRule,
							validation_message: 'my custom message',
							required: true,
						},
					} as Field,
				],
			},
			global,
		});

		expect(wrapper.html()).toContain('my custom message');
		expect(wrapper.html()).toContain('Value is required');
	});
});
Some files were not shown because too many files have changed in this diff.