Mirror of https://github.com/directus/directus.git (synced 2026-04-25 03:00:53 -04:00)
.changeset/afraid-trainers-brush.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Fixed `groupBy` behavior that resulted in an internal server error when used with relational item permissions

.changeset/bright-plants-rule.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/constants': minor
---

Added permission actions constant

.changeset/chilled-seas-care.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/utils': major
---

Added new dynamic variables to `parseFilter` and added the `processChunk` helper

.changeset/dull-rings-drive.md | 5 (new file)
@@ -0,0 +1,5 @@
---
"@directus/app": patch
---

Ensured collections in system permissions interface are scrolled into view when added but out of view

.changeset/dull-spies-worry.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Fixed an issue that would cause the API to return an error when a root field in a m2a builder was queried
.changeset/eighty-toys-clap.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Fixed `reduceSchema` to strip out collections the user does not have access to
.changeset/famous-candles-camp.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': major
---

Replaced the database client library `mysql` with `mysql2`, used for MySQL/MariaDB

.changeset/famous-carpets-exercise.md | 5 (new file)
@@ -0,0 +1,5 @@
---
"@directus/sdk": minor
---

Implemented new SDK functions for policies

.changeset/fuzzy-news-drop.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Fixed user counting where users were double counted and inactive users with policies were counted as well

.changeset/heavy-geese-kneel.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/types': major
---

Added new types and modified existing types required for Policies

.changeset/lucky-humans-carry.md | 5 (new file)
@@ -0,0 +1,5 @@
---
"@directus/app": patch
---

Ensured the permissions table under policies is displayed correctly on mobile devices

.changeset/nine-geckos-jog.md | 5 (new file)
@@ -0,0 +1,5 @@
---
"@directus/api": minor
---

Updated WebSocket subscriptions to include the new policies collection

.changeset/pink-wolves-beg.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Fixed filter creation in `fetchPolicies` for users without roles

.changeset/polite-crabs-eat.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/extensions': major
---

Changed module `preRegisterCheck` signature to align with the changes made for Policies
.changeset/poor-ladybugs-help.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/app': patch
---

Fixed the table layout default query to not include presentational fields
.changeset/proud-cameras-travel.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Fixed aggregation field existence and permission checks

.changeset/rare-squids-compete.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/system-data': major
---

Added new collections and fields and updated existing fields and permissions needed for Policies

.changeset/red-buckets-wink.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/app': patch
---

Fixed missing policies in public role policy selection

.changeset/rude-peas-confess.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': minor
---

Used explicit headings for CSV export

.changeset/serious-mangos-tease.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/memory': minor
---

Added new `clear` method to cache implementations

.changeset/serious-rings-carry.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Fixed down migration erroring on post migration permissions

.changeset/slow-rats-smoke.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Fixed permission checking for o2m related fields

.changeset/slow-snakes-occur.md | 5 (new file)
@@ -0,0 +1,5 @@
---
"@directus/api": minor
---

Implemented new GraphQL queries for policies

.changeset/strong-numbers-warn.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/errors': minor
---

Added error extension to the `ForbiddenError`

.changeset/tasty-guests-fry.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Ensured that policies attached to a user, role and parent roles are correctly prioritized

.changeset/ten-beds-pretend.md | 5 (new file)
@@ -0,0 +1,5 @@
---
"@directus/api": patch
---

Ensured the default `DB_FILENAME` option from the Docker Image is not applied when using MySQL/MariaDB, fixing a corresponding warning

.changeset/thick-dingos-film.md | 5 (new file)
@@ -0,0 +1,5 @@
---
"@directus/api": patch
---

Fixed the policies migration for the case where permissions had been configured for the public role

.changeset/thin-feet-float.md | 6 (new file)
@@ -0,0 +1,6 @@
---
'@directus/api': major
'@directus/app': major
---

Added a new policy based permissions system
.changeset/three-teachers-destroy.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Fixed a query error on some DB vendors when using multi-relation sort
.changeset/twenty-yaks-live.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Migrated `NotificationsService` to new policies system

.changeset/two-items-joke.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Fixed broken permissions for sorting of aggregate query when using the aggregate result as sort field
.changeset/violet-numbers-retire.md | 5 (new file)
@@ -0,0 +1,5 @@
---
'@directus/api': patch
---

Fixed an issue where keys in filter operand objects were incorrectly checked for field permissions
.github/workflows/blackbox.yml | 2 (vendored)
@@ -4,7 +4,7 @@ on:
  push:
    branches:
      - main
      - next
      - v11-rc
    paths:
      - api/**
      - tests/blackbox/**
.github/workflows/check.yml | 2 (vendored)
@@ -4,7 +4,7 @@ on:
  pull_request:
    branches:
      - main
      - next
      - v11-rc

concurrency:
  group: check-${{ github.ref }}
.github/workflows/docs.yml | 2 (vendored)
@@ -4,7 +4,7 @@ on:
  pull_request:
    branches:
      - main
      - next
      - v11-rc
    paths:
      - docs/**
      - .github/workflows/docs.yml
@@ -61,7 +61,8 @@
    "build": "tsc --project tsconfig.prod.json && copyfiles \"src/**/*.{yaml,liquid}\" -u 1 dist",
    "cli": "NODE_ENV=development SERVE_APP=false tsx src/cli/run.ts",
    "dev": "NODE_ENV=development SERVE_APP=true tsx watch --ignore extensions --clear-screen=false src/start.ts",
    "test": "vitest --watch=false"
    "test": "vitest run",
    "test:watch": "vitest"
  },
  "dependencies": {
    "@authenio/samlify-node-xmllint": "2.0.0",
@@ -224,7 +225,7 @@
  },
  "optionalDependencies": {
    "@keyv/redis": "2.8.5",
    "mysql": "2.18.1",
    "mysql2": "3.10.0",
    "nodemailer-mailgun-transport": "2.1.5",
    "nodemailer-sendgrid": "1.0.3",
    "oracledb": "6.5.1",
@@ -47,18 +47,10 @@ vi.mock('./flows', () => ({
  }),
}));

vi.mock('./middleware/check-ip', () => ({
  checkIP: Router(),
}));

vi.mock('./middleware/schema', () => ({
  default: Router(),
}));

vi.mock('./middleware/get-permissions', () => ({
  default: Router(),
}));

vi.mock('./auth', () => ({
  registerAuthProviders: vi.fn(),
}));
@@ -12,6 +12,7 @@ import path from 'path';
import qs from 'qs';
import { registerAuthProviders } from './auth.js';
import activityRouter from './controllers/activity.js';
import accessRouter from './controllers/access.js';
import assetsRouter from './controllers/assets.js';
import authRouter from './controllers/auth.js';
import collectionsRouter from './controllers/collections.js';
@@ -28,6 +29,7 @@ import notificationsRouter from './controllers/notifications.js';
import operationsRouter from './controllers/operations.js';
import panelsRouter from './controllers/panels.js';
import permissionsRouter from './controllers/permissions.js';
import policiesRouter from './controllers/policies.js';
import presetsRouter from './controllers/presets.js';
import relationsRouter from './controllers/relations.js';
import revisionsRouter from './controllers/revisions.js';
@@ -54,11 +56,9 @@ import { getFlowManager } from './flows.js';
import { createExpressLogger, useLogger } from './logger/index.js';
import authenticate from './middleware/authenticate.js';
import cache from './middleware/cache.js';
import { checkIP } from './middleware/check-ip.js';
import cors from './middleware/cors.js';
import { errorHandler } from './middleware/error-handler.js';
import extractToken from './middleware/extract-token.js';
import getPermissions from './middleware/get-permissions.js';
import rateLimiterGlobal from './middleware/rate-limiter-global.js';
import rateLimiter from './middleware/rate-limiter-ip.js';
import sanitizeQuery from './middleware/sanitize-query.js';
@@ -260,16 +260,12 @@ export default async function createApp(): Promise<express.Application> {

  app.use(authenticate);

  app.use(checkIP);

  app.use(sanitizeQuery);

  app.use(cache);

  app.use(schema);

  app.use(getPermissions);

  await emitter.emitInit('middlewares.after', { app });

  await emitter.emitInit('routes.before', { app });
@@ -279,6 +275,7 @@ export default async function createApp(): Promise<express.Application> {
  app.use('/graphql', graphqlRouter);

  app.use('/activity', activityRouter);
  app.use('/access', accessRouter);
  app.use('/assets', assetsRouter);
  app.use('/collections', collectionsRouter);
  app.use('/dashboards', dashboardsRouter);
@@ -297,6 +294,7 @@ export default async function createApp(): Promise<express.Application> {
  app.use('/operations', operationsRouter);
  app.use('/panels', panelsRouter);
  app.use('/permissions', permissionsRouter);
  app.use('/policies', policiesRouter);
  app.use('/presets', presetsRouter);
  app.use('/translations', translationsRouter);
  app.use('/relations', relationsRouter);
@@ -14,17 +14,18 @@ import { Router } from 'express';
import Joi from 'joi';
import type { Client, Error, LDAPResult, SearchCallbackResponse, SearchEntry } from 'ldapjs';
import ldap from 'ldapjs';
import { REFRESH_COOKIE_OPTIONS, SESSION_COOKIE_OPTIONS } from '../../constants.js';
import getDatabase from '../../database/index.js';
import emitter from '../../emitter.js';
import { useLogger } from '../../logger/index.js';
import { respond } from '../../middleware/respond.js';
import { createDefaultAccountability } from '../../permissions/utils/create-default-accountability.js';
import { AuthenticationService } from '../../services/authentication.js';
import { UsersService } from '../../services/users.js';
import type { AuthDriverOptions, AuthenticationMode, User } from '../../types/index.js';
import asyncHandler from '../../utils/async-handler.js';
import { getIPFromReq } from '../../utils/get-ip-from-req.js';
import { AuthDriver } from '../auth.js';
import { REFRESH_COOKIE_OPTIONS, SESSION_COOKIE_OPTIONS } from '../../constants.js';

interface UserInfo {
  dn: string;
@@ -417,10 +418,9 @@ export function createLDAPAuthRouter(provider: string): Router {
  asyncHandler(async (req, res, next) => {
    const env = useEnv();

    const accountability: Accountability = {
    const accountability: Accountability = createDefaultAccountability({
      ip: getIPFromReq(req),
      role: null,
    };
    });

    const userAgent = req.get('user-agent')?.substring(0, 1024);
    if (userAgent) accountability.userAgent = userAgent;
@@ -1,3 +1,4 @@
import { useEnv } from '@directus/env';
import { InvalidCredentialsError, InvalidPayloadError } from '@directus/errors';
import type { Accountability } from '@directus/types';
import argon2 from 'argon2';
@@ -5,8 +6,8 @@ import { Router } from 'express';
import Joi from 'joi';
import { performance } from 'perf_hooks';
import { REFRESH_COOKIE_OPTIONS, SESSION_COOKIE_OPTIONS } from '../../constants.js';
import { useEnv } from '@directus/env';
import { respond } from '../../middleware/respond.js';
import { createDefaultAccountability } from '../../permissions/utils/create-default-accountability.js';
import { AuthenticationService } from '../../services/authentication.js';
import type { AuthenticationMode, User } from '../../types/index.js';
import asyncHandler from '../../utils/async-handler.js';
@@ -62,10 +63,9 @@ export function createLocalAuthRouter(provider: string): Router {
    const STALL_TIME = env['LOGIN_STALL_TIME'] as number;
    const timeStart = performance.now();

    const accountability: Accountability = {
    const accountability: Accountability = createDefaultAccountability({
      ip: getIPFromReq(req),
      role: null,
    };
    });

    const userAgent = req.get('user-agent')?.substring(0, 1024);
    if (userAgent) accountability.userAgent = userAgent;
@@ -22,16 +22,17 @@ import getDatabase from '../../database/index.js';
import emitter from '../../emitter.js';
import { useLogger } from '../../logger/index.js';
import { respond } from '../../middleware/respond.js';
import { createDefaultAccountability } from '../../permissions/utils/create-default-accountability.js';
import { AuthenticationService } from '../../services/authentication.js';
import { UsersService } from '../../services/users.js';
import type { AuthData, AuthDriverOptions, User } from '../../types/index.js';
import asyncHandler from '../../utils/async-handler.js';
import { getConfigFromEnv } from '../../utils/get-config-from-env.js';
import { getIPFromReq } from '../../utils/get-ip-from-req.js';
import { getSecret } from '../../utils/get-secret.js';
import { isLoginRedirectAllowed } from '../../utils/is-login-redirect-allowed.js';
import { Url } from '../../utils/url.js';
import { LocalAuthDriver } from './local.js';
import { getSecret } from '../../utils/get-secret.js';

export class OAuth2AuthDriver extends LocalAuthDriver {
  client: Client;
@@ -353,10 +354,9 @@ export function createOAuth2AuthRouter(providerName: string): Router {

    const { verifier, redirect, prompt } = tokenData;

    const accountability: Accountability = {
    const accountability: Accountability = createDefaultAccountability({
      ip: getIPFromReq(req),
      role: null,
    };
    });

    const userAgent = req.get('user-agent')?.substring(0, 1024);
    if (userAgent) accountability.userAgent = userAgent;
@@ -22,6 +22,7 @@ import getDatabase from '../../database/index.js';
import emitter from '../../emitter.js';
import { useLogger } from '../../logger/index.js';
import { respond } from '../../middleware/respond.js';
import { createDefaultAccountability } from '../../permissions/utils/create-default-accountability.js';
import { AuthenticationService } from '../../services/authentication.js';
import { UsersService } from '../../services/users.js';
import type { AuthData, AuthDriverOptions, User } from '../../types/index.js';
@@ -383,10 +384,7 @@ export function createOpenIDAuthRouter(providerName: string): Router {

    const { verifier, redirect, prompt } = tokenData;

    const accountability: Accountability = {
      ip: getIPFromReq(req),
      role: null,
    };
    const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req) });

    const userAgent = req.get('user-agent')?.substring(0, 1024);
    if (userAgent) accountability.userAgent = userAgent;
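Each of these auth routers replaces the hand-built accountability literal with the shared `createDefaultAccountability` helper. The helper's implementation is not part of this excerpt; below is a minimal sketch of what such a factory could look like, assuming it only fills in default `Accountability` values and merges any overrides passed by the caller (the exact field list in the real helper may differ):

```ts
import type { Accountability } from '@directus/types';

// Hypothetical sketch only; the real helper lives in
// api/src/permissions/utils/create-default-accountability.ts and may differ.
export function createDefaultAccountability(overrides?: Partial<Accountability>): Accountability {
  return {
    role: null,
    roles: [],
    user: null,
    admin: false,
    app: false,
    ...overrides,
  };
}
```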
@@ -9,6 +9,7 @@ import { compress, decompress } from './utils/compress.js';
import { getConfigFromEnv } from './utils/get-config-from-env.js';
import { getMilliseconds } from './utils/get-milliseconds.js';
import { validateEnv } from './utils/validate-env.js';
import { clearCache as clearPermissionCache } from './permissions/cache.js';

import { createRequire } from 'node:module';
@@ -97,6 +98,10 @@ export async function clearSystemCache(opts?: {
  }

  await localSchemaCache.clear();

  // Since a lot of cached permission functions rely on the schema, it needs to be cleared as well
  await clearPermissionCache();

  messenger.publish<CacheMessage>('schemaChanged', { autoPurgeCache: opts?.autoPurgeCache });
}
@@ -9,11 +9,13 @@ import getDatabase, {
import runMigrations from '../../../database/migrations/run.js';
import installDatabase from '../../../database/seeds/run.js';
import { useLogger } from '../../../logger/index.js';
import { AccessService } from '../../../services/access.js';
import { PoliciesService } from '../../../services/policies.js';
import { RolesService } from '../../../services/roles.js';
import { SettingsService } from '../../../services/settings.js';
import { UsersService } from '../../../services/users.js';
import { getSchema } from '../../../utils/get-schema.js';
import { defaultAdminRole, defaultAdminUser } from '../../utils/defaults.js';
import { defaultAdminPolicy, defaultAdminRole, defaultAdminUser } from '../../utils/defaults.js';

export default async function bootstrap({ skipAdminInit }: { skipAdminInit?: boolean }): Promise<void> {
  const logger = useLogger();
@@ -82,8 +84,14 @@ async function createDefaultAdmin(schema: SchemaOverview) {
  const { nanoid } = await import('nanoid');

  logger.info('Setting up first admin role...');
  const accessService = new AccessService({ schema });
  const policiesService = new PoliciesService({ schema });
  const rolesService = new RolesService({ schema });

  const role = await rolesService.createOne(defaultAdminRole);
  const policy = await policiesService.createOne(defaultAdminPolicy);

  await accessService.createOne({ policy, role });

  logger.info('Adding first admin user...');
  const usersService = new UsersService({ schema });
@@ -104,5 +112,5 @@ async function createDefaultAdmin(schema: SchemaOverview) {

  const token = env['ADMIN_TOKEN'] ?? null;

  await usersService.createOne({ email: adminEmail, password: adminPassword, token, role, ...defaultAdminUser });
  await usersService.createOne({ ...defaultAdminUser, email: adminEmail, password: adminPassword, token, role });
}
@@ -11,7 +11,7 @@ import { generateHash } from '../../../utils/generate-hash.js';
import type { Credentials } from '../../utils/create-db-connection.js';
import createDBConnection from '../../utils/create-db-connection.js';
import createEnv from '../../utils/create-env/index.js';
import { defaultAdminRole, defaultAdminUser } from '../../utils/defaults.js';
import { defaultAdminPolicy, defaultAdminRole, defaultAdminUser } from '../../utils/defaults.js';
import { drivers, getDriverForClient } from '../../utils/drivers.js';
import { databaseQuestions } from './questions.js';
@@ -98,20 +98,19 @@ export default async function init(): Promise<void> {

  firstUser.password = await generateHash(firstUser.password);

  const userID = randomUUID();
  const roleID = randomUUID();
  const role = randomUUID();
  const policy = randomUUID();

  await db('directus_roles').insert({
    id: roleID,
    ...defaultAdminRole,
  });
  await db('directus_roles').insert({ ...defaultAdminRole, id: role });
  await db('directus_policies').insert({ ...defaultAdminPolicy, id: policy });
  await db('directus_access').insert({ id: randomUUID(), role, policy });

  await db('directus_users').insert({
    id: userID,
    ...defaultAdminUser,
    id: randomUUID(),
    email: firstUser.email,
    password: firstUser.password,
    role: roleID,
    ...defaultAdminUser,
    role,
  });

  await db.destroy();
@@ -1,12 +1,21 @@
export const defaultAdminRole = {
import type { Policy, Role, User } from '@directus/types';

export const defaultAdminRole: Partial<Role> = {
  name: 'Administrator',
  icon: 'verified',
  admin_access: true,
  description: '$t:admin_description',
};

export const defaultAdminUser = {
export const defaultAdminUser: Partial<User> = {
  status: 'active',
  first_name: 'Admin',
  last_name: 'User',
};

export const defaultAdminPolicy: Partial<Policy> = {
  name: 'Administrator',
  icon: 'verified',
  admin_access: true,
  app_access: true,
  description: '$t:admin_description',
};
@@ -61,7 +61,7 @@ export const DEFAULT_AUTH_PROVIDER = 'default';

export const COLUMN_TRANSFORMS = ['year', 'month', 'day', 'weekday', 'hour', 'minute', 'second'];

export const GENERATE_SPECIAL = ['uuid', 'date-created', 'role-created', 'user-created'];
export const GENERATE_SPECIAL = ['uuid', 'date-created', 'role-created', 'user-created'] as const;

export const UUID_REGEX = '[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}';
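The `as const` assertion added to `GENERATE_SPECIAL` narrows the array from `string[]` to a readonly tuple of string literals, which lets a union type be derived from the values. A small illustration of the effect (the derived type alias is illustrative and not part of the diff):

```ts
export const GENERATE_SPECIAL = ['uuid', 'date-created', 'role-created', 'user-created'] as const;

// With `as const` the values stay as literal types instead of widening to string,
// so a union can be derived directly from the constant:
type GenerateSpecial = (typeof GENERATE_SPECIAL)[number];
// => 'uuid' | 'date-created' | 'role-created' | 'user-created'
```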
api/src/controllers/access.ts | 201 (new file)
@@ -0,0 +1,201 @@
import { ErrorCode, isDirectusError } from '@directus/errors';
import type { PrimaryKey } from '@directus/types';
import express from 'express';
import { respond } from '../middleware/respond.js';
import useCollection from '../middleware/use-collection.js';
import { validateBatch } from '../middleware/validate-batch.js';
import { MetaService } from '../services/meta.js';
import { AccessService } from '../services/access.js';
import asyncHandler from '../utils/async-handler.js';
import { sanitizeQuery } from '../utils/sanitize-query.js';

const router = express.Router();

router.use(useCollection('directus_access'));

router.post(
  '/',
  asyncHandler(async (req, res, next) => {
    const service = new AccessService({
      accountability: req.accountability,
      schema: req.schema,
    });

    const savedKeys: PrimaryKey[] = [];

    if (Array.isArray(req.body)) {
      const keys = await service.createMany(req.body);
      savedKeys.push(...keys);
    } else {
      const key = await service.createOne(req.body);
      savedKeys.push(key);
    }

    try {
      if (Array.isArray(req.body)) {
        const items = await service.readMany(savedKeys, req.sanitizedQuery);
        res.locals['payload'] = { data: items };
      } else {
        const item = await service.readOne(savedKeys[0]!, req.sanitizedQuery);
        res.locals['payload'] = { data: item };
      }
    } catch (error: any) {
      if (isDirectusError(error, ErrorCode.Forbidden)) {
        return next();
      }

      throw error;
    }

    return next();
  }),
  respond,
);

const readHandler = asyncHandler(async (req, res, next) => {
  const service = new AccessService({
    accountability: req.accountability,
    schema: req.schema,
  });

  const metaService = new MetaService({
    accountability: req.accountability,
    schema: req.schema,
  });

  let result;

  if (req.body.keys) {
    result = await service.readMany(req.body.keys, req.sanitizedQuery);
  } else {
    result = await service.readByQuery(req.sanitizedQuery);
  }

  const meta = await metaService.getMetaForQuery('directus_access', req.sanitizedQuery);

  res.locals['payload'] = { data: result, meta };
  return next();
});

router.get('/', validateBatch('read'), readHandler, respond);
router.search('/', validateBatch('read'), readHandler, respond);

router.get(
  '/:pk',
  asyncHandler(async (req, res, next) => {
    if (req.path.endsWith('me')) return next();

    const service = new AccessService({
      accountability: req.accountability,
      schema: req.schema,
    });

    const record = await service.readOne(req.params['pk']!, req.sanitizedQuery);

    res.locals['payload'] = { data: record };
    return next();
  }),
  respond,
);

router.patch(
  '/',
  validateBatch('update'),
  asyncHandler(async (req, res, next) => {
    const service = new AccessService({
      accountability: req.accountability,
      schema: req.schema,
    });

    let keys: PrimaryKey[] = [];

    if (Array.isArray(req.body)) {
      keys = await service.updateBatch(req.body);
    } else if (req.body.keys) {
      keys = await service.updateMany(req.body.keys, req.body.data);
    } else {
      const sanitizedQuery = sanitizeQuery(req.body.query, req.accountability);
      keys = await service.updateByQuery(sanitizedQuery, req.body.data);
    }

    try {
      const result = await service.readMany(keys, req.sanitizedQuery);
      res.locals['payload'] = { data: result };
    } catch (error: any) {
      if (isDirectusError(error, ErrorCode.Forbidden)) {
        return next();
      }

      throw error;
    }

    return next();
  }),
  respond,
);

router.patch(
  '/:pk',
  asyncHandler(async (req, res, next) => {
    const service = new AccessService({
      accountability: req.accountability,
      schema: req.schema,
    });

    const primaryKey = await service.updateOne(req.params['pk']!, req.body);

    try {
      const item = await service.readOne(primaryKey, req.sanitizedQuery);
      res.locals['payload'] = { data: item || null };
    } catch (error: any) {
      if (isDirectusError(error, ErrorCode.Forbidden)) {
        return next();
      }

      throw error;
    }

    return next();
  }),
  respond,
);

router.delete(
  '/',
  validateBatch('delete'),
  asyncHandler(async (req, _res, next) => {
    const service = new AccessService({
      accountability: req.accountability,
      schema: req.schema,
    });

    if (Array.isArray(req.body)) {
      await service.deleteMany(req.body);
    } else if (req.body.keys) {
      await service.deleteMany(req.body.keys);
    } else {
      const sanitizedQuery = sanitizeQuery(req.body.query, req.accountability);
      await service.deleteByQuery(sanitizedQuery);
    }

    return next();
  }),
  respond,
);

router.delete(
  '/:pk',
  asyncHandler(async (req, _res, next) => {
    const service = new AccessService({
      accountability: req.accountability,
      schema: req.schema,
    });

    await service.deleteOne(req.params['pk']!);

    return next();
  }),
  respond,
);

export default router;
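Because this new controller is mounted at `/access` in app.ts above, the standard CRUD endpoints become available for the `directus_access` junction that attaches policies to roles and users. A hedged usage sketch; the host, token, and ids are placeholders for illustration only:

```ts
// Attach an existing policy to a role via the new /access endpoint.
// URL, token, and ids below are placeholders, not real values.
const response = await fetch('https://example.directus.app/access', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Authorization: 'Bearer <admin-token>',
  },
  body: JSON.stringify({
    role: '<role-id>',
    policy: '<policy-id>',
  }),
});

const { data } = await response.json();
console.log(data); // the created directus_access row
```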
@@ -13,6 +13,7 @@ import {
import { DEFAULT_AUTH_PROVIDER, REFRESH_COOKIE_OPTIONS, SESSION_COOKIE_OPTIONS } from '../constants.js';
import { useLogger } from '../logger/index.js';
import { respond } from '../middleware/respond.js';
import { createDefaultAccountability } from '../permissions/utils/create-default-accountability.js';
import { AuthenticationService } from '../services/authentication.js';
import { UsersService } from '../services/users.js';
import type { AuthenticationMode } from '../types/auth.js';
@@ -102,10 +103,7 @@ function getCurrentRefreshToken(req: Request, mode: AuthenticationMode): string
router.post(
  '/refresh',
  asyncHandler(async (req, res, next) => {
    const accountability: Accountability = {
      ip: getIPFromReq(req),
      role: null,
    };
    const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req) });

    const userAgent = req.get('user-agent')?.substring(0, 1024);
    if (userAgent) accountability.userAgent = userAgent;
@@ -156,10 +154,7 @@ router.post(
router.post(
  '/logout',
  asyncHandler(async (req, res, next) => {
    const accountability: Accountability = {
      ip: getIPFromReq(req),
      role: null,
    };
    const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req) });

    const userAgent = req.get('user-agent')?.substring(0, 1024);
    if (userAgent) accountability.userAgent = userAgent;
@@ -203,10 +198,7 @@ router.post(
      throw new InvalidPayloadError({ reason: `"email" field is required` });
    }

    const accountability: Accountability = {
      ip: getIPFromReq(req),
      role: null,
    };
    const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req) });

    const userAgent = req.get('user-agent')?.substring(0, 1024);
    if (userAgent) accountability.userAgent = userAgent;
@@ -242,10 +234,7 @@ router.post(
      throw new InvalidPayloadError({ reason: `"password" field is required` });
    }

    const accountability: Accountability = {
      ip: getIPFromReq(req),
      role: null,
    };
    const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req) });

    const userAgent = req.get('user-agent')?.substring(0, 1024);
    if (userAgent) accountability.userAgent = userAgent;
@@ -1,11 +1,13 @@
import { ErrorCode, isDirectusError } from '@directus/errors';
import { ErrorCode, ForbiddenError, isDirectusError } from '@directus/errors';
import type { PrimaryKey } from '@directus/types';
import express from 'express';
import getDatabase from '../database/index.js';
import { respond } from '../middleware/respond.js';
import useCollection from '../middleware/use-collection.js';
import { validateBatch } from '../middleware/validate-batch.js';
import { fetchAccountabilityCollectionAccess } from '../permissions/modules/fetch-accountability-collection-access/fetch-accountability-collection-access.js';
import { MetaService } from '../services/meta.js';
import { PermissionsService } from '../services/permissions/index.js';
import { PermissionsService } from '../services/permissions.js';
import asyncHandler from '../utils/async-handler.js';
import { sanitizeQuery } from '../utils/sanitize-query.js';
@@ -86,6 +88,22 @@ const readHandler = asyncHandler(async (req, res, next) => {
router.get('/', validateBatch('read'), readHandler, respond);
router.search('/', validateBatch('read'), readHandler, respond);

router.get(
  '/me',
  asyncHandler(async (req, res, next) => {
    if (!req.accountability?.user && !req.accountability?.role) throw new ForbiddenError();

    const result = await fetchAccountabilityCollectionAccess(req.accountability, {
      schema: req.schema,
      knex: getDatabase(),
    });

    res.locals['payload'] = { data: result };
    return next();
  }),
  respond,
);

router.get(
  '/:pk',
  asyncHandler(async (req, res, next) => {
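The new `GET /permissions/me` route lets an authenticated client fetch its own per-collection access summary without admin rights. An illustrative request; the URL and token are placeholders, and the response shape is produced by `fetchAccountabilityCollectionAccess`, which is not part of this diff:

```ts
// Illustrative only; host and token are placeholders.
const res = await fetch('https://example.directus.app/permissions/me', {
  headers: { Authorization: 'Bearer <user-token>' },
});

const { data } = await res.json();
// `data` is the caller's per-collection access summary; its exact shape
// comes from fetchAccountabilityCollectionAccess and is not shown in this excerpt.
```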
api/src/controllers/policies.ts | 229 (new file)
@@ -0,0 +1,229 @@
import { ErrorCode, ForbiddenError, isDirectusError } from '@directus/errors';
import type { PrimaryKey } from '@directus/types';
import express from 'express';
import getDatabase from '../database/index.js';
import { respond } from '../middleware/respond.js';
import useCollection from '../middleware/use-collection.js';
import { validateBatch } from '../middleware/validate-batch.js';
import { fetchAccountabilityPolicyGlobals } from '../permissions/modules/fetch-accountability-policy-globals/fetch-accountability-policy-globals.js';
import { MetaService } from '../services/meta.js';
import { PoliciesService } from '../services/policies.js';
import asyncHandler from '../utils/async-handler.js';
import { sanitizeQuery } from '../utils/sanitize-query.js';

const router = express.Router();

router.use(useCollection('directus_policies'));

router.post(
  '/',
  asyncHandler(async (req, res, next) => {
    const service = new PoliciesService({
      accountability: req.accountability,
      schema: req.schema,
    });

    const savedKeys: PrimaryKey[] = [];

    if (Array.isArray(req.body)) {
      const keys = await service.createMany(req.body);
      savedKeys.push(...keys);
    } else {
      const key = await service.createOne(req.body);
      savedKeys.push(key);
    }

    try {
      if (Array.isArray(req.body)) {
        const items = await service.readMany(savedKeys, req.sanitizedQuery);
        res.locals['payload'] = { data: items };
      } else {
        const item = await service.readOne(savedKeys[0]!, req.sanitizedQuery);
        res.locals['payload'] = { data: item };
      }
    } catch (error: any) {
      if (isDirectusError(error, ErrorCode.Forbidden)) {
        return next();
      }

      throw error;
    }

    return next();
  }),
  respond,
);

const readHandler = asyncHandler(async (req, res, next) => {
  const service = new PoliciesService({
    accountability: req.accountability,
    schema: req.schema,
  });

  const metaService = new MetaService({
    accountability: req.accountability,
    schema: req.schema,
  });

  let result;

  if (req.body.keys) {
    result = await service.readMany(req.body.keys, req.sanitizedQuery);
  } else {
    result = await service.readByQuery(req.sanitizedQuery);
  }

  const meta = await metaService.getMetaForQuery('directus_policies', req.sanitizedQuery);

  res.locals['payload'] = { data: result, meta };
  return next();
});

router.get('/', validateBatch('read'), readHandler, respond);
router.search('/', validateBatch('read'), readHandler, respond);

router.get(
  '/me/globals',
  asyncHandler(async (req, res, next) => {
    try {
      if (!req.accountability?.user && !req.accountability?.role) throw new ForbiddenError();

      const result = await fetchAccountabilityPolicyGlobals(req.accountability, {
        schema: req.schema,
        knex: getDatabase(),
      });

      res.locals['payload'] = { data: result };
    } catch (error: any) {
      if (isDirectusError(error, ErrorCode.Forbidden)) {
        res.locals['payload'] = { data: { app_access: false } };
        return next();
      }

      throw error;
    }

    return next();
  }),
  respond,
);

router.get(
  '/:pk',
  asyncHandler(async (req, res, next) => {
    if (req.path.endsWith('me')) return next();

    const service = new PoliciesService({
      accountability: req.accountability,
      schema: req.schema,
    });

    const record = await service.readOne(req.params['pk']!, req.sanitizedQuery);

    res.locals['payload'] = { data: record };
    return next();
  }),
  respond,
);

router.patch(
  '/',
  validateBatch('update'),
  asyncHandler(async (req, res, next) => {
    const service = new PoliciesService({
      accountability: req.accountability,
      schema: req.schema,
    });

    let keys: PrimaryKey[] = [];

    if (Array.isArray(req.body)) {
      keys = await service.updateBatch(req.body);
    } else if (req.body.keys) {
      keys = await service.updateMany(req.body.keys, req.body.data);
    } else {
      const sanitizedQuery = sanitizeQuery(req.body.query, req.accountability);
      keys = await service.updateByQuery(sanitizedQuery, req.body.data);
    }

    try {
      const result = await service.readMany(keys, req.sanitizedQuery);
      res.locals['payload'] = { data: result };
    } catch (error: any) {
      if (isDirectusError(error, ErrorCode.Forbidden)) {
        return next();
      }

      throw error;
    }

    return next();
  }),
  respond,
);

router.patch(
  '/:pk',
  asyncHandler(async (req, res, next) => {
    const service = new PoliciesService({
      accountability: req.accountability,
      schema: req.schema,
    });

    const primaryKey = await service.updateOne(req.params['pk']!, req.body);

    try {
      const item = await service.readOne(primaryKey, req.sanitizedQuery);
      res.locals['payload'] = { data: item || null };
    } catch (error: any) {
      if (isDirectusError(error, ErrorCode.Forbidden)) {
        return next();
      }

      throw error;
    }

    return next();
  }),
  respond,
);

router.delete(
  '/',
  validateBatch('delete'),
  asyncHandler(async (req, _res, next) => {
    const service = new PoliciesService({
      accountability: req.accountability,
      schema: req.schema,
    });

    if (Array.isArray(req.body)) {
      await service.deleteMany(req.body);
    } else if (req.body.keys) {
      await service.deleteMany(req.body.keys);
    } else {
      const sanitizedQuery = sanitizeQuery(req.body.query, req.accountability);
      await service.deleteByQuery(sanitizedQuery);
    }

    return next();
  }),
  respond,
);

router.delete(
  '/:pk',
  asyncHandler(async (req, _res, next) => {
    const service = new PoliciesService({
      accountability: req.accountability,
      schema: req.schema,
    });

    await service.deleteOne(req.params['pk']!);

    return next();
  }),
  respond,
);

export default router;
@@ -1,4 +1,4 @@
import { ErrorCode, isDirectusError } from '@directus/errors';
import { ErrorCode, ForbiddenError, isDirectusError } from '@directus/errors';
import type { PrimaryKey } from '@directus/types';
import express from 'express';
import { respond } from '../middleware/respond.js';
@@ -73,6 +73,36 @@ const readHandler = asyncHandler(async (req, res, next) => {
router.get('/', validateBatch('read'), readHandler, respond);
router.search('/', validateBatch('read'), readHandler, respond);

router.get(
  '/me',
  asyncHandler(async (req, res, next) => {
    if (!req.accountability?.user && !req.accountability?.role) throw new ForbiddenError();

    const service = new RolesService({
      accountability: req.accountability,
      schema: req.schema,
    });

    const query = { ...req.sanitizedQuery, limit: -1 };

    try {
      const roles = await service.readMany(req.accountability.roles, query);

      res.locals['payload'] = { data: roles || null };
    } catch (error: any) {
      if (isDirectusError(error, ErrorCode.Forbidden)) {
        res.locals['payload'] = { data: req.accountability.roles.map((id) => ({ id })) };
        return next();
      }

      throw error;
    }

    return next();
  }),
  respond,
);

router.get(
  '/:pk',
  asyncHandler(async (req, res, next) => {
@@ -1,12 +1,12 @@
import type { PermissionsAction } from '@directus/types';
import { Router } from 'express';
import { RESUMABLE_UPLOADS } from '../constants.js';
import getDatabase from '../database/index.js';
import { validateAccess } from '../permissions/modules/validate-access/validate-access.js';
import { createTusServer } from '../services/tus/index.js';
import asyncHandler from '../utils/async-handler.js';
import { getSchema } from '../utils/get-schema.js';
import { scheduleSynchronizedJob, validateCron } from '../utils/schedule.js';
import { createTusServer } from '../services/tus/index.js';
import { AuthorizationService } from '../services/authorization.js';
import asyncHandler from '../utils/async-handler.js';
import type { PermissionsAction } from '@directus/types';
import { ForbiddenError } from '@directus/errors';
import { RESUMABLE_UPLOADS } from '../constants.js';

const mapAction = (method: string): PermissionsAction => {
  switch (method) {
@@ -22,30 +22,20 @@ const mapAction = (method: string): PermissionsAction => {
};

const checkFileAccess = asyncHandler(async (req, _res, next) => {
  const auth = new AuthorizationService({
    accountability: req.accountability,
    schema: req.schema,
  });

  if (!req.accountability?.admin) {
  if (req.accountability) {
    const action = mapAction(req.method);

    if (action === 'create') {
      // checkAccess doesn't seem to work as expected for "create" actions
      const hasPermission = Boolean(
        req.accountability?.permissions?.find((permission) => {
          return permission.collection === 'directus_files' && permission.action === action;
        }),
      );

      if (!hasPermission) throw new ForbiddenError();
    } else {
      try {
        await auth.checkAccess(action, 'directus_files');
      } catch (e) {
        throw new ForbiddenError();
      }
    }
    await validateAccess(
      {
        action,
        collection: 'directus_files',
        accountability: req.accountability,
      },
      {
        schema: req.schema,
        knex: getDatabase(),
      },
    );
  }

  return next();
@@ -5,7 +5,7 @@ import {
  InvalidPayloadError,
  isDirectusError,
} from '@directus/errors';
import type { PrimaryKey, RegisterUserInput, Role } from '@directus/types';
import type { PrimaryKey, RegisterUserInput } from '@directus/types';
import express from 'express';
import Joi from 'joi';
import checkRateLimit from '../middleware/rate-limiter-registration.js';
@@ -14,7 +14,6 @@ import useCollection from '../middleware/use-collection.js';
import { validateBatch } from '../middleware/validate-batch.js';
import { AuthenticationService } from '../services/authentication.js';
import { MetaService } from '../services/meta.js';
import { RolesService } from '../services/roles.js';
import { TFAService } from '../services/tfa.js';
import { UsersService } from '../services/users.js';
import asyncHandler from '../utils/async-handler.js';
@@ -376,38 +375,6 @@ router.post(
      throw new InvalidPayloadError({ reason: `"otp" is required` });
    }

    // Override permissions only when enforce TFA is enabled in role
    if (req.accountability.role) {
      const rolesService = new RolesService({
        schema: req.schema,
      });

      const role = (await rolesService.readOne(req.accountability.role)) as Role;

      if (role && role.enforce_tfa) {
        const existingPermission = await req.accountability.permissions?.find(
          (p) => p.collection === 'directus_users' && p.action === 'update',
        );

        if (existingPermission) {
          existingPermission.fields = ['tfa_secret'];
          existingPermission.permissions = { id: { _eq: req.accountability.user } };
          existingPermission.presets = null;
          existingPermission.validation = null;
        } else {
          (req.accountability.permissions || (req.accountability.permissions = [])).push({
            action: 'update',
            collection: 'directus_users',
            fields: ['tfa_secret'],
            permissions: { id: { _eq: req.accountability.user } },
            presets: null,
            role: req.accountability.role,
            validation: null,
          });
        }
      }
    }

    const service = new TFAService({
      accountability: req.accountability,
      schema: req.schema,
@@ -431,38 +398,6 @@ router.post(
      throw new InvalidPayloadError({ reason: `"otp" is required` });
    }

    // Override permissions only when enforce TFA is enabled in role
    if (req.accountability.role) {
      const rolesService = new RolesService({
        schema: req.schema,
      });

      const role = (await rolesService.readOne(req.accountability.role)) as Role;

      if (role && role.enforce_tfa) {
        const existingPermission = await req.accountability.permissions?.find(
          (p) => p.collection === 'directus_users' && p.action === 'update',
        );

        if (existingPermission) {
          existingPermission.fields = ['tfa_secret'];
          existingPermission.permissions = { id: { _eq: req.accountability.user } };
          existingPermission.presets = null;
          existingPermission.validation = null;
        } else {
          (req.accountability.permissions || (req.accountability.permissions = [])).push({
            action: 'update',
            collection: 'directus_users',
            fields: ['tfa_secret'],
            permissions: { id: { _eq: req.accountability.user } },
            presets: null,
            role: req.accountability.role,
            validation: null,
          });
        }
      }
    }

    const service = new TFAService({
      accountability: req.accountability,
      schema: req.schema,
api/src/database/get-ast-from-query/get-ast-from-query.ts | 121 (new file)
@@ -0,0 +1,121 @@
/**
 * Generate an AST based on a given collection and query
 */

import type { Accountability, Query, SchemaOverview } from '@directus/types';
import type { Knex } from 'knex';
import { cloneDeep, uniq } from 'lodash-es';
import { fetchAllowedFields } from '../../permissions/modules/fetch-allowed-fields/fetch-allowed-fields.js';
import type { AST } from '../../types/index.js';
import { parseFields } from './lib/parse-fields.js';

export interface GetAstFromQueryOptions {
  collection: string;
  query: Query;
  accountability: Accountability | null;
}

export interface GetAstFromQueryContext {
  knex: Knex;
  schema: SchemaOverview;
}

export async function getAstFromQuery(options: GetAstFromQueryOptions, context: GetAstFromQueryContext): Promise<AST> {
  options.query = cloneDeep(options.query);

  const ast: AST = {
    type: 'root',
    name: options.collection,
    query: options.query,
    children: [],
    cases: [],
  };

  let fields = ['*'];

  if (options.query.fields) {
    fields = options.query.fields;
  }

  /**
   * When using aggregate functions, you can't have any other regular fields
   * selected. This makes sure you never end up in a non-aggregate fields selection error
   */
  if (Object.keys(options.query.aggregate || {}).length > 0) {
    fields = [];
  }

  /**
   * Similarly, when grouping on a specific field, you can't have other non-aggregated fields.
   * The group query will override the fields query
   */
  if (options.query.group) {
    fields = options.query.group;
  }

  fields = uniq(fields);

  const deep = options.query.deep || {};

  // Prevent fields/deep from showing up in the query object in further use
  delete options.query.fields;
  delete options.query.deep;

  if (!options.query.sort) {
    // We'll default to the primary key for the standard sort output
    let sortField: string | null = context.schema.collections[options.collection]!.primary;

    // If a custom manual sort field is configured, use that
    if (context.schema.collections[options.collection]?.sortField) {
      sortField = context.schema.collections[options.collection]!.sortField as string;
    }

    if (options.accountability && options.accountability.admin === false) {
      // Verify that the user has access to the sort field

      const allowedFields = await fetchAllowedFields(
        {
          collection: options.collection,
          action: 'read',
          accountability: options.accountability,
        },
        context,
      );

      if (allowedFields.length === 0) {
        sortField = null;
      } else if (allowedFields.includes('*') === false && allowedFields.includes(sortField) === false) {
        // If the sort field is not allowed, default to the first allowed field
        sortField = allowedFields[0]!;
      }
    }

    // When group by is used, default to the first column provided in the group by clause
    if (options.query.group?.[0]) {
      sortField = options.query.group[0];
    }

    if (sortField) {
      options.query.sort = [sortField];
    }
  }

  // When no group by is supplied, but an aggregate function is used, only a single row will be
  // returned. In those cases, we'll ignore the sort field altogether
  if (options.query.aggregate && Object.keys(options.query.aggregate).length && !options.query.group?.[0]) {
    delete options.query.sort;
  }

  ast.children = await parseFields(
    {
      parentCollection: options.collection,
      fields,
      query: options.query,
      deep,
      accountability: options.accountability,
    },
    context,
  );

  return ast;
}
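The field-selection rules in this file (an aggregate clears the regular field list, `group` overrides it, and the default sort falls back to the primary key or the first group column) can be made concrete with a small illustration. The collection and field names are made up, and a `context` object with `knex` and `schema` is assumed to be in scope:

```ts
// Illustrative only; 'articles' and its fields are hypothetical.
import { getAstFromQuery } from './get-ast-from-query.js';

// 1. Plain query: fields default to ['*'] and sort falls back to the primary key.
await getAstFromQuery({ collection: 'articles', query: {}, accountability: null }, context);

// 2. Aggregate without group: regular fields are dropped and the default sort is
//    removed, since only a single aggregate row comes back.
await getAstFromQuery(
  { collection: 'articles', query: { aggregate: { count: ['id'] } }, accountability: null },
  context,
);

// 3. Group by: the group columns become the field list and the first group
//    column becomes the default sort field.
await getAstFromQuery(
  { collection: 'articles', query: { aggregate: { count: ['id'] }, group: ['status'] }, accountability: null },
  context,
);
```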
110
api/src/database/get-ast-from-query/lib/convert-wildcards.ts
Normal file
110
api/src/database/get-ast-from-query/lib/convert-wildcards.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import type { Accountability, Query, SchemaOverview } from '@directus/types';
|
||||
import type { Knex } from 'knex';
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
import { fetchAllowedFields } from '../../../permissions/modules/fetch-allowed-fields/fetch-allowed-fields.js';
|
||||
import { getRelation } from '../utils/get-relation.js';
|
||||
|
||||
export interface ConvertWildcardsOptions {
|
||||
parentCollection: string;
|
||||
fields: string[];
|
||||
query: Query;
|
||||
accountability: Accountability | null;
|
||||
}
|
||||
|
||||
export interface ConvertWildCardsContext {
|
||||
schema: SchemaOverview;
|
||||
knex: Knex;
|
||||
}
|
||||
|
||||
export async function convertWildcards(options: ConvertWildcardsOptions, context: ConvertWildCardsContext) {
|
||||
const fields = cloneDeep(options.fields);
|
||||
|
||||
const fieldsInCollection = Object.entries(context.schema.collections[options.parentCollection]!.fields).map(
|
||||
([name]) => name,
|
||||
);
|
||||
|
||||
let allowedFields: string[] | null = fieldsInCollection;
|
||||
|
||||
if (options.accountability && options.accountability.admin === false) {
|
||||
allowedFields = await fetchAllowedFields(
|
||||
{
|
||||
collection: options.parentCollection,
|
||||
action: 'read',
|
||||
accountability: options.accountability,
|
||||
},
|
||||
context,
|
||||
);
|
||||
}
|
||||
|
||||
if (!allowedFields || allowedFields.length === 0) return [];
|
||||
|
||||
// In case of full read permissions
|
||||
if (allowedFields[0] === '*') allowedFields = fieldsInCollection;
|
||||
|
||||
for (let index = 0; index < fields.length; index++) {
|
||||
const fieldKey = fields[index]!;
|
||||
|
||||
if (fieldKey.includes('*') === false) continue;
|
||||
|
||||
if (fieldKey === '*') {
|
||||
const aliases = Object.keys(options.query.alias ?? {});
|
||||
|
||||
// Set to all fields in collection
|
||||
if (allowedFields.includes('*')) {
|
||||
fields.splice(index, 1, ...fieldsInCollection, ...aliases);
|
||||
} else {
|
||||
// Set to all allowed fields
|
||||
const allowedAliases = aliases.filter((fieldKey) => {
|
||||
const name = options.query.alias![fieldKey]!;
|
||||
return allowedFields!.includes(name);
|
||||
});
|
||||
|
||||
fields.splice(index, 1, ...allowedFields, ...allowedAliases);
|
||||
}
|
||||
}
|
||||
|
||||
// Swap *.* case for *,<relational-field>.*,<another-relational>.*
|
||||
if (fieldKey.includes('.') && fieldKey.split('.')[0] === '*') {
|
||||
const parts = fieldKey.split('.');
|
||||
|
||||
const relationalFields = allowedFields.includes('*')
|
||||
? context.schema.relations
|
||||
.filter(
|
||||
(relation) =>
|
||||
relation.collection === options.parentCollection ||
|
||||
relation.related_collection === options.parentCollection,
|
||||
)
|
||||
.map((relation) => {
|
||||
const isMany = relation.collection === options.parentCollection;
|
||||
return isMany ? relation.field : relation.meta?.one_field;
|
||||
})
|
||||
: allowedFields.filter((fieldKey) => !!getRelation(context.schema, options.parentCollection, fieldKey));
|
||||
|
||||
const nonRelationalFields = allowedFields.filter((fieldKey) => relationalFields.includes(fieldKey) === false);
|
||||
|
||||
const aliasFields = Object.keys(options.query.alias ?? {}).map((fieldKey) => {
|
||||
const name = options.query.alias![fieldKey];
|
||||
|
||||
if (relationalFields.includes(name)) {
|
||||
return `${fieldKey}.${parts.slice(1).join('.')}`;
|
||||
}
|
||||
|
||||
return fieldKey;
|
||||
});
|
||||
|
||||
fields.splice(
|
||||
index,
|
||||
1,
|
||||
...[
|
||||
...relationalFields.map((relationalField) => {
|
||||
return `${relationalField}.${parts.slice(1).join('.')}`;
|
||||
}),
|
||||
...nonRelationalFields,
|
||||
...aliasFields,
|
||||
],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return fields;
|
||||
}
|
||||
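The new `convertWildcards` helper above expands `*` selectors against the caller's read permissions. A minimal sketch of its observable behavior, assuming a hypothetical `articles` collection whose allowed read fields are `id`, `title`, and a relational `author` field, with `schema`, `knex`, and `accountability` already in scope:

```ts
// Illustrative only: collection and field names are made up for this sketch.
const expanded = await convertWildcards(
	{ parentCollection: 'articles', fields: ['*'], query: {}, accountability },
	{ schema, knex },
);
// -> ['id', 'title', 'author']  (the bare wildcard becomes the allowed fields)

const expandedNested = await convertWildcards(
	{ parentCollection: 'articles', fields: ['*.*'], query: {}, accountability },
	{ schema, knex },
);
// -> ['author.*', 'id', 'title']  (only relational allowed fields keep the nested wildcard)
```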
277
api/src/database/get-ast-from-query/lib/parse-fields.ts
Normal file
@@ -0,0 +1,277 @@
|
||||
import { REGEX_BETWEEN_PARENS } from '@directus/constants';
|
||||
import type { Accountability, Query, SchemaOverview } from '@directus/types';
|
||||
import type { Knex } from 'knex';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { fetchPermissions } from '../../../permissions/lib/fetch-permissions.js';
|
||||
import { fetchPolicies } from '../../../permissions/lib/fetch-policies.js';
|
||||
import type { FieldNode, FunctionFieldNode, NestedCollectionNode } from '../../../types/index.js';
|
||||
import { getRelationType } from '../../../utils/get-relation-type.js';
|
||||
import { getDeepQuery } from '../utils/get-deep-query.js';
|
||||
import { getRelatedCollection } from '../utils/get-related-collection.js';
|
||||
import { getRelation } from '../utils/get-relation.js';
|
||||
import { convertWildcards } from './convert-wildcards.js';
|
||||
|
||||
interface CollectionScope {
|
||||
[collectionScope: string]: string[];
|
||||
}
|
||||
|
||||
export interface ParseFieldsOptions {
|
||||
accountability: Accountability | null;
|
||||
parentCollection: string;
|
||||
fields: string[] | null;
|
||||
query: Query;
|
||||
deep?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface ParseFieldsContext {
|
||||
schema: SchemaOverview;
|
||||
knex: Knex;
|
||||
}
|
||||
|
||||
export async function parseFields(
|
||||
options: ParseFieldsOptions,
|
||||
context: ParseFieldsContext,
|
||||
): Promise<[] | (NestedCollectionNode | FieldNode | FunctionFieldNode)[]> {
|
||||
let { fields } = options;
|
||||
if (!fields) return [];
|
||||
|
||||
fields = await convertWildcards(
|
||||
{
|
||||
fields,
|
||||
parentCollection: options.parentCollection,
|
||||
query: options.query,
|
||||
accountability: options.accountability,
|
||||
},
|
||||
context,
|
||||
);
|
||||
|
||||
if (!fields || !Array.isArray(fields)) return [];
|
||||
|
||||
const children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[] = [];
|
||||
|
||||
const policies =
|
||||
options.accountability && options.accountability.admin === false
|
||||
? await fetchPolicies(options.accountability, context)
|
||||
: null;
|
||||
|
||||
const relationalStructure: Record<string, string[] | CollectionScope> = Object.create(null);
|
||||
|
||||
for (const fieldKey of fields) {
|
||||
let name = fieldKey;
|
||||
|
||||
if (options.query.alias) {
|
||||
// check for field alias (is one of the key)
|
||||
if (name in options.query.alias) {
|
||||
name = options.query.alias[fieldKey]!;
|
||||
}
|
||||
}
|
||||
|
||||
const isRelational =
|
||||
name.includes('.') ||
|
||||
// We'll always treat top level o2m fields as a related item. This is an alias field, otherwise it won't return
|
||||
// anything
|
||||
!!context.schema.relations.find(
|
||||
(relation) => relation.related_collection === options.parentCollection && relation.meta?.one_field === name,
|
||||
);
|
||||
|
||||
if (isRelational) {
|
||||
// field is relational
|
||||
const parts = fieldKey.split('.');
|
||||
|
||||
let rootField = parts[0]!;
|
||||
let collectionScope: string | null = null;
|
||||
|
||||
// a2o related collection scoped field selector `fields=sections.section_id:headings.title`
|
||||
if (rootField.includes(':')) {
|
||||
const [key, scope] = rootField.split(':');
|
||||
rootField = key!;
|
||||
collectionScope = scope!;
|
||||
}
|
||||
|
||||
if (rootField in relationalStructure === false) {
|
||||
if (collectionScope) {
|
||||
relationalStructure[rootField] = { [collectionScope]: [] };
|
||||
} else {
|
||||
relationalStructure[rootField] = [];
|
||||
}
|
||||
}
|
||||
|
||||
if (parts.length > 1) {
|
||||
const childKey = parts.slice(1).join('.');
|
||||
|
||||
if (collectionScope) {
|
||||
if (collectionScope in relationalStructure[rootField]! === false) {
|
||||
(relationalStructure[rootField] as CollectionScope)[collectionScope] = [];
|
||||
}
|
||||
|
||||
(relationalStructure[rootField] as CollectionScope)[collectionScope]!.push(childKey);
|
||||
} else {
|
||||
(relationalStructure[rootField] as string[]).push(childKey);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (name.includes('(') && name.includes(')')) {
|
||||
const columnName = name.match(REGEX_BETWEEN_PARENS)![1]!;
|
||||
const foundField = context.schema.collections[options.parentCollection]!.fields[columnName];
|
||||
|
||||
if (foundField && foundField.type === 'alias') {
|
||||
const foundRelation = context.schema.relations.find(
|
||||
(relation) =>
|
||||
relation.related_collection === options.parentCollection && relation.meta?.one_field === columnName,
|
||||
);
|
||||
|
||||
if (foundRelation) {
|
||||
children.push({
|
||||
type: 'functionField',
|
||||
name,
|
||||
fieldKey,
|
||||
query: {},
|
||||
relatedCollection: foundRelation.collection,
|
||||
whenCase: [],
|
||||
cases: [],
|
||||
});
|
||||
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (name.includes(':')) {
|
||||
const [key, scope] = name.split(':') as [string, string];
|
||||
|
||||
if (key in relationalStructure === false) {
|
||||
relationalStructure[key] = { [scope]: [] };
|
||||
} else if (scope in (relationalStructure[key] as CollectionScope) === false) {
|
||||
(relationalStructure[key] as CollectionScope)[scope] = [];
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
children.push({ type: 'field', name, fieldKey, whenCase: [] });
|
||||
}
|
||||
}
|
||||
|
||||
for (const [fieldKey, nestedFields] of Object.entries(relationalStructure)) {
|
||||
let fieldName = fieldKey;
|
||||
|
||||
if (options.query.alias && fieldKey in options.query.alias) {
|
||||
fieldName = options.query.alias[fieldKey]!;
|
||||
}
|
||||
|
||||
const relatedCollection = getRelatedCollection(context.schema, options.parentCollection, fieldName);
|
||||
const relation = getRelation(context.schema, options.parentCollection, fieldName);
|
||||
|
||||
if (!relation) continue;
|
||||
|
||||
const relationType = getRelationType({
|
||||
relation,
|
||||
collection: options.parentCollection,
|
||||
field: fieldName,
|
||||
});
|
||||
|
||||
if (!relationType) continue;
|
||||
|
||||
let child: NestedCollectionNode | null = null;
|
||||
|
||||
if (relationType === 'a2o') {
|
||||
const allowedCollections = relation.meta!.one_allowed_collections!;
|
||||
|
||||
child = {
|
||||
type: 'a2o',
|
||||
names: allowedCollections,
|
||||
children: {},
|
||||
query: {},
|
||||
relatedKey: {},
|
||||
parentKey: context.schema.collections[options.parentCollection]!.primary,
|
||||
fieldKey: fieldKey,
|
||||
relation: relation,
|
||||
cases: {},
|
||||
whenCase: [],
|
||||
};
|
||||
|
||||
for (const relatedCollection of allowedCollections) {
|
||||
child.children[relatedCollection] = await parseFields(
|
||||
{
|
||||
parentCollection: relatedCollection,
|
||||
fields: Array.isArray(nestedFields)
|
||||
? nestedFields
|
||||
: (nestedFields as CollectionScope)[relatedCollection] || [],
|
||||
query: options.query,
|
||||
deep: options.deep?.[`${fieldKey}:${relatedCollection}`],
|
||||
accountability: options.accountability,
|
||||
},
|
||||
context,
|
||||
);
|
||||
|
||||
child.query[relatedCollection] = getDeepQuery(options.deep?.[`${fieldKey}:${relatedCollection}`] || {});
|
||||
|
||||
child.relatedKey[relatedCollection] = context.schema.collections[relatedCollection]!.primary;
|
||||
}
|
||||
} else if (relatedCollection) {
|
||||
if (options.accountability && options.accountability.admin === false && policies) {
|
||||
const permissions = await fetchPermissions(
|
||||
{
|
||||
action: 'read',
|
||||
collections: [relatedCollection],
|
||||
policies: policies,
|
||||
accountability: options.accountability,
|
||||
},
|
||||
context,
|
||||
);
|
||||
|
||||
// Skip related collection if no permissions
|
||||
if (permissions.length === 0) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// update query alias for children parseFields
|
||||
const deepAlias = getDeepQuery(options.deep?.[fieldKey] || {})?.['alias'];
|
||||
if (!isEmpty(deepAlias)) options.query.alias = deepAlias;
|
||||
|
||||
child = {
|
||||
type: relationType,
|
||||
name: relatedCollection,
|
||||
fieldKey: fieldKey,
|
||||
parentKey: context.schema.collections[options.parentCollection]!.primary,
|
||||
relatedKey: context.schema.collections[relatedCollection]!.primary,
|
||||
relation: relation,
|
||||
query: getDeepQuery(options.deep?.[fieldKey] || {}),
|
||||
children: await parseFields(
|
||||
{
|
||||
parentCollection: relatedCollection,
|
||||
fields: nestedFields as string[],
|
||||
query: options.query,
|
||||
deep: options.deep?.[fieldKey] || {},
|
||||
accountability: options.accountability,
|
||||
},
|
||||
context,
|
||||
),
|
||||
cases: [],
|
||||
whenCase: [],
|
||||
};
|
||||
|
||||
if (relationType === 'o2m' && !child!.query.sort) {
|
||||
child!.query.sort = [relation.meta?.sort_field || context.schema.collections[relation.collection]!.primary];
|
||||
}
|
||||
}
|
||||
|
||||
if (child) {
|
||||
children.push(child);
|
||||
}
|
||||
}
|
||||
|
||||
// Deduplicate any children fields that are included both as a regular field, and as a nested m2o field
|
||||
const nestedCollectionNodes = children.filter((childNode) => childNode.type !== 'field');
|
||||
|
||||
return children.filter((childNode) => {
|
||||
const existsAsNestedRelational = !!nestedCollectionNodes.find(
|
||||
(nestedCollectionNode) => childNode.fieldKey === nestedCollectionNode.fieldKey,
|
||||
);
|
||||
|
||||
if (childNode.type === 'field' && existsAsNestedRelational) return false;
|
||||
|
||||
return true;
|
||||
});
|
||||
}
|
||||
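For orientation on the grouping pass above: `parseFields` first buckets each requested key under its root field before recursing, keeping a2o keys scoped per collection. A hedged sketch with hypothetical field names:

```ts
// Illustrative only: given fields = ['title', 'author.name', 'item:headings.text'],
// 'title' becomes a plain FieldNode and the rest is grouped as:
const relationalStructureExample: Record<string, string[] | Record<string, string[]>> = {
	author: ['name'], // m2o/o2m: child keys collected under the root field
	item: { headings: ['text'] }, // a2o: child keys collected per scoped collection
};
```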
21
api/src/database/get-ast-from-query/utils/get-deep-query.ts
Normal file
@@ -0,0 +1,21 @@
import { mapKeys, omitBy } from 'lodash-es';

/**
 * Convert Deep query object to regular query object by ignoring all nested fields and returning the
 * `_` prefixed fields as top level query fields
 *
 * @example
 *
 * ```js
 * getDeepQuery({
 *   _sort: ['a']
 * });
 * // => { sort: ['a'] }
 * ```
 */
export function getDeepQuery(query: Record<string, any>): Record<string, any> {
	return mapKeys(
		omitBy(query, (_value, key) => key.startsWith('_') === false),
		(_value, key) => key.substring(1),
	);
}
|
||||
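As a quick complement to the JSDoc example above, nested (non-underscore) keys are dropped entirely while `_`-prefixed keys are hoisted to the top level; field names below are illustrative:

```ts
getDeepQuery({ _limit: 5, _filter: { status: { _eq: 'published' } }, translations: { _limit: 1 } });
// => { limit: 5, filter: { status: { _eq: 'published' } } }
// ('translations' is ignored here; deeper levels are handled when parseFields recurses)
```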
@@ -0,0 +1,18 @@
import type { SchemaOverview } from '@directus/types';
import { getRelation } from './get-relation.js';

export function getRelatedCollection(schema: SchemaOverview, collection: string, field: string): string | null {
	const relation = getRelation(schema, collection, field);

	if (!relation) return null;

	if (relation.collection === collection && relation.field === field) {
		return relation.related_collection || null;
	}

	if (relation.related_collection === collection && relation.meta?.one_field === field) {
		return relation.collection || null;
	}

	return null;
}
|
||||
12
api/src/database/get-ast-from-query/utils/get-relation.ts
Normal file
@@ -0,0 +1,12 @@
import type { SchemaOverview } from '@directus/types';

export function getRelation(schema: SchemaOverview, collection: string, field: string) {
	const relation = schema.relations.find((relation) => {
		return (
			(relation.collection === collection && relation.field === field) ||
			(relation.related_collection === collection && relation.meta?.one_field === field)
		);
	});

	return relation;
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { Query, SchemaOverview } from '@directus/types';
|
||||
import type { Filter, Query, SchemaOverview } from '@directus/types';
|
||||
import type { Knex } from 'knex';
|
||||
import { applyFilter, generateAlias } from '../../../utils/apply-query.js';
|
||||
import type { AliasMap } from '../../../utils/get-column-path.js';
|
||||
@@ -7,6 +7,7 @@ import { DatabaseHelper } from '../types.js';
|
||||
export type FnHelperOptions = {
|
||||
type: string | undefined;
|
||||
query: Query | undefined;
|
||||
cases: Filter[] | undefined;
|
||||
originalCollectionName: string | undefined;
|
||||
};
|
||||
|
||||
@@ -66,6 +67,7 @@ export abstract class FnHelper extends DatabaseHelper {
|
||||
options.query.filter,
|
||||
relation.collection,
|
||||
aliasMap,
|
||||
options.cases ?? [],
|
||||
).query;
|
||||
}
|
||||
|
||||
|
||||
@@ -20,8 +20,9 @@ export class GeometryHelperMSSQL extends GeometryHelper {
|
||||
return table.specificType(field.field, 'geometry');
|
||||
}
|
||||
|
||||
override asText(table: string, column: string): Knex.Raw {
|
||||
return this.knex.raw('??.??.STAsText() as ??', [table, column, column]);
|
||||
override asText(table: string, column: string, alias: string | false): Knex.Raw {
|
||||
if (alias) return this.knex.raw('??.??.STAsText() as ??', [table, column, alias]);
|
||||
return this.knex.raw('??.??.STAsText()', [table, column]);
|
||||
}
|
||||
|
||||
override fromText(text: string): Knex.Raw {
|
||||
|
||||
@@ -5,7 +5,7 @@ export class GeometryHelperMySQL extends GeometryHelper {
|
||||
override collect(table: string, column: string): Knex.Raw {
|
||||
return this.knex.raw(
|
||||
`concat('geometrycollection(', group_concat(? separator ', '), ')'`,
|
||||
this.asText(table, column),
|
||||
this.asText(table, column, column),
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -20,8 +20,9 @@ export class GeometryHelperOracle extends GeometryHelper {
|
||||
return table.specificType(field.field, 'sdo_geometry');
|
||||
}
|
||||
|
||||
override asText(table: string, column: string): Knex.Raw {
|
||||
return this.knex.raw('sdo_util.to_wktgeometry(??.??) as ??', [table, column, column]);
|
||||
override asText(table: string, column: string, alias: string | false): Knex.Raw {
|
||||
if (alias) return this.knex.raw('sdo_util.to_wktgeometry(??.??) as ??', [table, column, alias]);
|
||||
return this.knex.raw('sdo_util.to_wktgeometry(??.??)', [table, column]);
|
||||
}
|
||||
|
||||
asGeoJSON(table: string, column: string): Knex.Raw {
|
||||
@@ -43,6 +44,6 @@ export class GeometryHelperOracle extends GeometryHelper {
|
||||
}
|
||||
|
||||
override collect(table: string, column: string): Knex.Raw {
|
||||
return this.knex.raw(`concat('geometrycollection(', listagg(?, ', '), ')'`, this.asText(table, column));
|
||||
return this.knex.raw(`concat('geometrycollection(', listagg(?, ', '), ')'`, this.asText(table, column, column));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,8 +22,9 @@ export abstract class GeometryHelper extends DatabaseHelper {
|
||||
return table.specificType(field.field, type);
|
||||
}
|
||||
|
||||
asText(table: string, column: string): Knex.Raw {
|
||||
return this.knex.raw('st_astext(??.??) as ??', [table, column, column]);
|
||||
asText(table: string, column: string, alias: string | false): Knex.Raw {
|
||||
if (alias) return this.knex.raw('st_astext(??.??) as ??', [table, column, alias]);
|
||||
return this.knex.raw('st_astext(??.??)', [table, column]);
|
||||
}
|
||||
|
||||
fromText(text: string): Knex.Raw {
|
||||
|
||||
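To make the new `alias` parameter above concrete: passing a string aliases the expression, while `false` yields the bare expression so callers such as the `collect()` overrides earlier in this diff can embed it. A rough sketch (identifier quoting varies by dialect):

```ts
// Base GeometryHelper behaviour after this change (output shown approximately):
helpers.st.asText('articles', 'location', 'location');
// -> st_astext(`articles`.`location`) as `location`
helpers.st.asText('articles', 'location', false);
// -> st_astext(`articles`.`location`)
```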
@@ -1,7 +1,8 @@
|
||||
import type { KNEX_TYPES } from '@directus/constants';
|
||||
import type { Options } from '../types.js';
|
||||
import type { Options, Sql } from '../types.js';
|
||||
import { SchemaHelper } from '../types.js';
|
||||
import { useEnv } from '@directus/env';
|
||||
import { preprocessBindings } from '../utils/preprocess-bindings.js';
|
||||
|
||||
const env = useEnv();
|
||||
|
||||
@@ -38,4 +39,8 @@ export class SchemaHelperCockroachDb extends SchemaHelper {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
override preprocessBindings(queryParams: Sql): Sql {
|
||||
return preprocessBindings(queryParams, { format: (index) => `$${index + 1}` });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import type { Knex } from 'knex';
|
||||
import { SchemaHelper } from '../types.js';
|
||||
import { SchemaHelper, type Sql } from '../types.js';
|
||||
import { preprocessBindings } from '../utils/preprocess-bindings.js';
|
||||
|
||||
export class SchemaHelperMSSQL extends SchemaHelper {
|
||||
override applyLimit(rootQuery: Knex.QueryBuilder, limit: number): void {
|
||||
@@ -30,4 +31,8 @@ export class SchemaHelperMSSQL extends SchemaHelper {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
override preprocessBindings(queryParams: Sql): Sql {
|
||||
return preprocessBindings(queryParams, { format: (index) => `@p${index}` });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import type { KNEX_TYPES } from '@directus/constants';
|
||||
import type { Field, Relation, Type } from '@directus/types';
|
||||
import type { Options } from '../types.js';
|
||||
import type { Options, Sql } from '../types.js';
|
||||
import { SchemaHelper } from '../types.js';
|
||||
import { preprocessBindings } from '../utils/preprocess-bindings.js';
|
||||
|
||||
export class SchemaHelperOracle extends SchemaHelper {
|
||||
override async changeToType(
|
||||
@@ -50,4 +51,8 @@ export class SchemaHelperOracle extends SchemaHelper {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
override preprocessBindings(queryParams: Sql): Sql {
|
||||
return preprocessBindings(queryParams, { format: (index) => `:${index + 1}` });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useEnv } from '@directus/env';
|
||||
import { SchemaHelper } from '../types.js';
|
||||
import { SchemaHelper, type Sql } from '../types.js';
|
||||
import { preprocessBindings } from '../utils/preprocess-bindings.js';
|
||||
|
||||
const env = useEnv();
|
||||
|
||||
@@ -13,4 +14,8 @@ export class SchemaHelperPostgres extends SchemaHelper {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
override preprocessBindings(queryParams: Sql): Sql {
|
||||
return preprocessBindings(queryParams, { format: (index) => `$${index + 1}` });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,11 @@ import { DatabaseHelper } from '../types.js';
|
||||
|
||||
export type Options = { nullable?: boolean; default?: any; length?: number };
|
||||
|
||||
export type Sql = {
|
||||
sql: string;
|
||||
bindings: readonly Knex.Value[];
|
||||
};
|
||||
|
||||
export abstract class SchemaHelper extends DatabaseHelper {
|
||||
isOneOfClients(clients: DatabaseClient[]): boolean {
|
||||
return clients.includes(getDatabaseClient(this.knex));
|
||||
@@ -146,4 +151,8 @@ export abstract class SchemaHelper extends DatabaseHelper {
|
||||
async getDatabaseSize(): Promise<number | null> {
|
||||
return null;
|
||||
}
|
||||
|
||||
preprocessBindings(queryParams: Sql): Sql {
|
||||
return queryParams;
|
||||
}
|
||||
}
|
||||
|
||||
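For reference, the dialect-specific `preprocessBindings` overrides in this diff differ only in the placeholder format they pass down; the base class leaves knex's default `?` placeholders untouched:

```ts
// Collected from the overrides shown above (indices are zero-based):
const placeholderFormats: Record<string, (index: number) => string> = {
	postgres: (index) => `$${index + 1}`, // $1, $2, ...
	cockroachdb: (index) => `$${index + 1}`,
	mssql: (index) => `@p${index}`, // @p0, @p1, ...
	oracle: (index) => `:${index + 1}`, // :1, :2, ...
};
```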
@@ -0,0 +1,37 @@
|
||||
import { test, expect } from 'vitest';
|
||||
import { preprocessBindings } from './preprocess-bindings.js';
|
||||
|
||||
const format = (index: number) => `$${index + 1}`;
|
||||
|
||||
test('Returns an escaped question mark, so it stays escaped', () => {
|
||||
expect(preprocessBindings(`SELECT * FROM table WHERE column = "\\?"`, { format }).sql).toEqual(
|
||||
'SELECT * FROM table WHERE column = "\\?"',
|
||||
);
|
||||
|
||||
expect(preprocessBindings(`SELECT * FROM table WHERE column = "\\\\\\?"`, { format }).sql).toEqual(
|
||||
'SELECT * FROM table WHERE column = "\\\\\\?"',
|
||||
);
|
||||
});
|
||||
|
||||
test('Replaces question marks with $1, $2, etc.', () => {
|
||||
const bindings = preprocessBindings(
|
||||
{ sql: `SELECT * FROM table WHERE column = ? LIMIT ?`, bindings: [1, 100] },
|
||||
{ format },
|
||||
);
|
||||
|
||||
expect(bindings.sql).toEqual('SELECT * FROM table WHERE column = $1 LIMIT $2');
|
||||
expect(bindings.bindings).toEqual([1, 100]);
|
||||
});
|
||||
|
||||
test('Replaces question marks with $1, $2, etc. and skips duplicates', () => {
|
||||
const bindings = preprocessBindings(
|
||||
{
|
||||
sql: `SELECT * FROM table WHERE column = ? AND other = ? LIMIT ?`,
|
||||
bindings: [10, 'foo', 10],
|
||||
},
|
||||
{ format },
|
||||
);
|
||||
|
||||
expect(bindings.sql).toEqual('SELECT * FROM table WHERE column = $1 AND other = $2 LIMIT $1');
|
||||
expect(bindings.bindings).toEqual([10, 'foo']);
|
||||
});
|
||||
46
api/src/database/helpers/schema/utils/preprocess-bindings.ts
Normal file
@@ -0,0 +1,46 @@
import { isString } from 'lodash-es';
import type { Sql } from '../types.js';

export type PreprocessBindingsOptions = {
	format(index: number): string;
};

export function preprocessBindings(
	queryParams: (Partial<Sql> & Pick<Sql, 'sql'>) | string,
	options: PreprocessBindingsOptions,
) {
	const query: Sql = { bindings: [], ...(isString(queryParams) ? { sql: queryParams } : queryParams) };

	const bindingIndices: number[] = new Array(query.bindings.length);

	for (let i = 0; i < query.bindings.length; i++) {
		const binding = query.bindings[i];
		const prevIndex = query.bindings.findIndex((b, j) => j < i && b === binding);

		if (prevIndex !== -1) {
			bindingIndices[i] = prevIndex;
		} else {
			bindingIndices[i] = i;
		}
	}

	let matchIndex = 0;
	let currentBindingIndex = 0;

	const sql = query.sql.replace(/(\\*)(\?)/g, function (_, escapes) {
		if (escapes.length % 2) {
			// Return an escaped question mark, so it stays escaped
			return `${'\\'.repeat(escapes.length)}?`;
		} else {
			const bindingIndex =
				bindingIndices[matchIndex] === matchIndex ? currentBindingIndex++ : bindingIndices[matchIndex]!;

			matchIndex++;
			return options.format(bindingIndex);
		}
	});

	const bindings = query.bindings.filter((_, i) => bindingIndices[i] === i);

	return { ...query, sql, bindings };
}
|
||||
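A short usage sketch of the helper above, mirroring the unit tests: identical binding values are collapsed to one parameter that is referenced multiple times in the rewritten SQL:

```ts
const out = preprocessBindings(
	{ sql: 'select * from "t" where "a" = ? and "b" = ? and "c" = ?', bindings: [1, 'x', 1] },
	{ format: (index) => `$${index + 1}` },
);
// out.sql      === 'select * from "t" where "a" = $1 and "b" = $2 and "c" = $1'
// out.bindings === [1, 'x']
```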
@@ -1,6 +1,7 @@
|
||||
import { useEnv } from '@directus/env';
|
||||
import type { SchemaInspector } from '@directus/schema';
|
||||
import { createInspector } from '@directus/schema';
|
||||
import { isObject } from '@directus/utils';
|
||||
import fse from 'fs-extra';
|
||||
import type { Knex } from 'knex';
|
||||
import knex from 'knex';
|
||||
@@ -143,6 +144,11 @@ export function getDatabase(): Knex {
|
||||
}
|
||||
|
||||
if (client === 'mysql') {
|
||||
// Remove the conflicting `filename` option, defined by default in the Docker Image
|
||||
if (isObject(knexConfig.connection)) delete knexConfig.connection['filename'];
|
||||
|
||||
Object.assign(knexConfig, { client: 'mysql2' });
|
||||
|
||||
poolConfig.afterCreate = async (conn: any, callback: any) => {
|
||||
logger.trace('Retrieving database version');
|
||||
const run = promisify(conn.query.bind(conn));
|
||||
@@ -243,7 +249,7 @@ export function getDatabaseClient(database?: Knex): DatabaseClient {
|
||||
database = database ?? getDatabase();
|
||||
|
||||
switch (database.client.constructor.name) {
|
||||
case 'Client_MySQL':
|
||||
case 'Client_MySQL2':
|
||||
return 'mysql';
|
||||
case 'Client_PG':
|
||||
return 'postgres';
|
||||
|
||||
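In short, the hunk above keeps the user-facing `DB_CLIENT=mysql` value while handing knex the `mysql2` driver, and `getDatabaseClient()` maps both driver constructor names back to `'mysql'`:

```ts
// Simplified excerpt of the effect (not the full getDatabase() implementation):
if (client === 'mysql') {
	Object.assign(knexConfig, { client: 'mysql2' });
}
// switch (database.client.constructor.name) { case 'Client_MySQL': case 'Client_MySQL2': return 'mysql'; ... }
```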
430
api/src/database/migrations/20240806A-permissions-policies.ts
Normal file
@@ -0,0 +1,430 @@
|
||||
import { processChunk, toBoolean } from '@directus/utils';
|
||||
import type { Knex } from 'knex';
|
||||
import { flatten, intersection, isEqual, merge, omit, uniq } from 'lodash-es';
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import { fetchPermissions } from '../../permissions/lib/fetch-permissions.js';
|
||||
import { fetchPolicies } from '../../permissions/lib/fetch-policies.js';
|
||||
import { fetchRolesTree } from '../../permissions/lib/fetch-roles-tree.js';
|
||||
import { getSchema } from '../../utils/get-schema.js';
|
||||
|
||||
import type { LogicalFilterAND, LogicalFilterOR, Permission } from '@directus/types';
|
||||
|
||||
type RoleAccess = {
|
||||
app_access: boolean;
|
||||
admin_access: boolean;
|
||||
ip_access: string | null;
|
||||
enforce_tfa: boolean;
|
||||
};
|
||||
|
||||
// Adapted from https://github.com/directus/directus/blob/141b8adbf4dd8e06530a7929f34e3fc68a522053/api/src/utils/merge-permissions.ts#L4
|
||||
export function mergePermissions(strategy: 'and' | 'or', ...permissions: Permission[][]) {
|
||||
const allPermissions = flatten(permissions);
|
||||
|
||||
const mergedPermissions = allPermissions
|
||||
.reduce((acc, val) => {
|
||||
const key = `${val.collection}__${val.action}`;
|
||||
const current = acc.get(key);
|
||||
acc.set(key, current ? mergePermission(strategy, current, val) : val);
|
||||
return acc;
|
||||
}, new Map())
|
||||
.values();
|
||||
|
||||
return Array.from(mergedPermissions);
|
||||
}
|
||||
|
||||
export function mergePermission(
|
||||
strategy: 'and' | 'or',
|
||||
currentPerm: Permission,
|
||||
newPerm: Permission,
|
||||
): Omit<Permission, 'id' | 'system'> {
|
||||
const logicalKey = `_${strategy}` as keyof LogicalFilterOR | keyof LogicalFilterAND;
|
||||
|
||||
let { permissions, validation, fields, presets } = currentPerm;
|
||||
|
||||
if (newPerm.permissions) {
|
||||
if (currentPerm.permissions && Object.keys(currentPerm.permissions)[0] === logicalKey) {
|
||||
permissions = {
|
||||
[logicalKey]: [
|
||||
...(currentPerm.permissions as LogicalFilterOR & LogicalFilterAND)[logicalKey],
|
||||
newPerm.permissions,
|
||||
],
|
||||
} as LogicalFilterAND | LogicalFilterOR;
|
||||
} else if (currentPerm.permissions) {
|
||||
// Empty {} supersedes other permissions in _OR merge
|
||||
if (strategy === 'or' && (isEqual(currentPerm.permissions, {}) || isEqual(newPerm.permissions, {}))) {
|
||||
permissions = {};
|
||||
} else {
|
||||
permissions = {
|
||||
[logicalKey]: [currentPerm.permissions, newPerm.permissions],
|
||||
} as LogicalFilterAND | LogicalFilterOR;
|
||||
}
|
||||
} else {
|
||||
permissions = {
|
||||
[logicalKey]: [newPerm.permissions],
|
||||
} as LogicalFilterAND | LogicalFilterOR;
|
||||
}
|
||||
}
|
||||
|
||||
if (newPerm.validation) {
|
||||
if (currentPerm.validation && Object.keys(currentPerm.validation)[0] === logicalKey) {
|
||||
validation = {
|
||||
[logicalKey]: [
|
||||
...(currentPerm.validation as LogicalFilterOR & LogicalFilterAND)[logicalKey],
|
||||
newPerm.validation,
|
||||
],
|
||||
} as LogicalFilterAND | LogicalFilterOR;
|
||||
} else if (currentPerm.validation) {
|
||||
// Empty {} supersedes other validations in _OR merge
|
||||
if (strategy === 'or' && (isEqual(currentPerm.validation, {}) || isEqual(newPerm.validation, {}))) {
|
||||
validation = {};
|
||||
} else {
|
||||
validation = {
|
||||
[logicalKey]: [currentPerm.validation, newPerm.validation],
|
||||
} as LogicalFilterAND | LogicalFilterOR;
|
||||
}
|
||||
} else {
|
||||
validation = {
|
||||
[logicalKey]: [newPerm.validation],
|
||||
} as LogicalFilterAND | LogicalFilterOR;
|
||||
}
|
||||
}
|
||||
|
||||
if (newPerm.fields) {
|
||||
if (Array.isArray(currentPerm.fields) && strategy === 'or') {
|
||||
fields = uniq([...currentPerm.fields, ...newPerm.fields]);
|
||||
} else if (Array.isArray(currentPerm.fields) && strategy === 'and') {
|
||||
fields = intersection(currentPerm.fields, newPerm.fields);
|
||||
} else {
|
||||
fields = newPerm.fields;
|
||||
}
|
||||
|
||||
if (fields.includes('*')) fields = ['*'];
|
||||
}
|
||||
|
||||
if (newPerm.presets) {
|
||||
presets = merge({}, presets, newPerm.presets);
|
||||
}
|
||||
|
||||
return omit(
|
||||
{
|
||||
...currentPerm,
|
||||
permissions,
|
||||
validation,
|
||||
fields,
|
||||
presets,
|
||||
},
|
||||
['id', 'system'],
|
||||
);
|
||||
}
|
||||
|
||||
async function fetchRoleAccess(roles: string[], context: { knex: Knex }) {
|
||||
const roleAccess: RoleAccess = {
|
||||
admin_access: false,
|
||||
app_access: false,
|
||||
ip_access: null,
|
||||
enforce_tfa: false,
|
||||
};
|
||||
|
||||
const accessRows = await context
|
||||
.knex('directus_access')
|
||||
.select(
|
||||
'directus_policies.id',
|
||||
'directus_policies.admin_access',
|
||||
'directus_policies.app_access',
|
||||
'directus_policies.ip_access',
|
||||
'directus_policies.enforce_tfa',
|
||||
)
|
||||
.where('role', 'in', roles)
|
||||
.leftJoin('directus_policies', 'directus_policies.id', 'directus_access.policy');
|
||||
|
||||
const ipAccess = new Set();
|
||||
|
||||
for (const { admin_access, app_access, ip_access, enforce_tfa } of accessRows) {
|
||||
roleAccess.admin_access ||= toBoolean(admin_access);
|
||||
roleAccess.app_access ||= toBoolean(app_access);
|
||||
roleAccess.enforce_tfa ||= toBoolean(enforce_tfa);
|
||||
|
||||
if (ip_access && ip_access.length) {
|
||||
ip_access.split(',').forEach((ip: string) => ipAccess.add(ip));
|
||||
}
|
||||
}
|
||||
|
||||
if (ipAccess.size > 0) {
|
||||
roleAccess.ip_access = Array.from(ipAccess).join(',');
|
||||
}
|
||||
|
||||
return roleAccess;
|
||||
}
|
||||
|
||||
/**
|
||||
* The public role used to be `null`, we gotta create a single new policy for the permissions
|
||||
* previously attached to the public role (marked through `role = null`).
|
||||
*/
|
||||
const PUBLIC_POLICY_ID = 'abf8a154-5b1c-4a46-ac9c-7300570f4f17';
|
||||
|
||||
export async function up(knex: Knex) {
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// If the policies table already exists the migration has already run
|
||||
if (await knex.schema.hasTable('directus_policies')) {
|
||||
return;
|
||||
}
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Create new policies table that mirrors previous Roles
|
||||
|
||||
await knex.schema.createTable('directus_policies', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.string('name', 100).notNullable();
|
||||
table.string('icon', 64).notNullable().defaultTo('badge');
|
||||
table.text('description');
|
||||
table.text('ip_access');
|
||||
table.boolean('enforce_tfa').defaultTo(false).notNullable();
|
||||
table.boolean('admin_access').defaultTo(false).notNullable();
|
||||
table.boolean('app_access').defaultTo(false).notNullable();
|
||||
});
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Copy over all existing roles into new policies
|
||||
|
||||
const roles = await knex
|
||||
.select('id', 'name', 'icon', 'description', 'ip_access', 'enforce_tfa', 'admin_access', 'app_access')
|
||||
.from('directus_roles');
|
||||
|
||||
if (roles.length > 0) {
|
||||
await processChunk(roles, 100, async (chunk) => {
|
||||
await knex('directus_policies').insert(chunk);
|
||||
});
|
||||
}
|
||||
|
||||
await knex
|
||||
.insert({
|
||||
id: PUBLIC_POLICY_ID,
|
||||
name: '$t:public_label',
|
||||
icon: 'public',
|
||||
description: '$t:public_description',
|
||||
app_access: false,
|
||||
})
|
||||
.into('directus_policies');
|
||||
|
||||
// Change the admin policy description to $t:admin_policy_description
|
||||
await knex('directus_policies')
|
||||
.update({
|
||||
description: '$t:admin_policy_description',
|
||||
})
|
||||
.where('description', 'LIKE', '$t:admin_description');
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Remove access control + add nesting to roles
|
||||
|
||||
await knex.schema.alterTable('directus_roles', (table) => {
|
||||
table.dropColumn('ip_access');
|
||||
table.dropColumn('enforce_tfa');
|
||||
table.dropColumn('admin_access');
|
||||
table.dropColumn('app_access');
|
||||
|
||||
table.uuid('parent').references('directus_roles.id');
|
||||
});
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Link permissions to policies instead of roles
|
||||
|
||||
await knex.schema.alterTable('directus_permissions', (table) => {
|
||||
table.uuid('policy').references('directus_policies.id').onDelete('CASCADE');
|
||||
// Drop the foreign key constraint here in order to update `null` role to public policy ID
|
||||
table.dropForeign('role');
|
||||
});
|
||||
|
||||
await knex('directus_permissions')
|
||||
.update({
|
||||
role: PUBLIC_POLICY_ID,
|
||||
})
|
||||
.whereNull('role');
|
||||
|
||||
await knex('directus_permissions').update({
|
||||
policy: knex.ref('role'),
|
||||
});
|
||||
|
||||
await knex.schema.alterTable('directus_permissions', (table) => {
|
||||
table.dropColumns('role');
|
||||
table.dropNullable('policy');
|
||||
});
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Setup junction table between roles/users and policies
|
||||
|
||||
// This could be a A2O style setup with a collection/item field rather than individual foreign
|
||||
// keys, but we want to be able to show the reverse-relationship on the individual policies as
|
||||
// well, which would require the O2A type to exist in Directus which currently doesn't.
|
||||
// Shouldn't be the end of the world here, as we know we're only attaching policies to two other
|
||||
// collections.
|
||||
|
||||
await knex.schema.createTable('directus_access', (table) => {
|
||||
table.uuid('id').primary();
|
||||
table.uuid('role').references('directus_roles.id').nullable().onDelete('CASCADE');
|
||||
table.uuid('user').references('directus_users.id').nullable().onDelete('CASCADE');
|
||||
table.uuid('policy').references('directus_policies.id').notNullable().onDelete('CASCADE');
|
||||
table.integer('sort');
|
||||
});
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Attach policies to existing roles for backwards compatibility
|
||||
|
||||
const policyAttachments = roles.map((role) => ({
|
||||
id: randomUUID(),
|
||||
role: role.id,
|
||||
user: null,
|
||||
policy: role.id,
|
||||
sort: 1,
|
||||
}));
|
||||
|
||||
await processChunk(policyAttachments, 100, async (chunk) => {
|
||||
await knex('directus_access').insert(chunk);
|
||||
});
|
||||
|
||||
await knex('directus_access').insert({
|
||||
id: randomUUID(),
|
||||
role: null,
|
||||
user: null,
|
||||
policy: PUBLIC_POLICY_ID,
|
||||
sort: 1,
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex) {
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Reinstate access control fields on directus roles
|
||||
|
||||
await knex.schema.alterTable('directus_roles', (table) => {
|
||||
table.text('ip_access');
|
||||
table.boolean('enforce_tfa').defaultTo(false).notNullable();
|
||||
table.boolean('admin_access').defaultTo(false).notNullable();
|
||||
table.boolean('app_access').defaultTo(true).notNullable();
|
||||
});
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Copy policy access control rules back to roles
|
||||
|
||||
const originalPermissions = await knex
|
||||
.select('id')
|
||||
.from('directus_permissions')
|
||||
.whereNot({ policy: PUBLIC_POLICY_ID });
|
||||
|
||||
await knex.schema.alterTable('directus_permissions', (table) => {
|
||||
table.uuid('role').nullable();
|
||||
table.setNullable('policy');
|
||||
});
|
||||
|
||||
const context = { knex, schema: await getSchema() };
|
||||
|
||||
// fetch all roles
|
||||
const roles: Array<{ id: string | null }> = await knex.select('id').from('directus_roles');
|
||||
|
||||
// simulate Public Role
|
||||
roles.push({ id: null });
|
||||
|
||||
// role permissions to be inserted once all processing is completed
|
||||
const rolePermissions: Array<Omit<Permission, 'id' | 'system' | 'policy'> | { role: string | null }> = [];
|
||||
|
||||
for (const role of roles) {
|
||||
const roleTree = await fetchRolesTree(role.id, knex);
|
||||
|
||||
let roleAccess = null;
|
||||
|
||||
if (role.id !== null) {
|
||||
roleAccess = await fetchRoleAccess(roleTree, context);
|
||||
await knex('directus_roles').update(roleAccess).where({ id: role.id });
|
||||
}
|
||||
|
||||
if (roleAccess === null || !roleAccess.admin_access) {
|
||||
// fetch all of the roles policies
|
||||
const policies = await fetchPolicies({ roles: roleTree, user: null, ip: null }, context);
|
||||
|
||||
// fetch all of the policies permissions
|
||||
const rawPermissions = await fetchPermissions(
|
||||
{
|
||||
accountability: { role: null, roles: roleTree, user: null, app: roleAccess?.app_access || false },
|
||||
policies,
|
||||
bypassDynamicVariableProcessing: true,
|
||||
},
|
||||
context,
|
||||
);
|
||||
|
||||
// merge all permissions to single version (v10) and save for later use
|
||||
mergePermissions('or', rawPermissions).forEach((permission) => {
|
||||
// System permissions are automatically populated
|
||||
if (permission.system) {
|
||||
return;
|
||||
}
|
||||
|
||||
// convert merged permissions to storage ready format
|
||||
if (Array.isArray(permission.fields)) {
|
||||
permission.fields = permission.fields.join(',');
|
||||
}
|
||||
|
||||
if (permission.permissions) {
|
||||
permission.permissions = JSON.stringify(permission.permissions);
|
||||
}
|
||||
|
||||
if (permission.validation) {
|
||||
permission.validation = JSON.stringify(permission.validation);
|
||||
}
|
||||
|
||||
if (permission.presets) {
|
||||
permission.presets = JSON.stringify(permission.presets);
|
||||
}
|
||||
|
||||
rolePermissions.push({ role: role.id, ...omit(permission, ['id', 'policy']) });
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Remove role nesting support
|
||||
|
||||
await knex.schema.alterTable('directus_roles', (table) => {
|
||||
table.dropForeign('parent');
|
||||
table.dropColumn('parent');
|
||||
});
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Drop all permissions that are only attached to a user
|
||||
|
||||
// TODO query all policies that are attached to a user and delete their permissions,
|
||||
// since we don't know where to put them now and it'll cause a foreign key problem
|
||||
// as soon as we reference directus_roles in directus_permissions again
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Drop policy attachments
|
||||
|
||||
await knex.schema.dropTable('directus_access');
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Reattach permissions to roles instead of policies
|
||||
|
||||
await knex('directus_permissions')
|
||||
.update({
|
||||
role: null,
|
||||
})
|
||||
.where({ role: PUBLIC_POLICY_ID });
|
||||
|
||||
// remove all v11 permissions
|
||||
await processChunk(originalPermissions, 100, async (chunk) => {
|
||||
await knex('directus_permissions').delete(chunk);
|
||||
});
|
||||
|
||||
// insert all v10 permissions
|
||||
await processChunk(rolePermissions, 100, async (chunk) => {
|
||||
await knex('directus_permissions').insert(chunk);
|
||||
});
|
||||
|
||||
await knex.schema.alterTable('directus_permissions', (table) => {
|
||||
table.uuid('role').references('directus_roles.id').alter();
|
||||
table.dropForeign('policy');
|
||||
table.dropColumn('policy');
|
||||
});
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Drop policies table
|
||||
|
||||
await knex.schema.dropTable('directus_policies');
|
||||
}
|
||||
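As a mental model for `up()` above: every pre-existing role gets a policy with the same id, attached through `directus_access`, and the old `role = null` (public) permissions are re-homed onto the fixed public policy. Illustrative row shapes, with shortened ids:

```ts
// Not part of the migration; just the resulting data shape for one pre-existing role.
const examplePolicy = { id: 'role-a', name: 'Editors', admin_access: false, app_access: true }; // copied from directus_roles
const exampleAccessRows = [
	{ role: 'role-a', user: null, policy: 'role-a', sort: 1 }, // keeps the role's old behaviour
	{ role: null, user: null, policy: 'abf8a154-5b1c-4a46-ac9c-7300570f4f17', sort: 1 }, // public policy
];
// directus_permissions now references directus_policies via `policy`; the old `role` column is dropped.
```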
@@ -1,650 +0,0 @@
|
||||
import { useEnv } from '@directus/env';
|
||||
import type { Item, Query, SchemaOverview } from '@directus/types';
|
||||
import { toArray } from '@directus/utils';
|
||||
import type { Knex } from 'knex';
|
||||
import { clone, cloneDeep, isNil, merge, pick, uniq } from 'lodash-es';
|
||||
import { PayloadService } from '../services/payload.js';
|
||||
import type { AST, FieldNode, FunctionFieldNode, M2ONode, NestedCollectionNode } from '../types/ast.js';
|
||||
import { applyFunctionToColumnName } from '../utils/apply-function-to-column-name.js';
|
||||
import applyQuery, { applyLimit, applySort, generateAlias, type ColumnSortRecord } from '../utils/apply-query.js';
|
||||
import { getCollectionFromAlias } from '../utils/get-collection-from-alias.js';
|
||||
import type { AliasMap } from '../utils/get-column-path.js';
|
||||
import { getColumn } from '../utils/get-column.js';
|
||||
import { parseFilterKey } from '../utils/parse-filter-key.js';
|
||||
import { getHelpers } from './helpers/index.js';
|
||||
import getDatabase from './index.js';
|
||||
|
||||
type RunASTOptions = {
|
||||
/**
|
||||
* Query override for the current level
|
||||
*/
|
||||
query?: AST['query'];
|
||||
|
||||
/**
|
||||
* Knex instance
|
||||
*/
|
||||
knex?: Knex;
|
||||
|
||||
/**
|
||||
* Whether or not the current execution is a nested dataset in another AST
|
||||
*/
|
||||
nested?: boolean;
|
||||
|
||||
/**
|
||||
* Whether or not to strip out non-requested required fields automatically (eg IDs / FKs)
|
||||
*/
|
||||
stripNonRequested?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute a given AST using Knex. Returns array of items based on requested AST.
|
||||
*/
|
||||
export default async function runAST(
|
||||
originalAST: AST | NestedCollectionNode,
|
||||
schema: SchemaOverview,
|
||||
options?: RunASTOptions,
|
||||
): Promise<null | Item | Item[]> {
|
||||
const ast = cloneDeep(originalAST);
|
||||
|
||||
const knex = options?.knex || getDatabase();
|
||||
|
||||
if (ast.type === 'a2o') {
|
||||
const results: { [collection: string]: null | Item | Item[] } = {};
|
||||
|
||||
for (const collection of ast.names) {
|
||||
results[collection] = await run(collection, ast.children[collection]!, ast.query[collection]!);
|
||||
}
|
||||
|
||||
return results;
|
||||
} else {
|
||||
return await run(ast.name, ast.children, options?.query || ast.query);
|
||||
}
|
||||
|
||||
async function run(
|
||||
collection: string,
|
||||
children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[],
|
||||
query: Query,
|
||||
) {
|
||||
const env = useEnv();
|
||||
|
||||
// Retrieve the database columns to select in the current AST
|
||||
const { fieldNodes, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel(
|
||||
schema,
|
||||
collection,
|
||||
children,
|
||||
query,
|
||||
);
|
||||
|
||||
// The actual knex query builder instance. This is a promise that resolves with the raw items from the db
|
||||
const dbQuery = await getDBQuery(schema, knex, collection, fieldNodes, query);
|
||||
|
||||
const rawItems: Item | Item[] = await dbQuery;
|
||||
|
||||
if (!rawItems) return null;
|
||||
|
||||
// Run the items through the special transforms
|
||||
const payloadService = new PayloadService(collection, { knex, schema });
|
||||
let items: null | Item | Item[] = await payloadService.processValues('read', rawItems, query.alias ?? {});
|
||||
|
||||
if (!items || (Array.isArray(items) && items.length === 0)) return items;
|
||||
|
||||
// Apply the `_in` filters to the nested collection batches
|
||||
const nestedNodes = applyParentFilters(schema, nestedCollectionNodes, items);
|
||||
|
||||
for (const nestedNode of nestedNodes) {
|
||||
let nestedItems: Item[] | null = [];
|
||||
|
||||
if (nestedNode.type === 'o2m') {
|
||||
let hasMore = true;
|
||||
|
||||
let batchCount = 0;
|
||||
|
||||
while (hasMore) {
|
||||
const node = merge({}, nestedNode, {
|
||||
query: {
|
||||
limit: env['RELATIONAL_BATCH_SIZE'],
|
||||
offset: batchCount * (env['RELATIONAL_BATCH_SIZE'] as number),
|
||||
page: null,
|
||||
},
|
||||
});
|
||||
|
||||
nestedItems = (await runAST(node, schema, { knex, nested: true })) as Item[] | null;
|
||||
|
||||
if (nestedItems) {
|
||||
items = mergeWithParentItems(schema, nestedItems, items!, nestedNode)!;
|
||||
}
|
||||
|
||||
if (!nestedItems || nestedItems.length < (env['RELATIONAL_BATCH_SIZE'] as number)) {
|
||||
hasMore = false;
|
||||
}
|
||||
|
||||
batchCount++;
|
||||
}
|
||||
} else {
|
||||
const node = merge({}, nestedNode, {
|
||||
query: { limit: -1 },
|
||||
});
|
||||
|
||||
nestedItems = (await runAST(node, schema, { knex, nested: true })) as Item[] | null;
|
||||
|
||||
if (nestedItems) {
|
||||
// Merge all fetched nested records with the parent items
|
||||
items = mergeWithParentItems(schema, nestedItems, items!, nestedNode)!;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// During the fetching of data, we have to inject a couple of required fields for the child nesting
|
||||
// to work (primary / foreign keys) even if they're not explicitly requested. After all fetching
|
||||
// and nesting is done, we parse through the output structure, and filter out all non-requested
|
||||
// fields
|
||||
if (options?.nested !== true && options?.stripNonRequested !== false) {
|
||||
items = removeTemporaryFields(schema, items, originalAST, primaryKeyField);
|
||||
}
|
||||
|
||||
return items;
|
||||
}
|
||||
}
|
||||
|
||||
async function parseCurrentLevel(
|
||||
schema: SchemaOverview,
|
||||
collection: string,
|
||||
children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[],
|
||||
query: Query,
|
||||
) {
|
||||
const primaryKeyField = schema.collections[collection]!.primary;
|
||||
const columnsInCollection = Object.keys(schema.collections[collection]!.fields);
|
||||
|
||||
const columnsToSelectInternal: string[] = [];
|
||||
const nestedCollectionNodes: NestedCollectionNode[] = [];
|
||||
|
||||
for (const child of children) {
|
||||
if (child.type === 'field' || child.type === 'functionField') {
|
||||
const { fieldName } = parseFilterKey(child.name);
|
||||
|
||||
if (columnsInCollection.includes(fieldName)) {
|
||||
columnsToSelectInternal.push(child.fieldKey);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!child.relation) continue;
|
||||
|
||||
if (child.type === 'm2o') {
|
||||
columnsToSelectInternal.push(child.relation.field);
|
||||
}
|
||||
|
||||
if (child.type === 'a2o') {
|
||||
columnsToSelectInternal.push(child.relation.field);
|
||||
columnsToSelectInternal.push(child.relation.meta!.one_collection_field!);
|
||||
}
|
||||
|
||||
nestedCollectionNodes.push(child);
|
||||
}
|
||||
|
||||
const isAggregate = (query.group || (query.aggregate && Object.keys(query.aggregate).length > 0)) ?? false;
|
||||
|
||||
/** Always fetch primary key in case there's a nested relation that needs it. Aggregate payloads
|
||||
* can't have nested relational fields
|
||||
*/
|
||||
if (isAggregate === false && columnsToSelectInternal.includes(primaryKeyField) === false) {
|
||||
columnsToSelectInternal.push(primaryKeyField);
|
||||
}
|
||||
|
||||
/** Make sure select list has unique values */
|
||||
const columnsToSelect = [...new Set(columnsToSelectInternal)];
|
||||
|
||||
const fieldNodes = columnsToSelect.map(
|
||||
(column: string) =>
|
||||
children.find(
|
||||
(childNode) =>
|
||||
(childNode.type === 'field' || childNode.type === 'functionField') && childNode.fieldKey === column,
|
||||
) ?? {
|
||||
type: 'field',
|
||||
name: column,
|
||||
fieldKey: column,
|
||||
},
|
||||
) as FieldNode[];
|
||||
|
||||
return { fieldNodes, nestedCollectionNodes, primaryKeyField };
|
||||
}
|
||||
|
||||
function getColumnPreprocessor(knex: Knex, schema: SchemaOverview, table: string) {
|
||||
const helpers = getHelpers(knex);
|
||||
|
||||
return function (fieldNode: FieldNode | FunctionFieldNode | M2ONode): Knex.Raw<string> {
|
||||
let alias = undefined;
|
||||
|
||||
if (fieldNode.name !== fieldNode.fieldKey) {
|
||||
alias = fieldNode.fieldKey;
|
||||
}
|
||||
|
||||
let field;
|
||||
|
||||
if (fieldNode.type === 'field' || fieldNode.type === 'functionField') {
|
||||
const { fieldName } = parseFilterKey(fieldNode.name);
|
||||
field = schema.collections[table]!.fields[fieldName];
|
||||
} else {
|
||||
field = schema.collections[fieldNode.relation.collection]!.fields[fieldNode.relation.field];
|
||||
}
|
||||
|
||||
if (field?.type?.startsWith('geometry')) {
|
||||
return helpers.st.asText(table, field.field);
|
||||
}
|
||||
|
||||
if (fieldNode.type === 'functionField') {
|
||||
return getColumn(knex, table, fieldNode.name, alias, schema, { query: fieldNode.query });
|
||||
}
|
||||
|
||||
return getColumn(knex, table, fieldNode.name, alias, schema);
|
||||
};
|
||||
}
|
||||
|
||||
async function getDBQuery(
|
||||
schema: SchemaOverview,
|
||||
knex: Knex,
|
||||
table: string,
|
||||
fieldNodes: (FieldNode | FunctionFieldNode)[],
|
||||
query: Query,
|
||||
): Promise<Knex.QueryBuilder> {
|
||||
const env = useEnv();
|
||||
const preProcess = getColumnPreprocessor(knex, schema, table);
|
||||
const queryCopy = clone(query);
|
||||
const helpers = getHelpers(knex);
|
||||
|
||||
queryCopy.limit = typeof queryCopy.limit === 'number' ? queryCopy.limit : Number(env['QUERY_LIMIT_DEFAULT']);
|
||||
|
||||
// Queries with aggregates and groupBy will not have duplicate result
|
||||
if (queryCopy.aggregate || queryCopy.group) {
|
||||
const flatQuery = knex.select(fieldNodes.map(preProcess)).from(table);
|
||||
return await applyQuery(knex, table, flatQuery, queryCopy, schema).query;
|
||||
}
|
||||
|
||||
const primaryKey = schema.collections[table]!.primary;
|
||||
const aliasMap: AliasMap = Object.create(null);
|
||||
let dbQuery = knex.from(table);
|
||||
let sortRecords: ColumnSortRecord[] | undefined;
|
||||
const innerQuerySortRecords: { alias: string; order: 'asc' | 'desc' }[] = [];
|
||||
let hasMultiRelationalSort: boolean | undefined;
|
||||
|
||||
if (queryCopy.sort) {
|
||||
const sortResult = applySort(knex, schema, dbQuery, queryCopy, table, aliasMap, true);
|
||||
|
||||
if (sortResult) {
|
||||
sortRecords = sortResult.sortRecords;
|
||||
hasMultiRelationalSort = sortResult.hasMultiRelationalSort;
|
||||
}
|
||||
}
|
||||
|
||||
const { hasMultiRelationalFilter } = applyQuery(knex, table, dbQuery, queryCopy, schema, {
|
||||
aliasMap,
|
||||
isInnerQuery: true,
|
||||
hasMultiRelationalSort,
|
||||
});
|
||||
|
||||
const needsInnerQuery = hasMultiRelationalSort || hasMultiRelationalFilter;
|
||||
|
||||
if (needsInnerQuery) {
|
||||
dbQuery.select(`${table}.${primaryKey}`).distinct();
|
||||
} else {
|
||||
dbQuery.select(fieldNodes.map(preProcess));
|
||||
}
|
||||
|
||||
if (sortRecords) {
|
||||
// Clears the order if any, eg: from MSSQL offset
|
||||
dbQuery.clear('order');
|
||||
|
||||
if (needsInnerQuery) {
|
||||
let orderByString = '';
|
||||
const orderByFields: Knex.Raw[] = [];
|
||||
|
||||
sortRecords.map((sortRecord) => {
|
||||
if (orderByString.length !== 0) {
|
||||
orderByString += ', ';
|
||||
}
|
||||
|
||||
const sortAlias = `sort_${generateAlias()}`;
|
||||
|
||||
if (sortRecord.column.includes('.')) {
|
||||
const [alias, field] = sortRecord.column.split('.');
|
||||
const originalCollectionName = getCollectionFromAlias(alias!, aliasMap);
|
||||
dbQuery.select(getColumn(knex, alias!, field!, sortAlias, schema, { originalCollectionName }));
|
||||
|
||||
orderByString += `?? ${sortRecord.order}`;
|
||||
orderByFields.push(getColumn(knex, alias!, field!, false, schema, { originalCollectionName }));
|
||||
} else {
|
||||
dbQuery.select(getColumn(knex, table, sortRecord.column, sortAlias, schema));
|
||||
|
||||
orderByString += `?? ${sortRecord.order}`;
|
||||
orderByFields.push(getColumn(knex, table, sortRecord.column, false, schema));
|
||||
}
|
||||
|
||||
innerQuerySortRecords.push({ alias: sortAlias, order: sortRecord.order });
|
||||
});
|
||||
|
||||
if (hasMultiRelationalSort) {
|
||||
dbQuery = helpers.schema.applyMultiRelationalSort(
|
||||
knex,
|
||||
dbQuery,
|
||||
table,
|
||||
primaryKey,
|
||||
orderByString,
|
||||
orderByFields,
|
||||
);
|
||||
|
||||
// Start order by with directus_row_number. The directus_row_number is derived from a window function that
|
||||
// is ordered by the sort fields within every primary key partition. That ensures that the result with the
|
||||
// row number = 1 is the top-most row of every partition, according to the selected sort fields.
|
||||
// Since the only relevant result is the first row of this partition, adding the directus_row_number to the
|
||||
// order by here ensures that all rows with a directus_row_number = 1 show up first in the inner query result,
|
||||
// and are correctly truncated by the limit, but not earlier.
|
||||
orderByString = `?? asc, ${orderByString}`;
|
||||
orderByFields.unshift(knex.ref('directus_row_number'));
|
||||
}
|
||||
|
||||
dbQuery.orderByRaw(orderByString, orderByFields);
|
||||
} else {
|
||||
sortRecords.map((sortRecord) => {
|
||||
if (sortRecord.column.includes('.')) {
|
||||
const [alias, field] = sortRecord.column.split('.');
|
||||
|
||||
sortRecord.column = getColumn(knex, alias!, field!, false, schema, {
|
||||
originalCollectionName: getCollectionFromAlias(alias!, aliasMap),
|
||||
}) as any;
|
||||
} else {
|
||||
sortRecord.column = getColumn(knex, table, sortRecord.column, false, schema) as any;
|
||||
}
|
||||
});
|
||||
|
||||
dbQuery.orderBy(sortRecords);
|
||||
}
|
||||
}
|
||||
|
||||
if (!needsInnerQuery) return dbQuery;
|
||||
|
||||
const wrapperQuery = knex
|
||||
.select(fieldNodes.map(preProcess))
|
||||
.from(table)
|
||||
.innerJoin(knex.raw('??', dbQuery.as('inner')), `${table}.${primaryKey}`, `inner.${primaryKey}`);
|
||||
|
||||
if (sortRecords) {
|
||||
innerQuerySortRecords.map((innerQuerySortRecord) => {
|
||||
wrapperQuery.orderBy(`inner.${innerQuerySortRecord.alias}`, innerQuerySortRecord.order);
|
||||
});
|
||||
|
||||
if (hasMultiRelationalSort) {
|
||||
wrapperQuery.where('inner.directus_row_number', '=', 1);
|
||||
applyLimit(knex, wrapperQuery, queryCopy.limit);
|
||||
}
|
||||
}
|
||||
|
||||
return wrapperQuery;
|
||||
}
|
||||
|
||||
function applyParentFilters(
|
||||
schema: SchemaOverview,
|
||||
nestedCollectionNodes: NestedCollectionNode[],
|
||||
parentItem: Item | Item[],
|
||||
) {
|
||||
const parentItems = toArray(parentItem);
|
||||
|
||||
for (const nestedNode of nestedCollectionNodes) {
|
||||
if (!nestedNode.relation) continue;
|
||||
|
||||
if (nestedNode.type === 'm2o') {
|
||||
const foreignField = schema.collections[nestedNode.relation.related_collection!]!.primary;
|
||||
const foreignIds = uniq(parentItems.map((res) => res[nestedNode.relation.field])).filter((id) => !isNil(id));
|
||||
|
||||
merge(nestedNode, { query: { filter: { [foreignField]: { _in: foreignIds } } } });
|
||||
} else if (nestedNode.type === 'o2m') {
|
||||
const relatedM2OisFetched = !!nestedNode.children.find((child) => {
|
||||
return child.type === 'field' && child.name === nestedNode.relation.field;
|
||||
});
|
||||
|
||||
if (relatedM2OisFetched === false) {
|
||||
nestedNode.children.push({
|
||||
type: 'field',
|
||||
name: nestedNode.relation.field,
|
||||
fieldKey: nestedNode.relation.field,
|
||||
});
|
||||
}
|
||||
|
||||
if (nestedNode.relation.meta?.sort_field) {
|
||||
nestedNode.children.push({
|
||||
type: 'field',
|
||||
name: nestedNode.relation.meta.sort_field,
|
||||
fieldKey: nestedNode.relation.meta.sort_field,
|
||||
});
|
||||
}
|
||||
|
||||
const foreignField = nestedNode.relation.field;
|
||||
const foreignIds = uniq(parentItems.map((res) => res[nestedNode.parentKey])).filter((id) => !isNil(id));
|
||||
|
||||
merge(nestedNode, { query: { filter: { [foreignField]: { _in: foreignIds } } } });
|
||||
} else if (nestedNode.type === 'a2o') {
|
||||
const keysPerCollection: { [collection: string]: (string | number)[] } = {};
|
||||
|
||||
for (const parentItem of parentItems) {
|
||||
const collection = parentItem[nestedNode.relation.meta!.one_collection_field!];
|
||||
if (!keysPerCollection[collection]) keysPerCollection[collection] = [];
|
||||
keysPerCollection[collection]!.push(parentItem[nestedNode.relation.field]);
|
||||
}
|
||||
|
||||
for (const relatedCollection of nestedNode.names) {
|
||||
const foreignField = nestedNode.relatedKey[relatedCollection]!;
|
||||
const foreignIds = uniq(keysPerCollection[relatedCollection]);
|
||||
|
||||
merge(nestedNode, {
|
||||
query: { [relatedCollection]: { filter: { [foreignField]: { _in: foreignIds } }, limit: foreignIds.length } },
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nestedCollectionNodes;
|
||||
}
|
||||
|
||||
function mergeWithParentItems(
|
||||
schema: SchemaOverview,
|
||||
nestedItem: Item | Item[],
|
||||
parentItem: Item | Item[],
|
||||
nestedNode: NestedCollectionNode,
|
||||
) {
|
||||
const env = useEnv();
|
||||
const nestedItems = toArray(nestedItem);
|
||||
const parentItems = clone(toArray(parentItem));
|
||||
|
||||
if (nestedNode.type === 'm2o') {
|
||||
for (const parentItem of parentItems) {
|
||||
const itemChild = nestedItems.find((nestedItem) => {
|
||||
return (
|
||||
nestedItem[schema.collections[nestedNode.relation.related_collection!]!.primary] ==
|
||||
parentItem[nestedNode.relation.field]
|
||||
);
|
||||
});
|
||||
|
||||
parentItem[nestedNode.fieldKey] = itemChild || null;
|
||||
}
|
||||
} else if (nestedNode.type === 'o2m') {
|
||||
for (const parentItem of parentItems) {
|
||||
if (!parentItem[nestedNode.fieldKey]) parentItem[nestedNode.fieldKey] = [] as Item[];
|
||||
|
||||
const itemChildren = nestedItems.filter((nestedItem) => {
|
||||
if (nestedItem === null) return false;
|
||||
if (Array.isArray(nestedItem[nestedNode.relation.field])) return true;
|
||||
|
||||
return (
|
||||
nestedItem[nestedNode.relation.field] ==
|
||||
parentItem[schema.collections[nestedNode.relation.related_collection!]!.primary] ||
|
||||
nestedItem[nestedNode.relation.field]?.[
|
||||
schema.collections[nestedNode.relation.related_collection!]!.primary
|
||||
] == parentItem[schema.collections[nestedNode.relation.related_collection!]!.primary]
|
||||
);
|
||||
});
|
||||
|
||||
parentItem[nestedNode.fieldKey].push(...itemChildren);
|
||||
|
||||
const limit = nestedNode.query.limit ?? Number(env['QUERY_LIMIT_DEFAULT']);
|
||||
|
||||
if (nestedNode.query.page && nestedNode.query.page > 1) {
|
||||
parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(limit * (nestedNode.query.page - 1));
|
||||
}
|
||||
|
||||
if (nestedNode.query.offset && nestedNode.query.offset >= 0) {
|
||||
parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(nestedNode.query.offset);
|
||||
}
|
||||
|
||||
if (limit !== -1) {
|
||||
parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(0, limit);
|
||||
}
|
||||
|
||||
parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].sort((a: Item, b: Item) => {
|
||||
// This is pre-filled in get-ast-from-query
|
||||
const sortField = nestedNode.query.sort![0]!;
|
||||
let column = sortField;
|
||||
let order: 'asc' | 'desc' = 'asc';
|
||||
|
||||
if (sortField.startsWith('-')) {
|
||||
column = sortField.substring(1);
|
||||
order = 'desc';
|
||||
}
|
||||
|
||||
if (a[column] === b[column]) return 0;
|
||||
if (a[column] === null) return 1;
|
||||
if (b[column] === null) return -1;
|
||||
|
||||
if (order === 'asc') {
|
||||
return a[column] < b[column] ? -1 : 1;
|
||||
} else {
|
||||
return a[column] < b[column] ? 1 : -1;
|
||||
}
|
||||
});
|
||||
}
|
||||
} else if (nestedNode.type === 'a2o') {
|
||||
for (const parentItem of parentItems) {
|
||||
if (!nestedNode.relation.meta?.one_collection_field) {
|
||||
parentItem[nestedNode.fieldKey] = null;
|
||||
continue;
|
||||
}
|
||||
|
||||
const relatedCollection = parentItem[nestedNode.relation.meta.one_collection_field];
|
||||
|
||||
if (!(nestedItem as Record<string, any[]>)[relatedCollection]) {
|
||||
parentItem[nestedNode.fieldKey] = null;
|
||||
continue;
|
||||
}
|
||||
|
||||
const itemChild = (nestedItem as Record<string, any[]>)[relatedCollection]!.find((nestedItem) => {
|
||||
return nestedItem[nestedNode.relatedKey[relatedCollection]!] == parentItem[nestedNode.fieldKey];
|
||||
});
|
||||
|
||||
parentItem[nestedNode.fieldKey] = itemChild || null;
|
||||
}
|
||||
}
|
||||
|
||||
return Array.isArray(parentItem) ? parentItems : parentItems[0];
|
||||
}
|
||||
|
||||
function removeTemporaryFields(
|
||||
schema: SchemaOverview,
|
||||
rawItem: Item | Item[],
|
||||
ast: AST | NestedCollectionNode,
|
||||
primaryKeyField: string,
|
||||
parentItem?: Item,
|
||||
): null | Item | Item[] {
|
||||
const rawItems = cloneDeep(toArray(rawItem));
|
||||
const items: Item[] = [];
|
||||
|
||||
if (ast.type === 'a2o') {
|
||||
const fields: Record<string, string[]> = {};
|
||||
const nestedCollectionNodes: Record<string, NestedCollectionNode[]> = {};
|
||||
|
||||
for (const relatedCollection of ast.names) {
|
||||
if (!fields[relatedCollection]) fields[relatedCollection] = [];
|
||||
if (!nestedCollectionNodes[relatedCollection]) nestedCollectionNodes[relatedCollection] = [];
|
||||
|
||||
for (const child of ast.children[relatedCollection]!) {
|
||||
if (child.type === 'field' || child.type === 'functionField') {
|
||||
fields[relatedCollection]!.push(child.name);
|
||||
} else {
|
||||
fields[relatedCollection]!.push(child.fieldKey);
|
||||
nestedCollectionNodes[relatedCollection]!.push(child);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const rawItem of rawItems) {
|
||||
const relatedCollection: string = parentItem?.[ast.relation.meta!.one_collection_field!];
|
||||
|
||||
if (rawItem === null || rawItem === undefined) return rawItem;
|
||||
|
||||
let item = rawItem;
|
||||
|
||||
for (const nestedNode of nestedCollectionNodes[relatedCollection]!) {
|
||||
item[nestedNode.fieldKey] = removeTemporaryFields(
|
||||
schema,
|
||||
item[nestedNode.fieldKey],
|
||||
nestedNode,
|
||||
schema.collections[nestedNode.relation.collection]!.primary,
|
||||
item,
|
||||
);
|
||||
}
|
||||
|
||||
const fieldsWithFunctionsApplied = fields[relatedCollection]!.map((field) => applyFunctionToColumnName(field));
|
||||
|
||||
item =
|
||||
fields[relatedCollection]!.length > 0 ? pick(rawItem, fieldsWithFunctionsApplied) : rawItem[primaryKeyField];
|
||||
|
||||
items.push(item);
|
||||
}
|
||||
} else {
|
||||
const fields: string[] = [];
|
||||
const nestedCollectionNodes: NestedCollectionNode[] = [];
|
||||
|
||||
for (const child of ast.children) {
|
||||
fields.push(child.fieldKey);
|
||||
|
||||
if (child.type !== 'field' && child.type !== 'functionField') {
|
||||
nestedCollectionNodes.push(child);
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure any requested aggregate fields are included
|
||||
if (ast.query?.aggregate) {
|
||||
for (const [operation, aggregateFields] of Object.entries(ast.query.aggregate)) {
|
||||
if (!fields) continue;
|
||||
|
||||
if (operation === 'count' && aggregateFields.includes('*')) fields.push('count');
|
||||
|
||||
fields.push(...aggregateFields.map((field) => `${operation}.${field}`));
|
||||
}
|
||||
}
|
||||
|
||||
for (const rawItem of rawItems) {
|
||||
if (rawItem === null || rawItem === undefined) return rawItem;
|
||||
|
||||
let item = rawItem;
|
||||
|
||||
for (const nestedNode of nestedCollectionNodes) {
|
||||
item[nestedNode.fieldKey] = removeTemporaryFields(
|
||||
schema,
|
||||
item[nestedNode.fieldKey],
|
||||
nestedNode,
|
||||
nestedNode.type === 'm2o'
|
||||
? schema.collections[nestedNode.relation.related_collection!]!.primary
|
||||
: schema.collections[nestedNode.relation.collection]!.primary,
|
||||
item,
|
||||
);
|
||||
}
|
||||
|
||||
const fieldsWithFunctionsApplied = fields.map((field) => applyFunctionToColumnName(field));
|
||||
|
||||
item = fields.length > 0 ? pick(rawItem, fieldsWithFunctionsApplied) : rawItem[primaryKeyField];
|
||||
|
||||
items.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.isArray(rawItem) ? items : items[0]!;
|
||||
}
|
||||
301
api/src/database/run-ast/lib/get-db-query.ts
Normal file
301
api/src/database/run-ast/lib/get-db-query.ts
Normal file
@@ -0,0 +1,301 @@
|
||||
import { useEnv } from '@directus/env';
|
||||
import type { Filter, Query, SchemaOverview } from '@directus/types';
|
||||
import type { Knex } from 'knex';
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
import type { FieldNode, FunctionFieldNode, O2MNode } from '../../../types/ast.js';
|
||||
import type { ColumnSortRecord } from '../../../utils/apply-query.js';
|
||||
import applyQuery, { applyLimit, applySort, generateAlias } from '../../../utils/apply-query.js';
|
||||
import { getCollectionFromAlias } from '../../../utils/get-collection-from-alias.js';
|
||||
import type { AliasMap } from '../../../utils/get-column-path.js';
|
||||
import { getColumn } from '../../../utils/get-column.js';
|
||||
import { getHelpers } from '../../helpers/index.js';
|
||||
import { applyCaseWhen } from '../utils/apply-case-when.js';
|
||||
import { getColumnPreprocessor } from '../utils/get-column-pre-processor.js';
|
||||
import { getNodeAlias } from '../utils/get-field-alias.js';
|
||||
import { getInnerQueryColumnPreProcessor } from '../utils/get-inner-query-column-pre-processor.js';
|
||||
import { withPreprocessBindings } from '../utils/with-preprocess-bindings.js';
|
||||
|
||||
export function getDBQuery(
|
||||
schema: SchemaOverview,
|
||||
knex: Knex,
|
||||
table: string,
|
||||
fieldNodes: (FieldNode | FunctionFieldNode)[],
|
||||
o2mNodes: O2MNode[],
|
||||
query: Query,
|
||||
cases: Filter[],
|
||||
): Knex.QueryBuilder {
|
||||
const aliasMap: AliasMap = Object.create(null);
|
||||
const env = useEnv();
|
||||
const preProcess = getColumnPreprocessor(knex, schema, table, cases, aliasMap);
|
||||
const queryCopy = cloneDeep(query);
|
||||
const helpers = getHelpers(knex);
|
||||
|
||||
const hasCaseWhen =
|
||||
o2mNodes.some((node) => node.whenCase && node.whenCase.length > 0) ||
|
||||
fieldNodes.some((node) => node.whenCase && node.whenCase.length > 0);
|
||||
|
||||
queryCopy.limit = typeof queryCopy.limit === 'number' ? queryCopy.limit : Number(env['QUERY_LIMIT_DEFAULT']);
|
||||
|
||||
// Queries with aggregates and groupBy will not have duplicate results
|
||||
if (queryCopy.aggregate || queryCopy.group) {
|
||||
const flatQuery = knex.from(table);
|
||||
|
||||
// Map the group fields to their respective field nodes
|
||||
const groupWhenCases = hasCaseWhen
|
||||
? queryCopy.group?.map((field) => fieldNodes.find(({ fieldKey }) => fieldKey === field)?.whenCase ?? [])
|
||||
: undefined;
|
||||
|
||||
const dbQuery = applyQuery(knex, table, flatQuery, queryCopy, schema, cases, { aliasMap, groupWhenCases }).query;
|
||||
|
||||
flatQuery.select(fieldNodes.map((node) => preProcess(node)));
|
||||
|
||||
withPreprocessBindings(knex, dbQuery);
|
||||
|
||||
return dbQuery;
|
||||
}
|
||||
|
||||
const primaryKey = schema.collections[table]!.primary;
|
||||
let dbQuery = knex.from(table);
|
||||
let sortRecords: ColumnSortRecord[] | undefined;
|
||||
const innerQuerySortRecords: { alias: string; order: 'asc' | 'desc' }[] = [];
|
||||
let hasMultiRelationalSort: boolean | undefined;
|
||||
|
||||
if (queryCopy.sort) {
|
||||
const sortResult = applySort(knex, schema, dbQuery, queryCopy, table, aliasMap, true);
|
||||
|
||||
if (sortResult) {
|
||||
sortRecords = sortResult.sortRecords;
|
||||
hasMultiRelationalSort = sortResult.hasMultiRelationalSort;
|
||||
}
|
||||
}
|
||||
|
||||
const { hasMultiRelationalFilter } = applyQuery(knex, table, dbQuery, queryCopy, schema, cases, {
|
||||
aliasMap,
|
||||
isInnerQuery: true,
|
||||
hasMultiRelationalSort,
|
||||
});
|
||||
|
||||
const needsInnerQuery = hasMultiRelationalSort || hasMultiRelationalFilter;
|
||||
|
||||
if (needsInnerQuery) {
|
||||
dbQuery.select(`${table}.${primaryKey}`);
|
||||
|
||||
// Only add distinct if there are no case/when constructs, since otherwise we rely on group by
|
||||
if (!hasCaseWhen) dbQuery.distinct();
|
||||
} else {
|
||||
dbQuery.select(fieldNodes.map((node) => preProcess(node)));
|
||||
|
||||
// Add flags for o2m fields with case/when to let the DB handle the partial item permissions
|
||||
dbQuery.select(
|
||||
o2mNodes
|
||||
.filter((node) => node.whenCase && node.whenCase.length > 0)
|
||||
.map((node) => {
|
||||
const columnCases = node.whenCase!.map((index) => cases[index]!);
|
||||
return applyCaseWhen(
|
||||
{
|
||||
column: knex.raw(1),
|
||||
columnCases,
|
||||
aliasMap,
|
||||
cases,
|
||||
table,
|
||||
alias: node.fieldKey,
|
||||
},
|
||||
{ knex, schema },
|
||||
);
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (sortRecords) {
|
||||
// Clears the order if any, e.g. from MSSQL offset
|
||||
dbQuery.clear('order');
|
||||
|
||||
if (needsInnerQuery) {
|
||||
let orderByString = '';
|
||||
const orderByFields: Knex.Raw[] = [];
|
||||
|
||||
sortRecords.map((sortRecord) => {
|
||||
if (orderByString.length !== 0) {
|
||||
orderByString += ', ';
|
||||
}
|
||||
|
||||
const sortAlias = `sort_${generateAlias()}`;
|
||||
|
||||
if (sortRecord.column.includes('.')) {
|
||||
const [alias, field] = sortRecord.column.split('.');
|
||||
const originalCollectionName = getCollectionFromAlias(alias!, aliasMap);
|
||||
dbQuery.select(getColumn(knex, alias!, field!, sortAlias, schema, { originalCollectionName }));
|
||||
|
||||
orderByString += `?? ${sortRecord.order}`;
|
||||
orderByFields.push(getColumn(knex, alias!, field!, false, schema, { originalCollectionName }));
|
||||
} else {
|
||||
dbQuery.select(getColumn(knex, table, sortRecord.column, sortAlias, schema));
|
||||
|
||||
orderByString += `?? ${sortRecord.order}`;
|
||||
orderByFields.push(getColumn(knex, table, sortRecord.column, false, schema));
|
||||
}
|
||||
|
||||
innerQuerySortRecords.push({ alias: sortAlias, order: sortRecord.order });
|
||||
});
|
||||
|
||||
if (hasMultiRelationalSort) {
|
||||
dbQuery = helpers.schema.applyMultiRelationalSort(
|
||||
knex,
|
||||
dbQuery,
|
||||
table,
|
||||
primaryKey,
|
||||
orderByString,
|
||||
orderByFields,
|
||||
);
|
||||
|
||||
// Start order by with directus_row_number. The directus_row_number is derived from a window function that
|
||||
// is ordered by the sort fields within every primary key partition. That ensures that the result with the
|
||||
// row number = 1 is the top-most row of every partition, according to the selected sort fields.
|
||||
// Since the only relevant result is the first row of this partition, adding the directus_row_number to the
|
||||
// order by here ensures that all rows with a directus_row_number = 1 show up first in the inner query result,
|
||||
// and are correctly truncated by the limit, but not earlier.
|
||||
orderByString = `?? asc, ${orderByString}`;
|
||||
orderByFields.unshift(knex.ref('directus_row_number'));
|
||||
}
|
||||
|
||||
dbQuery.orderByRaw(orderByString, orderByFields);
|
||||
} else {
|
||||
sortRecords.map((sortRecord) => {
|
||||
if (sortRecord.column.includes('.')) {
|
||||
const [alias, field] = sortRecord.column.split('.');
|
||||
|
||||
sortRecord.column = getColumn(knex, alias!, field!, false, schema, {
|
||||
originalCollectionName: getCollectionFromAlias(alias!, aliasMap),
|
||||
}) as any;
|
||||
} else {
|
||||
sortRecord.column = getColumn(knex, table, sortRecord.column, false, schema) as any;
|
||||
}
|
||||
});
|
||||
|
||||
dbQuery.orderBy(sortRecords);
|
||||
}
|
||||
}
|
||||
|
||||
if (!needsInnerQuery) return dbQuery;
|
||||
|
||||
const innerCaseWhenAliasPrefix = generateAlias();
|
||||
|
||||
if (hasCaseWhen) {
|
||||
/* If there are cases, we need to employ a trick in order to evaluate the case/when structure in the inner query,
|
||||
while passing the result of the evaluation to the outer query. The case/when needs to be evaluated in the inner
|
||||
query, since only there are all the joined-in tables available that might be required for the case/when.
|
||||
|
||||
The problem is that the resulting columns cannot be directly selected in the inner query,
|
||||
as a `SELECT DISTINCT` does not work for all datatypes in all vendors.
|
||||
|
||||
So instead of having an inner query which might look like this:
|
||||
|
||||
SELECT DISTINCT ...,
|
||||
CASE WHEN <condition> THEN <actual-column> END AS <alias>
|
||||
|
||||
a group-by query is generated.
|
||||
|
||||
Another problem is that not all rows with the same primary key are guaranteed to have the same value for
|
||||
the columns with the case/when, so we `or` those together by counting the number of flags in a group-by
|
||||
operation. This way the flag is set to > 0 if any of the rows in the group allows access to the column.
|
||||
|
||||
The inner query only evaluates the condition and passes up the or-ed flag, which is used in the wrapper query to select
|
||||
the actual column:
|
||||
|
||||
SELECT ...,
|
||||
COUNT (CASE WHEN <condition> THEN 1 END) AS <random-prefix>_<alias>
|
||||
...
|
||||
GROUP BY <primary-key>
|
||||
|
||||
Then, in the wrapper query there is no need to evaluate the condition again, but instead rely on the flag:
|
||||
|
||||
SELECT ...,
|
||||
CASE WHEN `inner`.<random-prefix>_<alias> > 0 THEN <actual-column> END AS <alias>
|
||||
*/
|
||||
|
||||
const innerPreprocess = getInnerQueryColumnPreProcessor(
|
||||
knex,
|
||||
schema,
|
||||
table,
|
||||
cases,
|
||||
aliasMap,
|
||||
innerCaseWhenAliasPrefix,
|
||||
);
|
||||
|
||||
// To optimize the query we avoid having unnecessary columns in the inner query that don't have a caseWhen, since
|
||||
// they are selected in the outer query directly
|
||||
dbQuery.select(fieldNodes.map(innerPreprocess).filter((x) => x !== null));
|
||||
|
||||
// In addition to the regular columns, select a flag that indicates if a user has access to an o2m related field
|
||||
// based on the case/when of that field.
|
||||
dbQuery.select(o2mNodes.map(innerPreprocess).filter((x) => x !== null));
|
||||
|
||||
const groupByFields = [knex.raw('??.??', [table, primaryKey])];
|
||||
|
||||
if (hasMultiRelationalSort) {
|
||||
// Sort fields that are not directly in the table the primary key is from need to be included in the group
|
||||
// by clause, otherwise this causes problems on some DBs
|
||||
groupByFields.push(...innerQuerySortRecords.map(({ alias }) => knex.raw('??', alias)));
|
||||
}
|
||||
|
||||
dbQuery.groupBy(groupByFields);
|
||||
}
|
||||
|
||||
const wrapperQuery = knex
|
||||
.from(table)
|
||||
.innerJoin(knex.raw('??', dbQuery.as('inner')), `${table}.${primaryKey}`, `inner.${primaryKey}`);
|
||||
|
||||
if (!hasCaseWhen) {
|
||||
// No need for case/when in the wrapper query, just select the preprocessed columns
|
||||
wrapperQuery.select(fieldNodes.map((node) => preProcess(node)));
|
||||
} else {
|
||||
// This applies a simplified case/when construct in the wrapper query that only checks whether the flag is > 0
|
||||
|
||||
// Distinguish between columns with and without case/when and handle them differently
|
||||
const plainColumns = fieldNodes.filter((fieldNode) => !fieldNode.whenCase || fieldNode.whenCase.length === 0);
|
||||
const whenCaseColumns = fieldNodes.filter((fieldNode) => fieldNode.whenCase && fieldNode.whenCase.length > 0);
|
||||
|
||||
// Select the plain columns
|
||||
wrapperQuery.select(plainColumns.map((node) => preProcess(node)));
|
||||
|
||||
// Select the case/when columns based on the flag from the inner query
|
||||
wrapperQuery.select(
|
||||
whenCaseColumns.map((fieldNode) => {
|
||||
const alias = getNodeAlias(fieldNode);
|
||||
|
||||
const innerAlias = `${innerCaseWhenAliasPrefix}_${alias}`;
|
||||
|
||||
// Preprocess the column without the case/when, since that is applied in a simpler fashion in the select
|
||||
const column = preProcess({ ...fieldNode, whenCase: [] }, { noAlias: true });
|
||||
|
||||
return knex.raw(`CASE WHEN ??.?? > 0 THEN ?? END as ??`, ['inner', innerAlias, column, alias]);
|
||||
}),
|
||||
);
|
||||
|
||||
// Pass the flags of o2m fields up through the wrapper query
|
||||
wrapperQuery.select(
|
||||
o2mNodes
|
||||
.filter((node) => node.whenCase && node.whenCase.length > 0)
|
||||
.map((node) => {
|
||||
const alias = node.fieldKey;
|
||||
|
||||
const innerAlias = `${innerCaseWhenAliasPrefix}_${alias}`;
|
||||
|
||||
return knex.raw(`CASE WHEN ??.?? > 0 THEN 1 END as ??`, ['inner', innerAlias, alias]);
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (sortRecords) {
|
||||
innerQuerySortRecords.map((innerQuerySortRecord) => {
|
||||
wrapperQuery.orderBy(`inner.${innerQuerySortRecord.alias}`, innerQuerySortRecord.order);
|
||||
});
|
||||
|
||||
if (hasMultiRelationalSort) {
|
||||
wrapperQuery.where('inner.directus_row_number', '=', 1);
|
||||
applyLimit(knex, wrapperQuery, queryCopy.limit);
|
||||
}
|
||||
}
|
||||
|
||||
return wrapperQuery;
|
||||
}
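
A rough knex rendering of the flag pattern described in the block comment above, for a single gated column. The `articles` table, the `status = 'published'` case, and the `abc123_` prefix are stand-ins for the parsed permission cases and the random alias prefix; this is a sketch, not the generated query.

import knex from 'knex';

// Simplified sketch: one permission-gated column ("title"), one case (status = 'published').
const db = knex({ client: 'pg' });

const inner = db
  .from('articles')
  .select('articles.id')
  // The flag counts how many rows in the group satisfy the condition (> 0 means "allowed").
  .select(
    db.raw('COUNT(CASE WHEN ?? = ? THEN 1 END) AS ??', ['articles.status', 'published', 'abc123_title']),
  )
  .groupBy('articles.id');

const wrapper = db
  .from('articles')
  .innerJoin(db.raw('??', inner.as('inner')), 'articles.id', 'inner.id')
  // The wrapper only checks the flag; the condition itself is not evaluated again.
  .select(
    db.raw('CASE WHEN ??.?? > 0 THEN ?? END AS ??', ['inner', 'abc123_title', 'articles.title', 'title']),
  );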
|
||||
67
api/src/database/run-ast/lib/parse-current-level.ts
Normal file
67
api/src/database/run-ast/lib/parse-current-level.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import type { Query, SchemaOverview } from '@directus/types';
|
||||
import type { FieldNode, FunctionFieldNode, NestedCollectionNode } from '../../../types/ast.js';
|
||||
import { parseFilterKey } from '../../../utils/parse-filter-key.js';
|
||||
|
||||
export async function parseCurrentLevel(
|
||||
schema: SchemaOverview,
|
||||
collection: string,
|
||||
children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[],
|
||||
query: Query,
|
||||
) {
|
||||
const primaryKeyField = schema.collections[collection]!.primary;
|
||||
const columnsInCollection = Object.keys(schema.collections[collection]!.fields);
|
||||
|
||||
const columnsToSelectInternal: string[] = [];
|
||||
const nestedCollectionNodes: NestedCollectionNode[] = [];
|
||||
|
||||
for (const child of children) {
|
||||
if (child.type === 'field' || child.type === 'functionField') {
|
||||
const { fieldName } = parseFilterKey(child.name);
|
||||
|
||||
if (columnsInCollection.includes(fieldName)) {
|
||||
columnsToSelectInternal.push(child.fieldKey);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!child.relation) continue;
|
||||
|
||||
if (child.type === 'm2o') {
|
||||
columnsToSelectInternal.push(child.relation.field);
|
||||
}
|
||||
|
||||
if (child.type === 'a2o') {
|
||||
columnsToSelectInternal.push(child.relation.field);
|
||||
columnsToSelectInternal.push(child.relation.meta!.one_collection_field!);
|
||||
}
|
||||
|
||||
nestedCollectionNodes.push(child);
|
||||
}
|
||||
|
||||
const isAggregate = (query.group || (query.aggregate && Object.keys(query.aggregate).length > 0)) ?? false;
|
||||
|
||||
/** Always fetch primary key in case there's a nested relation that needs it. Aggregate payloads
|
||||
* can't have nested relational fields
|
||||
*/
|
||||
if (isAggregate === false && columnsToSelectInternal.includes(primaryKeyField) === false) {
|
||||
columnsToSelectInternal.push(primaryKeyField);
|
||||
}
|
||||
|
||||
/** Make sure select list has unique values */
|
||||
const columnsToSelect = [...new Set(columnsToSelectInternal)];
|
||||
|
||||
const fieldNodes = columnsToSelect.map(
|
||||
(column: string) =>
|
||||
children.find(
|
||||
(childNode) =>
|
||||
(childNode.type === 'field' || childNode.type === 'functionField') && childNode.fieldKey === column,
|
||||
) ?? {
|
||||
type: 'field',
|
||||
name: column,
|
||||
fieldKey: column,
|
||||
},
|
||||
) as FieldNode[];
|
||||
|
||||
return { fieldNodes, nestedCollectionNodes, primaryKeyField };
|
||||
}
|
||||
153
api/src/database/run-ast/run-ast.ts
Normal file
153
api/src/database/run-ast/run-ast.ts
Normal file
@@ -0,0 +1,153 @@
|
||||
import { useEnv } from '@directus/env';
|
||||
import type { Filter, Item, Query, SchemaOverview } from '@directus/types';
|
||||
import { cloneDeep, merge } from 'lodash-es';
|
||||
import { PayloadService } from '../../services/payload.js';
|
||||
import type { AST, FieldNode, FunctionFieldNode, NestedCollectionNode, O2MNode } from '../../types/ast.js';
|
||||
import getDatabase from '../index.js';
|
||||
import { getDBQuery } from './lib/get-db-query.js';
|
||||
import { parseCurrentLevel } from './lib/parse-current-level.js';
|
||||
import type { RunASTOptions } from './types.js';
|
||||
import { applyParentFilters } from './utils/apply-parent-filters.js';
|
||||
import { mergeWithParentItems } from './utils/merge-with-parent-items.js';
|
||||
import { removeTemporaryFields } from './utils/remove-temporary-fields.js';
|
||||
|
||||
/**
|
||||
* Execute a given AST using Knex. Returns array of items based on requested AST.
|
||||
*/
|
||||
export async function runAst(
|
||||
originalAST: AST | NestedCollectionNode,
|
||||
schema: SchemaOverview,
|
||||
options?: RunASTOptions,
|
||||
): Promise<null | Item | Item[]> {
|
||||
const ast = cloneDeep(originalAST);
|
||||
|
||||
const knex = options?.knex || getDatabase();
|
||||
|
||||
if (ast.type === 'a2o') {
|
||||
const results: { [collection: string]: null | Item | Item[] } = {};
|
||||
|
||||
for (const collection of ast.names) {
|
||||
results[collection] = await run(
|
||||
collection,
|
||||
ast.children[collection]!,
|
||||
ast.query[collection]!,
|
||||
ast.cases[collection] ?? [],
|
||||
);
|
||||
}
|
||||
|
||||
return results;
|
||||
} else {
|
||||
return await run(ast.name, ast.children, options?.query || ast.query, ast.cases);
|
||||
}
|
||||
|
||||
async function run(
|
||||
collection: string,
|
||||
children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[],
|
||||
query: Query,
|
||||
cases: Filter[],
|
||||
) {
|
||||
const env = useEnv();
|
||||
|
||||
// Retrieve the database columns to select in the current AST
|
||||
const { fieldNodes, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel(
|
||||
schema,
|
||||
collection,
|
||||
children,
|
||||
query,
|
||||
);
|
||||
|
||||
const o2mNodes = nestedCollectionNodes.filter((node): node is O2MNode => node.type === 'o2m');
|
||||
|
||||
// The actual knex query builder instance. This is a promise that resolves with the raw items from the db
|
||||
const dbQuery = getDBQuery(schema, knex, collection, fieldNodes, o2mNodes, query, cases);
|
||||
|
||||
const rawItems: Item | Item[] = await dbQuery;
|
||||
|
||||
if (!rawItems) return null;
|
||||
|
||||
// Run the items through the special transforms
|
||||
const payloadService = new PayloadService(collection, { knex, schema });
|
||||
let items: null | Item | Item[] = await payloadService.processValues('read', rawItems, query.alias ?? {});
|
||||
|
||||
if (!items || (Array.isArray(items) && items.length === 0)) return items;
|
||||
|
||||
// Apply the `_in` filters to the nested collection batches
|
||||
const nestedNodes = applyParentFilters(schema, nestedCollectionNodes, items);
|
||||
|
||||
for (const nestedNode of nestedNodes) {
|
||||
let nestedItems: Item[] | null = [];
|
||||
|
||||
if (nestedNode.type === 'o2m') {
|
||||
let hasMore = true;
|
||||
|
||||
let batchCount = 0;
|
||||
|
||||
// If a nested node has a whenCase it indicates that the user might not be able to access the field for all items.
|
||||
// In that case the queried item includes a flag under the fieldKey that is populated in the db and indicates
|
||||
// if the user has access to that field for that specific item.
|
||||
const hasWhenCase = nestedNode.whenCase && nestedNode.whenCase.length > 0;
|
||||
let fieldAllowed: boolean | boolean[] = true;
|
||||
|
||||
if (hasWhenCase) {
|
||||
// Extract flag and remove field from item, so it can be populated with the actual items
|
||||
if (Array.isArray(items)) {
|
||||
fieldAllowed = [];
|
||||
|
||||
for (const item of items) {
|
||||
fieldAllowed.push(!!item[nestedNode.fieldKey]);
|
||||
delete item[nestedNode.fieldKey];
|
||||
}
|
||||
} else {
|
||||
fieldAllowed = !!items[nestedNode.fieldKey];
|
||||
delete items[nestedNode.fieldKey];
|
||||
}
|
||||
}
|
||||
|
||||
while (hasMore) {
|
||||
const node = merge({}, nestedNode, {
|
||||
query: {
|
||||
limit: env['RELATIONAL_BATCH_SIZE'],
|
||||
offset: batchCount * (env['RELATIONAL_BATCH_SIZE'] as number),
|
||||
page: null,
|
||||
},
|
||||
});
|
||||
|
||||
nestedItems = (await runAst(node, schema, { knex, nested: true })) as Item[] | null;
|
||||
|
||||
if (nestedItems) {
|
||||
items = mergeWithParentItems(schema, nestedItems, items!, nestedNode, fieldAllowed)!;
|
||||
}
|
||||
|
||||
if (!nestedItems || nestedItems.length < (env['RELATIONAL_BATCH_SIZE'] as number)) {
|
||||
hasMore = false;
|
||||
}
|
||||
|
||||
batchCount++;
|
||||
}
|
||||
} else {
|
||||
const node = merge({}, nestedNode, {
|
||||
query: { limit: -1 },
|
||||
});
|
||||
|
||||
nestedItems = (await runAst(node, schema, { knex, nested: true })) as Item[] | null;
|
||||
|
||||
if (nestedItems) {
|
||||
// Merge all fetched nested records with the parent items
|
||||
items = mergeWithParentItems(schema, nestedItems, items!, nestedNode, true)!;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// During the fetching of data, we have to inject a couple of required fields for the child nesting
|
||||
// to work (primary / foreign keys) even if they're not explicitly requested. After all fetching
|
||||
// and nesting is done, we parse through the output structure, and filter out all non-requested
|
||||
// fields
|
||||
// The field allowed flags injected in `getDBQuery` are already removed while processing the nested nodes in
|
||||
// the previous step.
|
||||
if (options?.nested !== true && options?.stripNonRequested !== false) {
|
||||
items = removeTemporaryFields(schema, items, originalAST, primaryKeyField);
|
||||
}
|
||||
|
||||
return items;
|
||||
}
|
||||
}
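
The o2m branch above pulls related items in batches of `RELATIONAL_BATCH_SIZE` until a batch comes back short. Stripped of the AST plumbing, the loop behaves like this self-contained sketch; the data source is faked and the batch size is assumed, not the configured default.

// Minimal sketch of the batching loop: request one batch at a time, bumping the
// offset each round, and stop as soon as a batch is smaller than the batch size.
const RELATIONAL_BATCH_SIZE = 100;

// Hypothetical data source holding 250 related rows.
async function fetchBatch(offset: number, limit: number): Promise<number[]> {
  const total = 250;
  const count = Math.max(Math.min(limit, total - offset), 0);
  return Array.from({ length: count }, (_, index) => offset + index);
}

async function fetchAll(): Promise<number[]> {
  const all: number[] = [];
  let batchCount = 0;
  let hasMore = true;

  while (hasMore) {
    const batch = await fetchBatch(batchCount * RELATIONAL_BATCH_SIZE, RELATIONAL_BATCH_SIZE);
    all.push(...batch);

    if (batch.length < RELATIONAL_BATCH_SIZE) hasMore = false;

    batchCount++;
  }

  return all; // 250 items after three rounds (100, 100, 50)
}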
|
||||
24
api/src/database/run-ast/types.ts
Normal file
24
api/src/database/run-ast/types.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import type { Knex } from 'knex';
|
||||
import type { AST } from '../../types/ast.js';
|
||||
|
||||
export interface RunASTOptions {
|
||||
/**
|
||||
* Query override for the current level
|
||||
*/
|
||||
query?: AST['query'];
|
||||
|
||||
/**
|
||||
* Knex instance
|
||||
*/
|
||||
knex?: Knex;
|
||||
|
||||
/**
|
||||
* Whether or not the current execution is a nested dataset in another AST
|
||||
*/
|
||||
nested?: boolean;
|
||||
|
||||
/**
|
||||
* Whether or not to strip out non-requested required fields automatically (e.g. IDs / FKs)
|
||||
*/
|
||||
stripNonRequested?: boolean;
|
||||
}
|
||||
58
api/src/database/run-ast/utils/apply-case-when.ts
Normal file
58
api/src/database/run-ast/utils/apply-case-when.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import type { Filter, SchemaOverview } from '@directus/types';
|
||||
import type { Knex } from 'knex';
|
||||
import { applyFilter } from '../../../utils/apply-query.js';
|
||||
import type { AliasMap } from '../../../utils/get-column-path.js';
|
||||
|
||||
export interface ApplyCaseWhenOptions {
|
||||
column: Knex.Raw;
|
||||
columnCases: Filter[];
|
||||
table: string;
|
||||
cases: Filter[];
|
||||
aliasMap: AliasMap;
|
||||
alias?: string;
|
||||
}
|
||||
|
||||
export interface ApplyCaseWhenContext {
|
||||
knex: Knex;
|
||||
schema: SchemaOverview;
|
||||
}
|
||||
|
||||
export function applyCaseWhen(
|
||||
{ columnCases, table, aliasMap, cases, column, alias }: ApplyCaseWhenOptions,
|
||||
{ knex, schema }: ApplyCaseWhenContext,
|
||||
): Knex.Raw {
|
||||
const caseQuery = knex.queryBuilder();
|
||||
|
||||
applyFilter(knex, schema, caseQuery, { _or: columnCases }, table, aliasMap, cases);
|
||||
|
||||
const compiler = knex.client.queryCompiler(caseQuery);
|
||||
|
||||
const sqlParts = [];
|
||||
|
||||
// Only empty filters, so no where was generated, skip it
|
||||
if (!compiler.grouped.where) return column;
|
||||
|
||||
for (const statement of compiler.grouped.where) {
|
||||
const val = compiler[statement.type](statement);
|
||||
|
||||
if (val) {
|
||||
if (sqlParts.length > 0) {
|
||||
sqlParts.push(statement.bool);
|
||||
}
|
||||
|
||||
sqlParts.push(val);
|
||||
}
|
||||
}
|
||||
|
||||
const sql = sqlParts.join(' ');
|
||||
const bindings = [...caseQuery.toSQL().bindings, column];
|
||||
|
||||
let rawCase = `(CASE WHEN ${sql} THEN ?? END)`;
|
||||
|
||||
if (alias) {
|
||||
rawCase += ' AS ??';
|
||||
bindings.push(alias);
|
||||
}
|
||||
|
||||
return knex.raw(rawCase, bindings);
|
||||
}
|
||||
69
api/src/database/run-ast/utils/apply-parent-filters.ts
Normal file
69
api/src/database/run-ast/utils/apply-parent-filters.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import type { Item, SchemaOverview } from '@directus/types';
|
||||
import { toArray } from '@directus/utils';
|
||||
import { isNil, merge, uniq } from 'lodash-es';
|
||||
import type { NestedCollectionNode } from '../../../types/ast.js';
|
||||
|
||||
export function applyParentFilters(
|
||||
schema: SchemaOverview,
|
||||
nestedCollectionNodes: NestedCollectionNode[],
|
||||
parentItem: Item | Item[],
|
||||
) {
|
||||
const parentItems = toArray(parentItem);
|
||||
|
||||
for (const nestedNode of nestedCollectionNodes) {
|
||||
if (!nestedNode.relation) continue;
|
||||
|
||||
if (nestedNode.type === 'm2o') {
|
||||
const foreignField = schema.collections[nestedNode.relation.related_collection!]!.primary;
|
||||
const foreignIds = uniq(parentItems.map((res) => res[nestedNode.relation.field])).filter((id) => !isNil(id));
|
||||
|
||||
merge(nestedNode, { query: { filter: { [foreignField]: { _in: foreignIds } } } });
|
||||
} else if (nestedNode.type === 'o2m') {
|
||||
const relatedM2OisFetched = !!nestedNode.children.find((child) => {
|
||||
return child.type === 'field' && child.name === nestedNode.relation.field;
|
||||
});
|
||||
|
||||
if (relatedM2OisFetched === false) {
|
||||
nestedNode.children.push({
|
||||
type: 'field',
|
||||
name: nestedNode.relation.field,
|
||||
fieldKey: nestedNode.relation.field,
|
||||
whenCase: [],
|
||||
});
|
||||
}
|
||||
|
||||
if (nestedNode.relation.meta?.sort_field) {
|
||||
nestedNode.children.push({
|
||||
type: 'field',
|
||||
name: nestedNode.relation.meta.sort_field,
|
||||
fieldKey: nestedNode.relation.meta.sort_field,
|
||||
whenCase: [],
|
||||
});
|
||||
}
|
||||
|
||||
const foreignField = nestedNode.relation.field;
|
||||
const foreignIds = uniq(parentItems.map((res) => res[nestedNode.parentKey])).filter((id) => !isNil(id));
|
||||
|
||||
merge(nestedNode, { query: { filter: { [foreignField]: { _in: foreignIds } } } });
|
||||
} else if (nestedNode.type === 'a2o') {
|
||||
const keysPerCollection: { [collection: string]: (string | number)[] } = {};
|
||||
|
||||
for (const parentItem of parentItems) {
|
||||
const collection = parentItem[nestedNode.relation.meta!.one_collection_field!];
|
||||
if (!keysPerCollection[collection]) keysPerCollection[collection] = [];
|
||||
keysPerCollection[collection]!.push(parentItem[nestedNode.relation.field]);
|
||||
}
|
||||
|
||||
for (const relatedCollection of nestedNode.names) {
|
||||
const foreignField = nestedNode.relatedKey[relatedCollection]!;
|
||||
const foreignIds = uniq(keysPerCollection[relatedCollection]);
|
||||
|
||||
merge(nestedNode, {
|
||||
query: { [relatedCollection]: { filter: { [foreignField]: { _in: foreignIds } }, limit: foreignIds.length } },
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nestedCollectionNodes;
|
||||
}
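
For an o2m field this boils down to collecting the parents' primary keys and merging an `_in` filter into the nested node's query, roughly as in the sketch below; `articles` and `article_id` are hypothetical names.

// Sketch of the filter that ends up on a nested o2m node after applyParentFilters,
// assuming the parent "articles" items with ids 1 and 2 were fetched first.
const parentItems = [{ id: 1 }, { id: 2 }, { id: 1 }];

const foreignField = 'article_id'; // relation.field on the related collection
const foreignIds = [...new Set(parentItems.map((item) => item.id))].filter((id) => id != null);

const nestedQuery = { filter: { [foreignField]: { _in: foreignIds } } };
// -> { filter: { article_id: { _in: [1, 2] } } }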
|
||||
86
api/src/database/run-ast/utils/get-column-pre-processor.ts
Normal file
86
api/src/database/run-ast/utils/get-column-pre-processor.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import type { Filter, SchemaOverview } from '@directus/types';
|
||||
import type { Knex } from 'knex';
|
||||
import type { FieldNode, FunctionFieldNode, M2ONode } from '../../../types/ast.js';
|
||||
import { joinFilterWithCases } from '../../../utils/apply-query.js';
|
||||
import type { AliasMap } from '../../../utils/get-column-path.js';
|
||||
import { getColumn } from '../../../utils/get-column.js';
|
||||
import { parseFilterKey } from '../../../utils/parse-filter-key.js';
|
||||
import { getHelpers } from '../../helpers/index.js';
|
||||
import { applyCaseWhen } from './apply-case-when.js';
|
||||
import { getNodeAlias } from './get-field-alias.js';
|
||||
|
||||
interface NodePreProcessOptions {
|
||||
/** Don't assign an alias to the column but instead return the column as is */
|
||||
noAlias?: boolean;
|
||||
}
|
||||
|
||||
export function getColumnPreprocessor(
|
||||
knex: Knex,
|
||||
schema: SchemaOverview,
|
||||
table: string,
|
||||
cases: Filter[],
|
||||
aliasMap: AliasMap,
|
||||
) {
|
||||
const helpers = getHelpers(knex);
|
||||
|
||||
return function (
|
||||
fieldNode: FieldNode | FunctionFieldNode | M2ONode,
|
||||
options?: NodePreProcessOptions,
|
||||
): Knex.Raw<string> {
|
||||
// Don't assign an alias to the column expression if the field has a whenCase
|
||||
// (since the alias will be assigned in applyCaseWhen) or if the noAlias option is set
|
||||
const hasWhenCase = fieldNode.whenCase && fieldNode.whenCase.length > 0;
|
||||
const noAlias = options?.noAlias || hasWhenCase;
|
||||
const alias = getNodeAlias(fieldNode);
|
||||
|
||||
const rawColumnAlias = noAlias ? false : alias;
|
||||
|
||||
let field;
|
||||
|
||||
if (fieldNode.type === 'field' || fieldNode.type === 'functionField') {
|
||||
const { fieldName } = parseFilterKey(fieldNode.name);
|
||||
field = schema.collections[table]!.fields[fieldName];
|
||||
} else {
|
||||
field = schema.collections[fieldNode.relation.collection]!.fields[fieldNode.relation.field];
|
||||
}
|
||||
|
||||
let column;
|
||||
|
||||
if (field?.type?.startsWith('geometry')) {
|
||||
column = helpers.st.asText(table, field.field, rawColumnAlias);
|
||||
} else if (fieldNode.type === 'functionField') {
|
||||
// Include the field cases in the functionField query filter
|
||||
column = getColumn(knex, table, fieldNode.name, rawColumnAlias, schema, {
|
||||
query: {
|
||||
...fieldNode.query,
|
||||
filter: joinFilterWithCases(fieldNode.query.filter, fieldNode.cases),
|
||||
},
|
||||
cases: fieldNode.cases,
|
||||
});
|
||||
} else {
|
||||
column = getColumn(knex, table, fieldNode.name, rawColumnAlias, schema);
|
||||
}
|
||||
|
||||
if (hasWhenCase) {
|
||||
const columnCases: Filter[] = [];
|
||||
|
||||
for (const index of fieldNode.whenCase) {
|
||||
columnCases.push(cases[index]!);
|
||||
}
|
||||
|
||||
column = applyCaseWhen(
|
||||
{
|
||||
column,
|
||||
columnCases,
|
||||
aliasMap,
|
||||
cases,
|
||||
table,
|
||||
alias,
|
||||
},
|
||||
{ knex, schema },
|
||||
);
|
||||
}
|
||||
|
||||
return column;
|
||||
};
|
||||
}
|
||||
6
api/src/database/run-ast/utils/get-field-alias.ts
Normal file
6
api/src/database/run-ast/utils/get-field-alias.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import type { FieldNode, FunctionFieldNode, M2ONode, O2MNode } from '../../../types/index.js';
|
||||
import { applyFunctionToColumnName } from '../../../utils/apply-function-to-column-name.js';
|
||||
|
||||
export function getNodeAlias(node: FieldNode | FunctionFieldNode | M2ONode | O2MNode) {
|
||||
return applyFunctionToColumnName(node.fieldKey);
|
||||
}
|
||||
@@ -0,0 +1,43 @@
|
||||
import type { Filter, SchemaOverview } from '@directus/types';
|
||||
import type { Knex } from 'knex';
|
||||
import type { FieldNode, FunctionFieldNode, M2ONode, O2MNode } from '../../../types/index.js';
|
||||
import type { AliasMap } from '../../../utils/get-column-path.js';
|
||||
import { applyCaseWhen } from './apply-case-when.js';
|
||||
import { getNodeAlias } from './get-field-alias.js';
|
||||
|
||||
export function getInnerQueryColumnPreProcessor(
|
||||
knex: Knex,
|
||||
schema: SchemaOverview,
|
||||
table: string,
|
||||
cases: Filter[],
|
||||
aliasMap: AliasMap,
|
||||
aliasPrefix: string,
|
||||
) {
|
||||
return function (fieldNode: FieldNode | FunctionFieldNode | M2ONode | O2MNode): Knex.Raw<string> | null {
|
||||
const alias = getNodeAlias(fieldNode);
|
||||
|
||||
if (fieldNode.whenCase && fieldNode.whenCase.length > 0) {
|
||||
const columnCases: Filter[] = [];
|
||||
|
||||
for (const index of fieldNode.whenCase) {
|
||||
columnCases.push(cases[index]!);
|
||||
}
|
||||
|
||||
// Don't pass in the alias, as we need to wrap the whole case/when in a count() and alias that instead
|
||||
const caseWhen = applyCaseWhen(
|
||||
{
|
||||
column: knex.raw(1),
|
||||
columnCases,
|
||||
aliasMap,
|
||||
cases,
|
||||
table,
|
||||
},
|
||||
{ knex, schema },
|
||||
);
|
||||
|
||||
return knex.raw('COUNT(??) AS ??', [caseWhen, `${aliasPrefix}_${alias}`]);
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
}
|
||||
112
api/src/database/run-ast/utils/merge-with-parent-items.ts
Normal file
112
api/src/database/run-ast/utils/merge-with-parent-items.ts
Normal file
@@ -0,0 +1,112 @@
|
||||
import { useEnv } from '@directus/env';
|
||||
import type { Item, SchemaOverview } from '@directus/types';
|
||||
import { toArray } from '@directus/utils';
|
||||
import { clone, isArray } from 'lodash-es';
|
||||
import type { NestedCollectionNode } from '../../../types/ast.js';
|
||||
|
||||
export function mergeWithParentItems(
|
||||
schema: SchemaOverview,
|
||||
nestedItem: Item | Item[],
|
||||
parentItem: Item | Item[],
|
||||
nestedNode: NestedCollectionNode,
|
||||
fieldAllowed: boolean | boolean[],
|
||||
) {
|
||||
const env = useEnv();
|
||||
const nestedItems = toArray(nestedItem);
|
||||
const parentItems = clone(toArray(parentItem));
|
||||
|
||||
if (nestedNode.type === 'm2o') {
|
||||
for (const parentItem of parentItems) {
|
||||
const itemChild = nestedItems.find((nestedItem) => {
|
||||
return (
|
||||
nestedItem[schema.collections[nestedNode.relation.related_collection!]!.primary] ==
|
||||
parentItem[nestedNode.relation.field]
|
||||
);
|
||||
});
|
||||
|
||||
parentItem[nestedNode.fieldKey] = itemChild || null;
|
||||
}
|
||||
} else if (nestedNode.type === 'o2m') {
|
||||
for (const [index, parentItem] of parentItems.entries()) {
|
||||
if (fieldAllowed === false || (isArray(fieldAllowed) && !fieldAllowed[index])) {
|
||||
parentItem[nestedNode.fieldKey] = null;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!parentItem[nestedNode.fieldKey]) parentItem[nestedNode.fieldKey] = [] as Item[];
|
||||
|
||||
const itemChildren = nestedItems.filter((nestedItem) => {
|
||||
if (nestedItem === null) return false;
|
||||
if (Array.isArray(nestedItem[nestedNode.relation.field])) return true;
|
||||
|
||||
return (
|
||||
nestedItem[nestedNode.relation.field] ==
|
||||
parentItem[schema.collections[nestedNode.relation.related_collection!]!.primary] ||
|
||||
nestedItem[nestedNode.relation.field]?.[
|
||||
schema.collections[nestedNode.relation.related_collection!]!.primary
|
||||
] == parentItem[schema.collections[nestedNode.relation.related_collection!]!.primary]
|
||||
);
|
||||
});
|
||||
|
||||
parentItem[nestedNode.fieldKey].push(...itemChildren);
|
||||
|
||||
const limit = nestedNode.query.limit ?? Number(env['QUERY_LIMIT_DEFAULT']);
|
||||
|
||||
if (nestedNode.query.page && nestedNode.query.page > 1) {
|
||||
parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(limit * (nestedNode.query.page - 1));
|
||||
}
|
||||
|
||||
if (nestedNode.query.offset && nestedNode.query.offset >= 0) {
|
||||
parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(nestedNode.query.offset);
|
||||
}
|
||||
|
||||
if (limit !== -1) {
|
||||
parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(0, limit);
|
||||
}
|
||||
|
||||
parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].sort((a: Item, b: Item) => {
|
||||
// This is pre-filled in get-ast-from-query
|
||||
const sortField = nestedNode.query.sort![0]!;
|
||||
let column = sortField;
|
||||
let order: 'asc' | 'desc' = 'asc';
|
||||
|
||||
if (sortField.startsWith('-')) {
|
||||
column = sortField.substring(1);
|
||||
order = 'desc';
|
||||
}
|
||||
|
||||
if (a[column] === b[column]) return 0;
|
||||
if (a[column] === null) return 1;
|
||||
if (b[column] === null) return -1;
|
||||
|
||||
if (order === 'asc') {
|
||||
return a[column] < b[column] ? -1 : 1;
|
||||
} else {
|
||||
return a[column] < b[column] ? 1 : -1;
|
||||
}
|
||||
});
|
||||
}
|
||||
} else if (nestedNode.type === 'a2o') {
|
||||
for (const parentItem of parentItems) {
|
||||
if (!nestedNode.relation.meta?.one_collection_field) {
|
||||
parentItem[nestedNode.fieldKey] = null;
|
||||
continue;
|
||||
}
|
||||
|
||||
const relatedCollection = parentItem[nestedNode.relation.meta.one_collection_field];
|
||||
|
||||
if (!(nestedItem as Record<string, any[]>)[relatedCollection]) {
|
||||
parentItem[nestedNode.fieldKey] = null;
|
||||
continue;
|
||||
}
|
||||
|
||||
const itemChild = (nestedItem as Record<string, any[]>)[relatedCollection]!.find((nestedItem) => {
|
||||
return nestedItem[nestedNode.relatedKey[relatedCollection]!] == parentItem[nestedNode.fieldKey];
|
||||
});
|
||||
|
||||
parentItem[nestedNode.fieldKey] = itemChild || null;
|
||||
}
|
||||
}
|
||||
|
||||
return Array.isArray(parentItem) ? parentItems : parentItems[0];
|
||||
}
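
The in-memory comparator at the end of the o2m branch sorts by the first (pre-filled) sort field, treats a leading `-` as descending, and always pushes `null` values last. A tiny standalone example with a hypothetical `-date` sort:

// Standalone version of the comparator for a hypothetical "-date" sort.
const sortField = '-date';

const column = sortField.startsWith('-') ? sortField.substring(1) : sortField;
const order: 'asc' | 'desc' = sortField.startsWith('-') ? 'desc' : 'asc';

const items: Record<string, any>[] = [{ date: '2024-01-01' }, { date: null }, { date: '2024-03-01' }];

items.sort((a, b) => {
  if (a[column] === b[column]) return 0;
  if (a[column] === null) return 1; // nulls always sort last
  if (b[column] === null) return -1;

  if (order === 'asc') {
    return a[column] < b[column] ? -1 : 1;
  } else {
    return a[column] < b[column] ? 1 : -1;
  }
});

// -> [{ date: '2024-03-01' }, { date: '2024-01-01' }, { date: null }]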
|
||||
108
api/src/database/run-ast/utils/remove-temporary-fields.ts
Normal file
108
api/src/database/run-ast/utils/remove-temporary-fields.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
import type { Item, SchemaOverview } from '@directus/types';
|
||||
import { toArray } from '@directus/utils';
|
||||
import { cloneDeep, pick } from 'lodash-es';
|
||||
import type { AST, NestedCollectionNode } from '../../../types/ast.js';
|
||||
import { applyFunctionToColumnName } from '../../../utils/apply-function-to-column-name.js';
|
||||
|
||||
export function removeTemporaryFields(
|
||||
schema: SchemaOverview,
|
||||
rawItem: Item | Item[],
|
||||
ast: AST | NestedCollectionNode,
|
||||
primaryKeyField: string,
|
||||
parentItem?: Item,
|
||||
): null | Item | Item[] {
|
||||
const rawItems = cloneDeep(toArray(rawItem));
|
||||
const items: Item[] = [];
|
||||
|
||||
if (ast.type === 'a2o') {
|
||||
const fields: Record<string, string[]> = {};
|
||||
const nestedCollectionNodes: Record<string, NestedCollectionNode[]> = {};
|
||||
|
||||
for (const relatedCollection of ast.names) {
|
||||
if (!fields[relatedCollection]) fields[relatedCollection] = [];
|
||||
if (!nestedCollectionNodes[relatedCollection]) nestedCollectionNodes[relatedCollection] = [];
|
||||
|
||||
for (const child of ast.children[relatedCollection]!) {
|
||||
if (child.type === 'field' || child.type === 'functionField') {
|
||||
fields[relatedCollection]!.push(child.name);
|
||||
} else {
|
||||
fields[relatedCollection]!.push(child.fieldKey);
|
||||
nestedCollectionNodes[relatedCollection]!.push(child);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const rawItem of rawItems) {
|
||||
const relatedCollection: string = parentItem?.[ast.relation.meta!.one_collection_field!];
|
||||
|
||||
if (rawItem === null || rawItem === undefined) return rawItem;
|
||||
|
||||
let item = rawItem;
|
||||
|
||||
for (const nestedNode of nestedCollectionNodes[relatedCollection]!) {
|
||||
item[nestedNode.fieldKey] = removeTemporaryFields(
|
||||
schema,
|
||||
item[nestedNode.fieldKey],
|
||||
nestedNode,
|
||||
schema.collections[nestedNode.relation.collection]!.primary,
|
||||
item,
|
||||
);
|
||||
}
|
||||
|
||||
const fieldsWithFunctionsApplied = fields[relatedCollection]!.map((field) => applyFunctionToColumnName(field));
|
||||
|
||||
item =
|
||||
fields[relatedCollection]!.length > 0 ? pick(rawItem, fieldsWithFunctionsApplied) : rawItem[primaryKeyField];
|
||||
|
||||
items.push(item);
|
||||
}
|
||||
} else {
|
||||
const fields: string[] = [];
|
||||
const nestedCollectionNodes: NestedCollectionNode[] = [];
|
||||
|
||||
for (const child of ast.children) {
|
||||
fields.push(child.fieldKey);
|
||||
|
||||
if (child.type !== 'field' && child.type !== 'functionField') {
|
||||
nestedCollectionNodes.push(child);
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure any requested aggregate fields are included
|
||||
if (ast.query?.aggregate) {
|
||||
for (const [operation, aggregateFields] of Object.entries(ast.query.aggregate)) {
|
||||
if (!fields) continue;
|
||||
|
||||
if (operation === 'count' && aggregateFields.includes('*')) fields.push('count');
|
||||
|
||||
fields.push(...aggregateFields.map((field) => `${operation}.${field}`));
|
||||
}
|
||||
}
|
||||
|
||||
for (const rawItem of rawItems) {
|
||||
if (rawItem === null || rawItem === undefined) return rawItem;
|
||||
|
||||
let item = rawItem;
|
||||
|
||||
for (const nestedNode of nestedCollectionNodes) {
|
||||
item[nestedNode.fieldKey] = removeTemporaryFields(
|
||||
schema,
|
||||
item[nestedNode.fieldKey],
|
||||
nestedNode,
|
||||
nestedNode.type === 'm2o'
|
||||
? schema.collections[nestedNode.relation.related_collection!]!.primary
|
||||
: schema.collections[nestedNode.relation.collection]!.primary,
|
||||
item,
|
||||
);
|
||||
}
|
||||
|
||||
const fieldsWithFunctionsApplied = fields.map((field) => applyFunctionToColumnName(field));
|
||||
|
||||
item = fields.length > 0 ? pick(rawItem, fieldsWithFunctionsApplied) : rawItem[primaryKeyField];
|
||||
|
||||
items.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.isArray(rawItem) ? items : items[0]!;
|
||||
}
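
The aggregate handling above keeps the synthetic field names that an aggregated payload actually uses. For a hypothetical query aggregating `count: ['*']` and `sum: ['price']` it resolves to the list shown in this small sketch:

// Which synthetic field names survive the pick() for an aggregate query (hypothetical input).
const aggregate: Record<string, string[]> = { count: ['*'], sum: ['price'] };
const fields: string[] = [];

for (const [operation, aggregateFields] of Object.entries(aggregate)) {
  if (operation === 'count' && aggregateFields.includes('*')) fields.push('count');

  fields.push(...aggregateFields.map((field) => `${operation}.${field}`));
}

// fields -> ['count', 'count.*', 'sum.price']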
|
||||
21
api/src/database/run-ast/utils/with-preprocess-bindings.ts
Normal file
21
api/src/database/run-ast/utils/with-preprocess-bindings.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import type { Knex } from 'knex';
|
||||
import { getHelpers } from '../../helpers/index.js';
|
||||
|
||||
export function withPreprocessBindings(knex: Knex, dbQuery: Knex.QueryBuilder) {
|
||||
const schemaHelper = getHelpers(knex).schema;
|
||||
|
||||
dbQuery.client = new Proxy(dbQuery.client, {
|
||||
get(target, prop, receiver) {
|
||||
if (prop === 'query') {
|
||||
return (connection: any, queryParam: any) => {
|
||||
return Reflect.get(target, prop, receiver).bind(target)(
|
||||
connection,
|
||||
schemaHelper.preprocessBindings(queryParam),
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
return Reflect.get(target, prop, receiver);
|
||||
},
|
||||
});
|
||||
}
|
||||
@@ -2,10 +2,11 @@ import { Action } from '@directus/constants';
|
||||
import { useEnv } from '@directus/env';
|
||||
import { ForbiddenError } from '@directus/errors';
|
||||
import type { OperationHandler } from '@directus/extensions';
|
||||
import { isSystemCollection } from '@directus/system-data';
|
||||
import type { Accountability, ActionHandler, FilterHandler, Flow, Operation, SchemaOverview } from '@directus/types';
|
||||
import { applyOptionsData, getRedactedString, isValidJSON, parseJSON, toArray } from '@directus/utils';
|
||||
import type { Knex } from 'knex';
|
||||
import { omit, pick } from 'lodash-es';
|
||||
import { pick } from 'lodash-es';
|
||||
import { get } from 'micromustache';
|
||||
import { useBus } from './bus/index.js';
|
||||
import getDatabase from './database/index.js';
|
||||
@@ -22,7 +23,6 @@ import { JobQueue } from './utils/job-queue.js';
|
||||
import { mapValuesDeep } from './utils/map-values-deep.js';
|
||||
import { redactObject } from './utils/redact-object.js';
|
||||
import { scheduleSynchronizedJob, validateCron } from './utils/schedule.js';
|
||||
import { isSystemCollection } from '@directus/system-data';
|
||||
|
||||
let flowManager: FlowManager | undefined;
|
||||
|
||||
@@ -371,7 +371,7 @@ class FlowManager {
|
||||
data: {
|
||||
steps: steps.map((step) => redactObject(step, { values: this.envs }, getRedactedString)),
|
||||
data: redactObject(
|
||||
omit(keyedData, '$accountability.permissions'), // Permissions is a ton of data, and is just a copy of what's in the directus_permissions table
|
||||
keyedData,
|
||||
{
|
||||
keys: [
|
||||
['**', 'headers', 'authorization'],
|
||||
|
||||
@@ -5,9 +5,23 @@ import type { Knex } from 'knex';
|
||||
import { afterEach, expect, test, vi } from 'vitest';
|
||||
import getDatabase from '../database/index.js';
|
||||
import emitter from '../emitter.js';
|
||||
import { createDefaultAccountability } from '../permissions/utils/create-default-accountability.js';
|
||||
import { fetchRolesTree } from '../permissions/lib/fetch-roles-tree.js';
|
||||
import { fetchGlobalAccess } from '../permissions/modules/fetch-global-access/fetch-global-access.js';
|
||||
import '../types/express.d.ts';
|
||||
import { handler } from './authenticate.js';
|
||||
|
||||
const reqGetImplementation = (string: any) => {
|
||||
switch (string) {
|
||||
case 'user-agent':
|
||||
return 'fake-user-agent';
|
||||
case 'origin':
|
||||
return 'fake-origin';
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
vi.mock('../database/index');
|
||||
|
||||
// This is required because logger uses global env which is imported before the tests run. Can be
|
||||
@@ -27,6 +41,9 @@ vi.mock('@directus/env', () => ({
	}),
}));

vi.mock('../permissions/lib/fetch-roles-tree.js');
vi.mock('../permissions/modules/fetch-global-access/fetch-global-access.js');

afterEach(() => {
	vi.clearAllMocks();
});
@@ -35,7 +52,7 @@ test('Short-circuits when authenticate filter is used', async () => {
	const req = {
		ip: '127.0.0.1',
		cookies: {},
		get: vi.fn(),
		get: vi.fn(reqGetImplementation),
	} as unknown as Request;

	const res = {} as Response;
@@ -55,16 +72,7 @@ test('Uses default public accountability when no token is given', async () => {
	const req = {
		ip: '127.0.0.1',
		cookies: {},
		get: vi.fn((string) => {
			switch (string) {
				case 'user-agent':
					return 'fake-user-agent';
				case 'origin':
					return 'fake-origin';
				default:
					return null;
			}
		}),
		get: vi.fn(reqGetImplementation),
	} as unknown as Request;

	const res = {} as Response;
@@ -74,15 +82,13 @@ test('Uses default public accountability when no token is given', async () => {

	await handler(req, res, next);

	expect(req.accountability).toEqual({
		user: null,
		role: null,
		admin: false,
		app: false,
		ip: '127.0.0.1',
		userAgent: 'fake-user-agent',
		origin: 'fake-origin',
	});
	expect(req.accountability).toEqual(
		createDefaultAccountability({
			ip: '127.0.0.1',
			userAgent: 'fake-user-agent',
			origin: 'fake-origin',
		}),
	);

	expect(next).toHaveBeenCalledTimes(1);
});
@@ -116,27 +122,22 @@ test('Sets accountability to payload contents if valid token is passed', async (
	const req = {
		ip: '127.0.0.1',
		cookies: {},
		get: vi.fn((string) => {
			switch (string) {
				case 'user-agent':
					return 'fake-user-agent';
				case 'origin':
					return 'fake-origin';
				default:
					return null;
			}
		}),
		get: vi.fn(reqGetImplementation),
		token,
	} as unknown as Request;

	const res = {} as Response;
	const next = vi.fn();

	vi.mocked(fetchRolesTree).mockResolvedValue([roleID]);
	vi.mocked(fetchGlobalAccess).mockResolvedValue({ app: appAccess, admin: adminAccess });

	await handler(req, res, next);

	expect(req.accountability).toEqual({
		user: userID,
		role: roleID,
		roles: [roleID],
		app: appAccess,
		admin: adminAccess,
		share,
@@ -169,6 +170,7 @@ test('Sets accountability to payload contents if valid token is passed', async (
	expect(req.accountability).toEqual({
		user: userID,
		role: roleID,
		roles: [roleID],
		app: appAccess,
		admin: adminAccess,
		share,
@@ -193,16 +195,7 @@ test('Throws InvalidCredentialsError when static token is used, but user does no
	const req = {
		ip: '127.0.0.1',
		cookies: {},
		get: vi.fn((string) => {
			switch (string) {
				case 'user-agent':
					return 'fake-user-agent';
				case 'origin':
					return 'fake-origin';
				default:
					return null;
			}
		}),
		get: vi.fn(reqGetImplementation),
		token: 'static-token',
	} as unknown as Request;

@@ -217,16 +210,7 @@ test('Sets accountability to user information when static token is used', async
	const req = {
		ip: '127.0.0.1',
		cookies: {},
		get: vi.fn((string) => {
			switch (string) {
				case 'user-agent':
					return 'fake-user-agent';
				case 'origin':
					return 'fake-origin';
				default:
					return null;
			}
		}),
		get: vi.fn(reqGetImplementation),
		token: 'static-token',
	} as unknown as Request;

@@ -238,6 +222,7 @@ test('Sets accountability to user information when static token is used', async
	const expectedAccountability = {
		user: testUser.id,
		role: testUser.role,
		roles: [testUser.role],
		app: testUser.app_access,
		admin: testUser.admin_access,
		ip: '127.0.0.1',
@@ -253,6 +238,9 @@ test('Sets accountability to user information when static token is used', async
		first: vi.fn().mockResolvedValue(testUser),
	} as unknown as Knex);

	vi.mocked(fetchRolesTree).mockResolvedValue([testUser.role]);
	vi.mocked(fetchGlobalAccess).mockResolvedValue({ app: testUser.app_access, admin: testUser.admin_access });

	await handler(req, res, next);

	expect(req.accountability).toEqual(expectedAccountability);
@@ -272,6 +260,9 @@ test('Sets accountability to user information when static token is used', async
	testUser.app_access = '1' as never;
	expectedAccountability.admin = false;
	expectedAccountability.app = true;

	vi.mocked(fetchGlobalAccess).mockResolvedValue({ app: true, admin: false });

	await handler(req, res, next);
	expect(req.accountability).toEqual(expectedAccountability);
	expect(next).toHaveBeenCalledTimes(1);
@@ -283,16 +274,7 @@ test('Invalid session token responds with error and clears the cookie', async ()
		cookies: {
			directus_session: 'session-token',
		},
		get: vi.fn((string) => {
			switch (string) {
				case 'user-agent':
					return 'fake-user-agent';
				case 'origin':
					return 'fake-origin';
				default:
					return null;
			}
		}),
		get: vi.fn(reqGetImplementation),
		token: 'session-token',
	} as unknown as Request;

@@ -321,16 +303,7 @@ test('Invalid query token responds with error but does not clear the session coo
		cookies: {
			directus_session: 'session-token',
		},
		get: vi.fn((string) => {
			switch (string) {
				case 'user-agent':
					return 'fake-user-agent';
				case 'origin':
					return 'fake-origin';
				default:
					return null;
			}
		}),
		get: vi.fn(reqGetImplementation),
		token: 'static-token',
	} as unknown as Request;

@@ -3,6 +3,7 @@ import type { NextFunction, Request, Response } from 'express';
import { isEqual } from 'lodash-es';
import getDatabase from '../database/index.js';
import emitter from '../emitter.js';
import { createDefaultAccountability } from '../permissions/utils/create-default-accountability.js';
import asyncHandler from '../utils/async-handler.js';
import { getAccountabilityForToken } from '../utils/get-accountability-for-token.js';
import { getIPFromReq } from '../utils/get-ip-from-req.js';
@@ -16,13 +17,7 @@ import { SESSION_COOKIE_OPTIONS } from '../constants.js';
export const handler = async (req: Request, res: Response, next: NextFunction) => {
	const env = useEnv();

	const defaultAccountability: Accountability = {
		user: null,
		role: null,
		admin: false,
		app: false,
		ip: getIPFromReq(req),
	};
	const defaultAccountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req) });

	const userAgent = req.get('user-agent')?.substring(0, 1024);
	if (userAgent) defaultAccountability.userAgent = userAgent;

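Note: the hunk above replaces the inline default-accountability literal with the shared `createDefaultAccountability` helper from `api/src/permissions/utils/create-default-accountability.ts`. That helper is not shown in this diff; the following is only a minimal sketch of the shape it presumably has, inferred from the object it replaces and from the test expectations above (the actual implementation may differ):

import type { Accountability } from '@directus/types';

// Assumed shape: start from the public/default accountability and spread any
// overrides (ip, userAgent, origin, ...) on top, mirroring the literal it replaced.
export function createDefaultAccountability(overrides: Partial<Accountability> = {}): Accountability {
	return {
		user: null,
		role: null,
		roles: [],
		admin: false,
		app: false,
		...overrides,
	};
}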
@@ -21,7 +21,7 @@ const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next)
		return next();
	}

	const key = getCacheKey(req);
	const key = await getCacheKey(req);

	let cachedData;

@@ -1,46 +0,0 @@
import { InvalidIpError } from '@directus/errors';
import type { RequestHandler } from 'express';
import getDatabase from '../database/index.js';
import { useLogger } from '../logger/index.js';
import asyncHandler from '../utils/async-handler.js';
import { ipInNetworks } from '../utils/ip-in-networks.js';

export const checkIP: RequestHandler = asyncHandler(async (req, _res, next) => {
	const database = getDatabase();
	const logger = useLogger();

	const { role: roleId, ip } = req.accountability!;

	const query = database.select('ip_access').from('directus_roles');

	if (roleId) {
		query.where({ id: roleId });
	} else {
		query.whereNull('id');
	}

	const role: { ip_access: string | null } | undefined = await query.first();

	if (!role?.ip_access) return next();

	const ipAllowList = role.ip_access.split(',').filter((ip) => ip);

	if (ipAllowList.length > 0) {
		if (!ip) throw new InvalidIpError();

		let allowed;

		try {
			allowed = ipInNetworks(ip, ipAllowList);
		} catch (error) {
			logger.warn(`Invalid IP access configuration for role "${roleId}"`);
			logger.warn(error);

			throw new InvalidIpError();
		}

		if (!allowed) throw new InvalidIpError();
	}

	return next();
});
@@ -1,15 +0,0 @@
import type { RequestHandler } from 'express';
import asyncHandler from '../utils/async-handler.js';
import { getPermissions as getPermissionsUtil } from '../utils/get-permissions.js';

const getPermissions: RequestHandler = asyncHandler(async (req, _res, next) => {
	if (!req.accountability) {
		throw new Error('getPermissions middleware needs to be called after authenticate');
	}

	req.accountability.permissions = await getPermissionsUtil(req.accountability, req.schema);

	return next();
});

export default getPermissions;
@@ -33,7 +33,7 @@ export const respond: RequestHandler = asyncHandler(async (req, res) => {
		res.locals['cache'] !== false &&
		exceedsMaxSize === false
	) {
		const key = getCacheKey(req);
		const key = await getCacheKey(req);

		try {
			await setCacheValue(cache, key, res.locals['payload'], getMilliseconds(env['CACHE_TTL']));

27
api/src/permissions/cache.ts
Normal file
@@ -0,0 +1,27 @@
import { defineCache, type CacheConfig } from '@directus/memory';
import { redisConfigAvailable, useRedis } from '../redis/index.js';

const localOnly = redisConfigAvailable() === false;

const config: CacheConfig = localOnly
	? {
			type: 'local',
			maxKeys: 500,
		}
	: {
			type: 'multi',
			redis: {
				namespace: 'permissions',
				redis: useRedis(),
			},
			local: {
				maxKeys: 100,
			},
		};

export const useCache = defineCache(config);

export function clearCache() {
	const cache = useCache();
	return cache.clear();
}
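Note: this new module picks a local-only cache when no Redis configuration is available and a layered local + Redis cache otherwise. A rough usage sketch follows; the `get`/`set` method names on the object returned by `useCache()` are an assumption about the `@directus/memory` API, and the key string is purely illustrative:

import { clearCache, useCache } from './cache.js';

// Hedged sketch: store and read back a computed policies list under a hashed key.
const cache = useCache();
await cache.set('policies-some-hash', ['policy-a', 'policy-b']); // assumed method name
const cached = await cache.get('policies-some-hash'); // assumed method name

// Drop every cached permissions/policies entry, e.g. after permissions are mutated.
await clearCache();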
125
api/src/permissions/lib/fetch-permissions.test.ts
Normal file
@@ -0,0 +1,125 @@
import type { Accountability, Permission } from '@directus/types';
import { beforeEach, expect, test, vi } from 'vitest';
import { PermissionsService } from '../../services/permissions.js';
import type { Context } from '../types.js';
import { fetchDynamicVariableContext } from '../utils/fetch-dynamic-variable-context.js';
import { processPermissions } from '../utils/process-permissions.js';
import { _fetchPermissions as fetchPermissions } from './fetch-permissions.js';
import { withAppMinimalPermissions } from './with-app-minimal-permissions.js';

vi.mock('../../services/permissions.js', () => ({
	PermissionsService: vi.fn(),
}));

vi.mock('./with-app-minimal-permissions.js');
vi.mock('../utils/fetch-dynamic-variable-context.js');
vi.mock('../utils/process-permissions.js');

beforeEach(() => {
	PermissionsService.prototype.readByQuery = vi.fn();

	vi.mocked(fetchDynamicVariableContext).mockResolvedValue({});

	vi.mocked(withAppMinimalPermissions).mockImplementation((_, permissions) => permissions);
	vi.mocked(processPermissions).mockImplementation(({ permissions }) => permissions);
});

test('Returns permissions read through service sorted by the order of policies', async () => {
	const permissions: Permission[] = [
		{ policy: 'policy-2' },
		{ policy: 'policy-1' },
		{ policy: 'policy-1' },
	] as Permission[];

	const policies = ['policy-1', 'policy-2'] as string[];
	const collections = [] as string[];

	vi.mocked(PermissionsService.prototype.readByQuery).mockResolvedValue(permissions);

	const res = await fetchPermissions({ action: 'read', policies, collections }, {} as Context);

	expect(res).toStrictEqual([{ policy: 'policy-1' }, { policy: 'policy-1' }, { policy: 'policy-2' }]);

	expect(PermissionsService.prototype.readByQuery).toHaveBeenCalledWith({
		filter: {
			_and: [{ policy: { _in: policies } }, { action: { _eq: 'read' } }, { collection: { _in: collections } }],
		},
		limit: -1,
	});
});

test('Returns all action permissions if action is undefined', async () => {
	const permissions: Permission[] = [{ policy: 'policy-1' }] as Permission[];
	const policies = [] as string[];
	const collections = [] as string[];

	vi.mocked(PermissionsService.prototype.readByQuery).mockResolvedValue(permissions);

	const res = await fetchPermissions({ policies, collections }, {} as Context);

	expect(res).toStrictEqual(permissions);

	expect(PermissionsService.prototype.readByQuery).toHaveBeenCalledWith({
		filter: {
			_and: [{ policy: { _in: policies } }, { collection: { _in: collections } }],
		},
		limit: -1,
	});
});

test('Fetches for all collections when collections filter is undefined', async () => {
	const permissions: Permission[] = [{ policy: 'policy-1' }] as Permission[];
	const policies = [] as string[];

	vi.mocked(PermissionsService.prototype.readByQuery).mockResolvedValue(permissions);

	const res = await fetchPermissions({ action: 'read', policies }, {} as Context);

	expect(res).toStrictEqual(permissions);

	expect(PermissionsService.prototype.readByQuery).toHaveBeenCalledWith({
		filter: {
			_and: [{ policy: { _in: policies } }, { action: { _eq: 'read' } }],
		},
		limit: -1,
	});
});

test('Adds minimal permissions if accountability is passed', async () => {
	const permissions: Permission[] = [{ policy: 'policy-1' }] as Permission[];
	const accountability = {} as unknown as Accountability;
	vi.mocked(PermissionsService.prototype.readByQuery).mockResolvedValue(permissions);

	const res = await fetchPermissions({ accountability, policies: [], action: 'read' }, {} as Context);

	expect(res).toStrictEqual(permissions);

	expect(withAppMinimalPermissions).toHaveBeenCalledWith(accountability, permissions, {
		_and: [{ action: { _eq: 'read' } }],
	});
});

test('Injects dynamic variables by calling process permissions', async () => {
	const permissions: Permission[] = [{ policy: 'policy-1' }] as Permission[];
	const accountability = {} as unknown as Accountability;
	vi.mocked(PermissionsService.prototype.readByQuery).mockResolvedValue(permissions);

	const res = await fetchPermissions({ accountability, policies: ['policy-1'], action: 'read' }, {} as Context);

	expect(res).toStrictEqual(permissions);

	expect(fetchDynamicVariableContext).toHaveBeenCalledWith(
		{
			accountability,
			policies: ['policy-1'],
			permissions,
		},
		{},
	);

	expect(processPermissions).toHaveBeenCalledWith({
		permissions,
		accountability,
		permissionsContext: {},
	});
});
86
api/src/permissions/lib/fetch-permissions.ts
Normal file
@@ -0,0 +1,86 @@
import type { Accountability, Filter, Permission, PermissionsAction } from '@directus/types';
import { pick, sortBy } from 'lodash-es';
import type { Context } from '../types.js';
import { fetchDynamicVariableContext } from '../utils/fetch-dynamic-variable-context.js';
import { processPermissions } from '../utils/process-permissions.js';
import { withCache } from '../utils/with-cache.js';
import { withAppMinimalPermissions } from './with-app-minimal-permissions.js';

export const fetchPermissions = withCache(
	'permissions',
	_fetchPermissions,
	({ action, policies, collections, accountability, bypassDynamicVariableProcessing }) => ({
		policies, // we assume that policies always come from the same source, so they should be in the same order
		...(action && { action }),
		...(collections && { collections: sortBy(collections) }),
		...(accountability && { accountability: pick(accountability, ['user', 'role', 'roles', 'app']) }),
		...(bypassDynamicVariableProcessing && { bypassDynamicVariableProcessing }),
	}),
);

export interface FetchPermissionsOptions {
	action?: PermissionsAction;
	policies: string[];
	collections?: string[];
	accountability?: Pick<Accountability, 'user' | 'role' | 'roles' | 'app'>;
	bypassDynamicVariableProcessing?: boolean;
}

export async function _fetchPermissions(options: FetchPermissionsOptions, context: Context) {
	const { PermissionsService } = await import('../../services/permissions.js');
	const permissionsService = new PermissionsService(context);

	const filter: Filter = {
		_and: [{ policy: { _in: options.policies } }],
	};

	if (options.action) {
		filter._and.push({ action: { _eq: options.action } });
	}

	if (options.collections) {
		filter._and.push({ collection: { _in: options.collections } });
	}

	let permissions = (await permissionsService.readByQuery({
		filter,
		limit: -1,
	})) as Permission[];

	// Sort permissions by their order in the policies array
	// This ensures that if a sorted array of policies is passed in the permissions are returned in the same order
	// which is necessary for correctly applying the presets in order
	permissions = sortBy(permissions, (permission) => options.policies.indexOf(permission.policy!));

	if (options.accountability && !options.bypassDynamicVariableProcessing) {
		// Add app minimal permissions for the request accountability, if applicable.
		// Normally this is done in the permissions service readByQuery, but it also needs to do it here
		// since the permissions service is created without accountability.
		// We call it without the policies filter, since the static minimal app permissions don't have a policy attached.
		const permissionsWithAppPermissions = withAppMinimalPermissions(options.accountability ?? null, permissions, {
			_and: filter._and.slice(1),
		});

		const permissionsContext = await fetchDynamicVariableContext(
			{
				accountability: options.accountability,
				policies: options.policies,
				permissions: permissionsWithAppPermissions,
			},
			context,
		);

		// Replace dynamic variables with their actual values
		const processedPermissions = processPermissions({
			permissions: permissionsWithAppPermissions,
			accountability: options.accountability,
			permissionsContext,
		});

		// TODO merge in permissions coming from the share scope

		return processedPermissions;
	}

	return permissions;
}
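Note: `fetchPermissions` is the cached entry point, and the key function above deliberately includes only the inputs that influence the result. A hedged sketch of how a caller might combine it with `fetchPolicies`; the `Context` shape (`{ knex, schema }`), the `getSchema` helper path, and the `'articles'` collection name are assumptions for illustration:

import type { Accountability } from '@directus/types';
import getDatabase from '../../database/index.js';
import { getSchema } from '../../utils/get-schema.js';
import { fetchPermissions } from './fetch-permissions.js';
import { fetchPolicies } from './fetch-policies.js';

// Sketch: resolve the accountability's policies first, then fetch the matching
// read permissions for a single collection through the cached helpers.
async function readArticlePermissions(accountability: Accountability) {
	const context = { knex: getDatabase(), schema: await getSchema() };

	const policies = await fetchPolicies(accountability, context);

	return fetchPermissions({ action: 'read', policies, collections: ['articles'], accountability }, context);
}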
185
api/src/permissions/lib/fetch-policies.test.ts
Normal file
@@ -0,0 +1,185 @@
import type { Accountability } from '@directus/types';
import { beforeEach, expect, test, vi } from 'vitest';
import { AccessService } from '../../services/access.js';
import type { Context } from '../types.js';
import { _fetchPolicies as fetchPolicies, type AccessRow } from './fetch-policies.js';

vi.mock('../../services/access.js', () => ({
	AccessService: vi.fn(),
}));

let rows: AccessRow[];

beforeEach(() => {
	rows = [];

	AccessService.prototype.readByQuery = vi.fn().mockResolvedValue(rows);
});

test('Fetches policies for public role and user when user is given without role', async () => {
	const acc = { roles: [], user: 'user-a' } as unknown as Accountability;

	const policies = await fetchPolicies(acc, {} as Context);

	expect(AccessService.prototype.readByQuery).toHaveBeenCalledWith({
		filter: {
			_or: [
				{ user: { _eq: 'user-a' } },
				{
					_and: [
						{
							role: {
								_null: true,
							},
						},
						{
							user: {
								_null: true,
							},
						},
					],
				},
			],
		},
		fields: ['policy.id', 'policy.ip_access', 'role'],
		limit: -1,
	});

	expect(policies).toEqual([]);
});

test('Fetches policies for public role when no roles and user are given', async () => {
	const acc = { roles: [], user: null } as unknown as Accountability;

	const policies = await fetchPolicies(acc, {} as Context);

	expect(AccessService.prototype.readByQuery).toHaveBeenCalledWith({
		filter: {
			_and: [
				{
					role: {
						_null: true,
					},
				},
				{
					user: {
						_null: true,
					},
				},
			],
		},
		fields: ['policy.id', 'policy.ip_access', 'role'],
		limit: -1,
	});

	expect(policies).toEqual([]);
});

test('Fetched policies for user roles', async () => {
	const acc = { roles: ['role-a', 'role-b'], user: null } as unknown as Accountability;

	const policies = await fetchPolicies(acc, {} as Context);

	expect(AccessService.prototype.readByQuery).toHaveBeenCalledWith({
		filter: {
			role: {
				_in: ['role-a', 'role-b'],
			},
		},
		fields: ['policy.id', 'policy.ip_access', 'role'],
		limit: -1,
	});

	expect(policies).toEqual([]);
});

test('Fetches policies for user roles and user if user is passed', async () => {
	const acc = { roles: ['role-a', 'role-b'], user: 'user-a' } as unknown as Accountability;

	const policies = await fetchPolicies(acc, {} as Context);

	expect(AccessService.prototype.readByQuery).toHaveBeenCalledWith({
		filter: {
			_or: [
				{
					user: {
						_eq: 'user-a',
					},
				},
				{
					role: {
						_in: ['role-a', 'role-b'],
					},
				},
			],
		},
		fields: ['policy.id', 'policy.ip_access', 'role'],
		limit: -1,
	});

	expect(policies).toEqual([]);
});

test('Filters policies based on ip access on access row', async () => {
	const acc = { roles: ['role-a', 'role-b'], user: 'user-a', ip: '127.0.0.5' } as unknown as Accountability;

	rows.push(
		{
			policy: {
				id: 'policy-a',
				ip_access: ['127.0.0.0/29'],
			},
			role: null,
		},
		{
			policy: {
				id: 'policy-b',
				ip_access: ['1.1.1.1/32'],
			},
			role: null,
		},
	);

	const policies = await fetchPolicies(acc, {} as Context);

	expect(policies).toEqual(['policy-a']);
});

test('Sorts policies by priority', async () => {
	const acc = { roles: ['role-a', 'role-b'], user: 'user-a' } as unknown as Accountability;

	rows.push(
		{
			policy: {
				id: 'policy-c',
				ip_access: null,
			},
			role: null,
		},
		{
			policy: {
				id: 'policy-d',
				ip_access: null,
			},
			role: null,
		},
		{
			policy: {
				id: 'policy-b',
				ip_access: null,
			},
			role: 'role-b',
		},
		{
			policy: {
				id: 'policy-a',
				ip_access: null,
			},
			role: 'role-a',
		},
	);

	const policies = await fetchPolicies(acc, {} as Context);

	expect(policies).toEqual(['policy-a', 'policy-b', 'policy-c', 'policy-d']);
});
60
api/src/permissions/lib/fetch-policies.ts
Normal file
@@ -0,0 +1,60 @@
import type { Accountability, Filter } from '@directus/types';
import type { Context } from '../types.js';
import { filterPoliciesByIp } from '../utils/filter-policies-by-ip.js';
import { withCache } from '../utils/with-cache.js';

export interface AccessRow {
	policy: { id: string; ip_access: string[] | null };
	role: string | null;
}

export const fetchPolicies = withCache('policies', _fetchPolicies, ({ roles, user, ip }) => ({ roles, user, ip }));

/**
 * Fetch the policies associated with the current user accountability
 */
export async function _fetchPolicies(
	{ roles, user, ip }: Pick<Accountability, 'user' | 'roles' | 'ip'>,
	context: Context,
): Promise<string[]> {
	const { AccessService } = await import('../../services/access.js');
	const accessService = new AccessService(context);

	let roleFilter: Filter;

	if (roles.length === 0) {
		// Users without role assumes the Public role permissions along with their attached policies
		roleFilter = { _and: [{ role: { _null: true } }, { user: { _null: true } }] };
	} else {
		roleFilter = { role: { _in: roles } };
	}

	// If the user is not null, we also want to include the policies attached to the user
	const filter = user ? { _or: [{ user: { _eq: user } }, roleFilter] } : roleFilter;

	const accessRows = (await accessService.readByQuery({
		filter,
		fields: ['policy.id', 'policy.ip_access', 'role'],
		limit: -1,
	})) as AccessRow[];

	const filteredAccessRows = filterPoliciesByIp(accessRows, ip);

	/*
	 * Sort rows by priority (goes bottom up):
	 * - Parent role policies
	 * - Child role policies
	 * - User policies
	 */
	filteredAccessRows.sort((a, b) => {
		if (!a.role && !b.role) return 0;
		if (!a.role) return 1;
		if (!b.role) return -1;

		return roles.indexOf(a.role) - roles.indexOf(b.role);
	});

	const ids = filteredAccessRows.map(({ policy }) => policy.id);

	return ids;
}
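Note: the sort above places user-attached rows (role === null) last and orders role-attached rows by their position in the `roles` array, which is expected to run parent-to-child (the order produced by the roles-tree lookup). A short illustration of that ordering; the parent/child labels here are an interpretation of the comment above, not names from the source:

// Assuming roles = ['parent-role', 'child-role'] (top-down), these rows...
const exampleRows: AccessRow[] = [
	{ policy: { id: 'user-policy', ip_access: null }, role: null },
	{ policy: { id: 'child-policy', ip_access: null }, role: 'child-role' },
	{ policy: { id: 'parent-policy', ip_access: null }, role: 'parent-role' },
];

// ...come out of _fetchPolicies lowest-priority first:
// ['parent-policy', 'child-policy', 'user-policy']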
53
api/src/permissions/lib/fetch-roles-tree.test.ts
Normal file
@@ -0,0 +1,53 @@
import type { Knex } from 'knex';
import { beforeEach, expect, test, vi } from 'vitest';
import { _fetchRolesTree } from './fetch-roles-tree.js';

let knex: Knex;

beforeEach(() => {
	knex = {
		select: vi.fn().mockReturnThis(),
		from: vi.fn().mockReturnThis(),
		where: vi.fn().mockReturnThis(),
		first: vi.fn(),
	} as unknown as Knex;

	vi.clearAllMocks();
});

test('Returns empty array if start value is null', async () => {
	const roles = await _fetchRolesTree(null, knex);
	expect(roles).toEqual([]);
});

test('Returns array of all parents in top-down order', async () => {
	vi.mocked(knex.first).mockResolvedValueOnce({ id: 'start', parent: 'second' });
	vi.mocked(knex.first).mockResolvedValueOnce({ id: 'second', parent: 'third' });
	vi.mocked(knex.first).mockResolvedValueOnce({ id: 'third', parent: null });
	vi.mocked(knex.first).mockResolvedValueOnce({ id: 'unrelated', parent: null });

	const roles = await _fetchRolesTree('start', knex);

	expect(roles).toEqual(['third', 'second', 'start']);
});

test('Exits if parent row is undefined', async () => {
	vi.mocked(knex.first).mockResolvedValueOnce({ id: 'start', parent: 'second' });
	vi.mocked(knex.first).mockResolvedValueOnce({ id: 'second', parent: 'third' });
	vi.mocked(knex.first).mockResolvedValueOnce(undefined);
	vi.mocked(knex.first).mockResolvedValueOnce({ id: 'unrelated', parent: null });

	const roles = await _fetchRolesTree('start', knex);

	expect(roles).toEqual(['second', 'start']);
});

test('Throws error if infinite recursion occurs', async () => {
	vi.mocked(knex.first).mockResolvedValueOnce({ id: 'first', parent: 'second' });
	vi.mocked(knex.first).mockResolvedValueOnce({ id: 'second', parent: 'third' });
	vi.mocked(knex.first).mockResolvedValueOnce({ id: 'third', parent: 'first' });

	await expect(_fetchRolesTree('first', knex)).rejects.toMatchInlineSnapshot(
		`[Error: Recursion encountered: role "third" already exists in tree path "third"->"second"->"first"]`,
	);
});
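Note: the implementation of `fetch-roles-tree.ts` itself is not shown in this diff (see the truncation notice below). The following is only a rough sketch reconstructed from the tests above — walk `directus_roles` upward via `parent`, return the chain in top-down order, stop on a missing row, and reject on cycles. Names, column selection, and especially the error wording are assumptions and will not match the snapshot in the test exactly:

import type { Knex } from 'knex';

// Hedged sketch, not the shipped implementation.
export async function _fetchRolesTree(start: string | null, knex: Knex): Promise<string[]> {
	if (!start) return [];

	const visited: string[] = [];
	let current: string | null = start;

	while (current) {
		const row: { id: string; parent: string | null } | undefined = await knex
			.select('id', 'parent')
			.from('directus_roles')
			.where({ id: current })
			.first();

		if (!row) break;

		visited.push(row.id);

		if (row.parent && visited.includes(row.parent)) {
			// Illustrative cycle guard; the real error message differs.
			throw new Error(`Recursion encountered in role tree at "${row.parent}"`);
		}

		current = row.parent;
	}

	// Parents were collected child-first; reverse so the root-most parent comes first.
	return visited.reverse();
}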
Some files were not shown because too many files have changed in this diff.