diff --git a/.changeset/afraid-trainers-brush.md b/.changeset/afraid-trainers-brush.md new file mode 100644 index 0000000000..1d4ec1b18d --- /dev/null +++ b/.changeset/afraid-trainers-brush.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Fixed `groupBy` behavior that resulted in an internal server error when used with relational item permissions diff --git a/.changeset/bright-plants-rule.md b/.changeset/bright-plants-rule.md new file mode 100644 index 0000000000..8377d16d99 --- /dev/null +++ b/.changeset/bright-plants-rule.md @@ -0,0 +1,5 @@ +--- +'@directus/constants': minor +--- + +Added permission actions constant diff --git a/.changeset/chilled-seas-care.md b/.changeset/chilled-seas-care.md new file mode 100644 index 0000000000..2910ec6032 --- /dev/null +++ b/.changeset/chilled-seas-care.md @@ -0,0 +1,5 @@ +--- +'@directus/utils': major +--- + +Added new dynamic variables to `parseFilter` and added the `processChunk` helper diff --git a/.changeset/dull-rings-drive.md b/.changeset/dull-rings-drive.md new file mode 100644 index 0000000000..695279daef --- /dev/null +++ b/.changeset/dull-rings-drive.md @@ -0,0 +1,5 @@ +--- +"@directus/app": patch +--- + +Ensured collections in system permissions interface are scrolled into view when added but out of view diff --git a/.changeset/dull-spies-worry.md b/.changeset/dull-spies-worry.md new file mode 100644 index 0000000000..4f11b0430e --- /dev/null +++ b/.changeset/dull-spies-worry.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Fixed an issue that would cause the API to return an error when a root field in a m2a builder was queried diff --git a/.changeset/eighty-toys-clap.md b/.changeset/eighty-toys-clap.md new file mode 100644 index 0000000000..12ae080544 --- /dev/null +++ b/.changeset/eighty-toys-clap.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Fixed `reduceSchema` to strip out collection the user does not have access to diff --git a/.changeset/famous-candles-camp.md 
b/.changeset/famous-candles-camp.md new file mode 100644 index 0000000000..f7518f4dfc --- /dev/null +++ b/.changeset/famous-candles-camp.md @@ -0,0 +1,5 @@ +--- +'@directus/api': major +--- + +Replaced the database client library `mysql` with `mysql2`, used for MySQL/MariaDB diff --git a/.changeset/famous-carpets-exercise.md b/.changeset/famous-carpets-exercise.md new file mode 100644 index 0000000000..c30563b8f5 --- /dev/null +++ b/.changeset/famous-carpets-exercise.md @@ -0,0 +1,5 @@ +--- +"@directus/sdk": minor +--- + +Implemented new SDK functions for policies diff --git a/.changeset/fuzzy-news-drop.md b/.changeset/fuzzy-news-drop.md new file mode 100644 index 0000000000..1747d597ab --- /dev/null +++ b/.changeset/fuzzy-news-drop.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Fixed user counting where users were double counted and inactive users with policies were counted as well diff --git a/.changeset/heavy-geese-kneel.md b/.changeset/heavy-geese-kneel.md new file mode 100644 index 0000000000..81fd56a2b8 --- /dev/null +++ b/.changeset/heavy-geese-kneel.md @@ -0,0 +1,5 @@ +--- +'@directus/types': major +--- + +Added new types and modified existing types required for Policies diff --git a/.changeset/lucky-humans-carry.md b/.changeset/lucky-humans-carry.md new file mode 100644 index 0000000000..de3aaa4cce --- /dev/null +++ b/.changeset/lucky-humans-carry.md @@ -0,0 +1,5 @@ +--- +"@directus/app": patch +--- + +Ensured the permissions table under policies is displayed correctly on mobile devices diff --git a/.changeset/nine-geckos-jog.md b/.changeset/nine-geckos-jog.md new file mode 100644 index 0000000000..5a19766ca8 --- /dev/null +++ b/.changeset/nine-geckos-jog.md @@ -0,0 +1,5 @@ +--- +"@directus/api": minor +--- + +Updated WebSocket subscriptions to include the new policies collection diff --git a/.changeset/pink-wolves-beg.md b/.changeset/pink-wolves-beg.md new file mode 100644 index 0000000000..5a5bad8bb6 --- /dev/null +++ 
b/.changeset/pink-wolves-beg.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Fixed filter creation in `fetchPolicies` for users without roles diff --git a/.changeset/polite-crabs-eat.md b/.changeset/polite-crabs-eat.md new file mode 100644 index 0000000000..c39b1a5ab6 --- /dev/null +++ b/.changeset/polite-crabs-eat.md @@ -0,0 +1,5 @@ +--- +'@directus/extensions': major +--- + +Changed module `preRegisterCheck` signature to align with the changes made for Policies diff --git a/.changeset/poor-ladybugs-help.md b/.changeset/poor-ladybugs-help.md new file mode 100644 index 0000000000..49cb04aacf --- /dev/null +++ b/.changeset/poor-ladybugs-help.md @@ -0,0 +1,5 @@ +--- +'@directus/app': patch +--- + +Fixed table layout default query, to not include presentational fields diff --git a/.changeset/proud-cameras-travel.md b/.changeset/proud-cameras-travel.md new file mode 100644 index 0000000000..b0ec49903e --- /dev/null +++ b/.changeset/proud-cameras-travel.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Fixed aggregation field existence and permission checks diff --git a/.changeset/rare-squids-compete.md b/.changeset/rare-squids-compete.md new file mode 100644 index 0000000000..f65f95c134 --- /dev/null +++ b/.changeset/rare-squids-compete.md @@ -0,0 +1,5 @@ +--- +'@directus/system-data': major +--- + +Added new collections and fields and updated existing fields and permissions needed for Policies diff --git a/.changeset/red-buckets-wink.md b/.changeset/red-buckets-wink.md new file mode 100644 index 0000000000..16960cd59d --- /dev/null +++ b/.changeset/red-buckets-wink.md @@ -0,0 +1,5 @@ +--- +'@directus/app': patch +--- + +Fixed missing policies in public role policy selection diff --git a/.changeset/rude-peas-confess.md b/.changeset/rude-peas-confess.md new file mode 100644 index 0000000000..29ce8a17fc --- /dev/null +++ b/.changeset/rude-peas-confess.md @@ -0,0 +1,5 @@ +--- +'@directus/api': minor +--- + +Used explicit headings for CSV export diff --git 
a/.changeset/serious-mangos-tease.md b/.changeset/serious-mangos-tease.md new file mode 100644 index 0000000000..3bb3cbbb58 --- /dev/null +++ b/.changeset/serious-mangos-tease.md @@ -0,0 +1,5 @@ +--- +'@directus/memory': minor +--- + +Added new `clear` method to cache implementations diff --git a/.changeset/serious-rings-carry.md b/.changeset/serious-rings-carry.md new file mode 100644 index 0000000000..5c7b2fb2c9 --- /dev/null +++ b/.changeset/serious-rings-carry.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Fixed down migration erroring on post migration permissions diff --git a/.changeset/slow-rats-smoke.md b/.changeset/slow-rats-smoke.md new file mode 100644 index 0000000000..d83e1aeda6 --- /dev/null +++ b/.changeset/slow-rats-smoke.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Fixed permission checking for o2m related fields diff --git a/.changeset/slow-snakes-occur.md b/.changeset/slow-snakes-occur.md new file mode 100644 index 0000000000..a5ce4ba067 --- /dev/null +++ b/.changeset/slow-snakes-occur.md @@ -0,0 +1,5 @@ +--- +"@directus/api": minor +--- + +Implemented new GraphQL queries for policies diff --git a/.changeset/strong-numbers-warn.md b/.changeset/strong-numbers-warn.md new file mode 100644 index 0000000000..561505523e --- /dev/null +++ b/.changeset/strong-numbers-warn.md @@ -0,0 +1,5 @@ +--- +'@directus/errors': minor +--- + +Added error extension to the `ForbiddenError` diff --git a/.changeset/tasty-guests-fry.md b/.changeset/tasty-guests-fry.md new file mode 100644 index 0000000000..6da7abd802 --- /dev/null +++ b/.changeset/tasty-guests-fry.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Ensured that policies attached to a user, role and parent roles are correctly prioritized diff --git a/.changeset/ten-beds-pretend.md b/.changeset/ten-beds-pretend.md new file mode 100644 index 0000000000..750c0c9c20 --- /dev/null +++ b/.changeset/ten-beds-pretend.md @@ -0,0 +1,5 @@ +--- +"@directus/api": patch +--- + +Ensured the 
default `DB_FILENAME` option from the Docker Image is not applied when using MySQL/MariaDB, fixing a corresponding warning diff --git a/.changeset/thick-dingos-film.md b/.changeset/thick-dingos-film.md new file mode 100644 index 0000000000..cdfc6a005f --- /dev/null +++ b/.changeset/thick-dingos-film.md @@ -0,0 +1,5 @@ +--- +"@directus/api": patch +--- + +Fixed the policies migration for the case where permissions had been configured for the public role diff --git a/.changeset/thin-feet-float.md b/.changeset/thin-feet-float.md new file mode 100644 index 0000000000..b7136d7236 --- /dev/null +++ b/.changeset/thin-feet-float.md @@ -0,0 +1,6 @@ +--- +'@directus/api': major +'@directus/app': major +--- + +Added a new policy based permissions system diff --git a/.changeset/three-teachers-destroy.md b/.changeset/three-teachers-destroy.md new file mode 100644 index 0000000000..6114d7eef8 --- /dev/null +++ b/.changeset/three-teachers-destroy.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Fix query error on some DB vendors when using multi relation sort diff --git a/.changeset/twenty-yaks-live.md b/.changeset/twenty-yaks-live.md new file mode 100644 index 0000000000..f7c3f39549 --- /dev/null +++ b/.changeset/twenty-yaks-live.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Migrated `NotificationsService` to new policies system diff --git a/.changeset/two-items-joke.md b/.changeset/two-items-joke.md new file mode 100644 index 0000000000..d6b130c43a --- /dev/null +++ b/.changeset/two-items-joke.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Fixed broken permissions for sorting of aggregate query when using the aggregate result as sort field diff --git a/.changeset/violet-numbers-retire.md b/.changeset/violet-numbers-retire.md new file mode 100644 index 0000000000..98d3e0ba4d --- /dev/null +++ b/.changeset/violet-numbers-retire.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Fixed an issue where keys in filter operand objects were incorrectly 
checked for field permissions diff --git a/.github/workflows/blackbox.yml b/.github/workflows/blackbox.yml index 8034d5c5d9..ed0977bedb 100644 --- a/.github/workflows/blackbox.yml +++ b/.github/workflows/blackbox.yml @@ -4,7 +4,7 @@ on: push: branches: - main - - next + - v11-rc paths: - api/** - tests/blackbox/** diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml index 334dc7a3eb..af08ae787c 100644 --- a/.github/workflows/check.yml +++ b/.github/workflows/check.yml @@ -4,7 +4,7 @@ on: pull_request: branches: - main - - next + - v11-rc concurrency: group: check-${{ github.ref }} diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 5763619515..1c79426a30 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -4,7 +4,7 @@ on: pull_request: branches: - main - - next + - v11-rc paths: - docs/** - .github/workflows/docs.yml diff --git a/api/package.json b/api/package.json index 3bee77cad1..47908782af 100644 --- a/api/package.json +++ b/api/package.json @@ -61,7 +61,8 @@ "build": "tsc --project tsconfig.prod.json && copyfiles \"src/**/*.{yaml,liquid}\" -u 1 dist", "cli": "NODE_ENV=development SERVE_APP=false tsx src/cli/run.ts", "dev": "NODE_ENV=development SERVE_APP=true tsx watch --ignore extensions --clear-screen=false src/start.ts", - "test": "vitest --watch=false" + "test": "vitest run", + "test:watch": "vitest" }, "dependencies": { "@authenio/samlify-node-xmllint": "2.0.0", @@ -224,7 +225,7 @@ }, "optionalDependencies": { "@keyv/redis": "2.8.5", - "mysql": "2.18.1", + "mysql2": "3.10.0", "nodemailer-mailgun-transport": "2.1.5", "nodemailer-sendgrid": "1.0.3", "oracledb": "6.5.1", diff --git a/api/src/app.test.ts b/api/src/app.test.ts index 262ae37a0f..e6c8182e4c 100644 --- a/api/src/app.test.ts +++ b/api/src/app.test.ts @@ -47,18 +47,10 @@ vi.mock('./flows', () => ({ }), })); -vi.mock('./middleware/check-ip', () => ({ - checkIP: Router(), -})); - vi.mock('./middleware/schema', () => ({ default: 
Router(), })); -vi.mock('./middleware/get-permissions', () => ({ - default: Router(), -})); - vi.mock('./auth', () => ({ registerAuthProviders: vi.fn(), })); diff --git a/api/src/app.ts b/api/src/app.ts index c6ed1d95bf..9e03a7f1ed 100644 --- a/api/src/app.ts +++ b/api/src/app.ts @@ -12,6 +12,7 @@ import path from 'path'; import qs from 'qs'; import { registerAuthProviders } from './auth.js'; import activityRouter from './controllers/activity.js'; +import accessRouter from './controllers/access.js'; import assetsRouter from './controllers/assets.js'; import authRouter from './controllers/auth.js'; import collectionsRouter from './controllers/collections.js'; @@ -28,6 +29,7 @@ import notificationsRouter from './controllers/notifications.js'; import operationsRouter from './controllers/operations.js'; import panelsRouter from './controllers/panels.js'; import permissionsRouter from './controllers/permissions.js'; +import policiesRouter from './controllers/policies.js'; import presetsRouter from './controllers/presets.js'; import relationsRouter from './controllers/relations.js'; import revisionsRouter from './controllers/revisions.js'; @@ -54,11 +56,9 @@ import { getFlowManager } from './flows.js'; import { createExpressLogger, useLogger } from './logger/index.js'; import authenticate from './middleware/authenticate.js'; import cache from './middleware/cache.js'; -import { checkIP } from './middleware/check-ip.js'; import cors from './middleware/cors.js'; import { errorHandler } from './middleware/error-handler.js'; import extractToken from './middleware/extract-token.js'; -import getPermissions from './middleware/get-permissions.js'; import rateLimiterGlobal from './middleware/rate-limiter-global.js'; import rateLimiter from './middleware/rate-limiter-ip.js'; import sanitizeQuery from './middleware/sanitize-query.js'; @@ -260,16 +260,12 @@ export default async function createApp(): Promise { app.use(authenticate); - app.use(checkIP); - app.use(sanitizeQuery); 
app.use(cache); app.use(schema); - app.use(getPermissions); - await emitter.emitInit('middlewares.after', { app }); await emitter.emitInit('routes.before', { app }); @@ -279,6 +275,7 @@ export default async function createApp(): Promise { app.use('/graphql', graphqlRouter); app.use('/activity', activityRouter); + app.use('/access', accessRouter); app.use('/assets', assetsRouter); app.use('/collections', collectionsRouter); app.use('/dashboards', dashboardsRouter); @@ -297,6 +294,7 @@ export default async function createApp(): Promise { app.use('/operations', operationsRouter); app.use('/panels', panelsRouter); app.use('/permissions', permissionsRouter); + app.use('/policies', policiesRouter); app.use('/presets', presetsRouter); app.use('/translations', translationsRouter); app.use('/relations', relationsRouter); diff --git a/api/src/auth/drivers/ldap.ts b/api/src/auth/drivers/ldap.ts index 88afe663f9..5af357536c 100644 --- a/api/src/auth/drivers/ldap.ts +++ b/api/src/auth/drivers/ldap.ts @@ -14,17 +14,18 @@ import { Router } from 'express'; import Joi from 'joi'; import type { Client, Error, LDAPResult, SearchCallbackResponse, SearchEntry } from 'ldapjs'; import ldap from 'ldapjs'; +import { REFRESH_COOKIE_OPTIONS, SESSION_COOKIE_OPTIONS } from '../../constants.js'; import getDatabase from '../../database/index.js'; import emitter from '../../emitter.js'; import { useLogger } from '../../logger/index.js'; import { respond } from '../../middleware/respond.js'; +import { createDefaultAccountability } from '../../permissions/utils/create-default-accountability.js'; import { AuthenticationService } from '../../services/authentication.js'; import { UsersService } from '../../services/users.js'; import type { AuthDriverOptions, AuthenticationMode, User } from '../../types/index.js'; import asyncHandler from '../../utils/async-handler.js'; import { getIPFromReq } from '../../utils/get-ip-from-req.js'; import { AuthDriver } from '../auth.js'; -import { 
REFRESH_COOKIE_OPTIONS, SESSION_COOKIE_OPTIONS } from '../../constants.js'; interface UserInfo { dn: string; @@ -417,10 +418,9 @@ export function createLDAPAuthRouter(provider: string): Router { asyncHandler(async (req, res, next) => { const env = useEnv(); - const accountability: Accountability = { + const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req), - role: null, - }; + }); const userAgent = req.get('user-agent')?.substring(0, 1024); if (userAgent) accountability.userAgent = userAgent; diff --git a/api/src/auth/drivers/local.ts b/api/src/auth/drivers/local.ts index 4c9129aa4e..f5af53ab60 100644 --- a/api/src/auth/drivers/local.ts +++ b/api/src/auth/drivers/local.ts @@ -1,3 +1,4 @@ +import { useEnv } from '@directus/env'; import { InvalidCredentialsError, InvalidPayloadError } from '@directus/errors'; import type { Accountability } from '@directus/types'; import argon2 from 'argon2'; @@ -5,8 +6,8 @@ import { Router } from 'express'; import Joi from 'joi'; import { performance } from 'perf_hooks'; import { REFRESH_COOKIE_OPTIONS, SESSION_COOKIE_OPTIONS } from '../../constants.js'; -import { useEnv } from '@directus/env'; import { respond } from '../../middleware/respond.js'; +import { createDefaultAccountability } from '../../permissions/utils/create-default-accountability.js'; import { AuthenticationService } from '../../services/authentication.js'; import type { AuthenticationMode, User } from '../../types/index.js'; import asyncHandler from '../../utils/async-handler.js'; @@ -62,10 +63,9 @@ export function createLocalAuthRouter(provider: string): Router { const STALL_TIME = env['LOGIN_STALL_TIME'] as number; const timeStart = performance.now(); - const accountability: Accountability = { + const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req), - role: null, - }; + }); const userAgent = req.get('user-agent')?.substring(0, 1024); if (userAgent) accountability.userAgent = userAgent; diff 
--git a/api/src/auth/drivers/oauth2.ts b/api/src/auth/drivers/oauth2.ts index a5dedc709c..c52d5f80ef 100644 --- a/api/src/auth/drivers/oauth2.ts +++ b/api/src/auth/drivers/oauth2.ts @@ -22,16 +22,17 @@ import getDatabase from '../../database/index.js'; import emitter from '../../emitter.js'; import { useLogger } from '../../logger/index.js'; import { respond } from '../../middleware/respond.js'; +import { createDefaultAccountability } from '../../permissions/utils/create-default-accountability.js'; import { AuthenticationService } from '../../services/authentication.js'; import { UsersService } from '../../services/users.js'; import type { AuthData, AuthDriverOptions, User } from '../../types/index.js'; import asyncHandler from '../../utils/async-handler.js'; import { getConfigFromEnv } from '../../utils/get-config-from-env.js'; import { getIPFromReq } from '../../utils/get-ip-from-req.js'; +import { getSecret } from '../../utils/get-secret.js'; import { isLoginRedirectAllowed } from '../../utils/is-login-redirect-allowed.js'; import { Url } from '../../utils/url.js'; import { LocalAuthDriver } from './local.js'; -import { getSecret } from '../../utils/get-secret.js'; export class OAuth2AuthDriver extends LocalAuthDriver { client: Client; @@ -353,10 +354,9 @@ export function createOAuth2AuthRouter(providerName: string): Router { const { verifier, redirect, prompt } = tokenData; - const accountability: Accountability = { + const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req), - role: null, - }; + }); const userAgent = req.get('user-agent')?.substring(0, 1024); if (userAgent) accountability.userAgent = userAgent; diff --git a/api/src/auth/drivers/openid.ts b/api/src/auth/drivers/openid.ts index 7b4a104ba2..a7a8b7c5c7 100644 --- a/api/src/auth/drivers/openid.ts +++ b/api/src/auth/drivers/openid.ts @@ -22,6 +22,7 @@ import getDatabase from '../../database/index.js'; import emitter from '../../emitter.js'; import { useLogger } from 
'../../logger/index.js'; import { respond } from '../../middleware/respond.js'; +import { createDefaultAccountability } from '../../permissions/utils/create-default-accountability.js'; import { AuthenticationService } from '../../services/authentication.js'; import { UsersService } from '../../services/users.js'; import type { AuthData, AuthDriverOptions, User } from '../../types/index.js'; @@ -383,10 +384,7 @@ export function createOpenIDAuthRouter(providerName: string): Router { const { verifier, redirect, prompt } = tokenData; - const accountability: Accountability = { - ip: getIPFromReq(req), - role: null, - }; + const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req) }); const userAgent = req.get('user-agent')?.substring(0, 1024); if (userAgent) accountability.userAgent = userAgent; diff --git a/api/src/cache.ts b/api/src/cache.ts index 5499b098da..43b3477da4 100644 --- a/api/src/cache.ts +++ b/api/src/cache.ts @@ -9,6 +9,7 @@ import { compress, decompress } from './utils/compress.js'; import { getConfigFromEnv } from './utils/get-config-from-env.js'; import { getMilliseconds } from './utils/get-milliseconds.js'; import { validateEnv } from './utils/validate-env.js'; +import { clearCache as clearPermissionCache } from './permissions/cache.js'; import { createRequire } from 'node:module'; @@ -97,6 +98,10 @@ export async function clearSystemCache(opts?: { } await localSchemaCache.clear(); + + // Since a lot of cached permission function rely on the schema it needs to be cleared as well + await clearPermissionCache(); + messenger.publish('schemaChanged', { autoPurgeCache: opts?.autoPurgeCache }); } diff --git a/api/src/cli/commands/bootstrap/index.ts b/api/src/cli/commands/bootstrap/index.ts index 1127922afb..74420fa6bc 100644 --- a/api/src/cli/commands/bootstrap/index.ts +++ b/api/src/cli/commands/bootstrap/index.ts @@ -9,11 +9,13 @@ import getDatabase, { import runMigrations from '../../../database/migrations/run.js'; import 
installDatabase from '../../../database/seeds/run.js'; import { useLogger } from '../../../logger/index.js'; +import { AccessService } from '../../../services/access.js'; +import { PoliciesService } from '../../../services/policies.js'; import { RolesService } from '../../../services/roles.js'; import { SettingsService } from '../../../services/settings.js'; import { UsersService } from '../../../services/users.js'; import { getSchema } from '../../../utils/get-schema.js'; -import { defaultAdminRole, defaultAdminUser } from '../../utils/defaults.js'; +import { defaultAdminPolicy, defaultAdminRole, defaultAdminUser } from '../../utils/defaults.js'; export default async function bootstrap({ skipAdminInit }: { skipAdminInit?: boolean }): Promise { const logger = useLogger(); @@ -82,8 +84,14 @@ async function createDefaultAdmin(schema: SchemaOverview) { const { nanoid } = await import('nanoid'); logger.info('Setting up first admin role...'); + const accessService = new AccessService({ schema }); + const policiesService = new PoliciesService({ schema }); const rolesService = new RolesService({ schema }); + const role = await rolesService.createOne(defaultAdminRole); + const policy = await policiesService.createOne(defaultAdminPolicy); + + await accessService.createOne({ policy, role }); logger.info('Adding first admin user...'); const usersService = new UsersService({ schema }); @@ -104,5 +112,5 @@ async function createDefaultAdmin(schema: SchemaOverview) { const token = env['ADMIN_TOKEN'] ?? 
null; - await usersService.createOne({ email: adminEmail, password: adminPassword, token, role, ...defaultAdminUser }); + await usersService.createOne({ ...defaultAdminUser, email: adminEmail, password: adminPassword, token, role }); } diff --git a/api/src/cli/commands/init/index.ts b/api/src/cli/commands/init/index.ts index 411c31ec34..00b3e3739a 100644 --- a/api/src/cli/commands/init/index.ts +++ b/api/src/cli/commands/init/index.ts @@ -11,7 +11,7 @@ import { generateHash } from '../../../utils/generate-hash.js'; import type { Credentials } from '../../utils/create-db-connection.js'; import createDBConnection from '../../utils/create-db-connection.js'; import createEnv from '../../utils/create-env/index.js'; -import { defaultAdminRole, defaultAdminUser } from '../../utils/defaults.js'; +import { defaultAdminPolicy, defaultAdminRole, defaultAdminUser } from '../../utils/defaults.js'; import { drivers, getDriverForClient } from '../../utils/drivers.js'; import { databaseQuestions } from './questions.js'; @@ -98,20 +98,19 @@ export default async function init(): Promise { firstUser.password = await generateHash(firstUser.password); - const userID = randomUUID(); - const roleID = randomUUID(); + const role = randomUUID(); + const policy = randomUUID(); - await db('directus_roles').insert({ - id: roleID, - ...defaultAdminRole, - }); + await db('directus_roles').insert({ ...defaultAdminRole, id: role }); + await db('directus_policies').insert({ ...defaultAdminPolicy, id: policy }); + await db('directus_access').insert({ id: randomUUID(), role, policy }); await db('directus_users').insert({ - id: userID, + ...defaultAdminUser, + id: randomUUID(), email: firstUser.email, password: firstUser.password, - role: roleID, - ...defaultAdminUser, + role, }); await db.destroy(); diff --git a/api/src/cli/utils/defaults.ts b/api/src/cli/utils/defaults.ts index 38ea43502d..562305df70 100644 --- a/api/src/cli/utils/defaults.ts +++ b/api/src/cli/utils/defaults.ts @@ -1,12 +1,21 @@ 
-export const defaultAdminRole = { +import type { Policy, Role, User } from '@directus/types'; + +export const defaultAdminRole: Partial = { name: 'Administrator', icon: 'verified', - admin_access: true, description: '$t:admin_description', }; -export const defaultAdminUser = { +export const defaultAdminUser: Partial = { status: 'active', first_name: 'Admin', last_name: 'User', }; + +export const defaultAdminPolicy: Partial = { + name: 'Administrator', + icon: 'verified', + admin_access: true, + app_access: true, + description: '$t:admin_description', +}; diff --git a/api/src/constants.ts b/api/src/constants.ts index a1c8d1e1ac..8c334f1ce9 100644 --- a/api/src/constants.ts +++ b/api/src/constants.ts @@ -61,7 +61,7 @@ export const DEFAULT_AUTH_PROVIDER = 'default'; export const COLUMN_TRANSFORMS = ['year', 'month', 'day', 'weekday', 'hour', 'minute', 'second']; -export const GENERATE_SPECIAL = ['uuid', 'date-created', 'role-created', 'user-created']; +export const GENERATE_SPECIAL = ['uuid', 'date-created', 'role-created', 'user-created'] as const; export const UUID_REGEX = '[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}'; diff --git a/api/src/controllers/access.ts b/api/src/controllers/access.ts new file mode 100644 index 0000000000..525c951f9a --- /dev/null +++ b/api/src/controllers/access.ts @@ -0,0 +1,201 @@ +import { ErrorCode, isDirectusError } from '@directus/errors'; +import type { PrimaryKey } from '@directus/types'; +import express from 'express'; +import { respond } from '../middleware/respond.js'; +import useCollection from '../middleware/use-collection.js'; +import { validateBatch } from '../middleware/validate-batch.js'; +import { MetaService } from '../services/meta.js'; +import { AccessService } from '../services/access.js'; +import asyncHandler from '../utils/async-handler.js'; +import { sanitizeQuery } from '../utils/sanitize-query.js'; + +const router = express.Router(); + +router.use(useCollection('directus_access')); + 
+router.post( + '/', + asyncHandler(async (req, res, next) => { + const service = new AccessService({ + accountability: req.accountability, + schema: req.schema, + }); + + const savedKeys: PrimaryKey[] = []; + + if (Array.isArray(req.body)) { + const keys = await service.createMany(req.body); + savedKeys.push(...keys); + } else { + const key = await service.createOne(req.body); + savedKeys.push(key); + } + + try { + if (Array.isArray(req.body)) { + const items = await service.readMany(savedKeys, req.sanitizedQuery); + res.locals['payload'] = { data: items }; + } else { + const item = await service.readOne(savedKeys[0]!, req.sanitizedQuery); + res.locals['payload'] = { data: item }; + } + } catch (error: any) { + if (isDirectusError(error, ErrorCode.Forbidden)) { + return next(); + } + + throw error; + } + + return next(); + }), + respond, +); + +const readHandler = asyncHandler(async (req, res, next) => { + const service = new AccessService({ + accountability: req.accountability, + schema: req.schema, + }); + + const metaService = new MetaService({ + accountability: req.accountability, + schema: req.schema, + }); + + let result; + + if (req.body.keys) { + result = await service.readMany(req.body.keys, req.sanitizedQuery); + } else { + result = await service.readByQuery(req.sanitizedQuery); + } + + const meta = await metaService.getMetaForQuery('directus_access', req.sanitizedQuery); + + res.locals['payload'] = { data: result, meta }; + return next(); +}); + +router.get('/', validateBatch('read'), readHandler, respond); +router.search('/', validateBatch('read'), readHandler, respond); + +router.get( + '/:pk', + asyncHandler(async (req, res, next) => { + if (req.path.endsWith('me')) return next(); + + const service = new AccessService({ + accountability: req.accountability, + schema: req.schema, + }); + + const record = await service.readOne(req.params['pk']!, req.sanitizedQuery); + + res.locals['payload'] = { data: record }; + return next(); + }), + respond, +); + 
+router.patch( + '/', + validateBatch('update'), + asyncHandler(async (req, res, next) => { + const service = new AccessService({ + accountability: req.accountability, + schema: req.schema, + }); + + let keys: PrimaryKey[] = []; + + if (Array.isArray(req.body)) { + keys = await service.updateBatch(req.body); + } else if (req.body.keys) { + keys = await service.updateMany(req.body.keys, req.body.data); + } else { + const sanitizedQuery = sanitizeQuery(req.body.query, req.accountability); + keys = await service.updateByQuery(sanitizedQuery, req.body.data); + } + + try { + const result = await service.readMany(keys, req.sanitizedQuery); + res.locals['payload'] = { data: result }; + } catch (error: any) { + if (isDirectusError(error, ErrorCode.Forbidden)) { + return next(); + } + + throw error; + } + + return next(); + }), + respond, +); + +router.patch( + '/:pk', + asyncHandler(async (req, res, next) => { + const service = new AccessService({ + accountability: req.accountability, + schema: req.schema, + }); + + const primaryKey = await service.updateOne(req.params['pk']!, req.body); + + try { + const item = await service.readOne(primaryKey, req.sanitizedQuery); + res.locals['payload'] = { data: item || null }; + } catch (error: any) { + if (isDirectusError(error, ErrorCode.Forbidden)) { + return next(); + } + + throw error; + } + + return next(); + }), + respond, +); + +router.delete( + '/', + validateBatch('delete'), + asyncHandler(async (req, _res, next) => { + const service = new AccessService({ + accountability: req.accountability, + schema: req.schema, + }); + + if (Array.isArray(req.body)) { + await service.deleteMany(req.body); + } else if (req.body.keys) { + await service.deleteMany(req.body.keys); + } else { + const sanitizedQuery = sanitizeQuery(req.body.query, req.accountability); + await service.deleteByQuery(sanitizedQuery); + } + + return next(); + }), + respond, +); + +router.delete( + '/:pk', + asyncHandler(async (req, _res, next) => { + const service 
= new AccessService({ + accountability: req.accountability, + schema: req.schema, + }); + + await service.deleteOne(req.params['pk']!); + + return next(); + }), + respond, +); + +export default router; diff --git a/api/src/controllers/auth.ts b/api/src/controllers/auth.ts index 3869f995c8..f76c16e4f7 100644 --- a/api/src/controllers/auth.ts +++ b/api/src/controllers/auth.ts @@ -13,6 +13,7 @@ import { import { DEFAULT_AUTH_PROVIDER, REFRESH_COOKIE_OPTIONS, SESSION_COOKIE_OPTIONS } from '../constants.js'; import { useLogger } from '../logger/index.js'; import { respond } from '../middleware/respond.js'; +import { createDefaultAccountability } from '../permissions/utils/create-default-accountability.js'; import { AuthenticationService } from '../services/authentication.js'; import { UsersService } from '../services/users.js'; import type { AuthenticationMode } from '../types/auth.js'; @@ -102,10 +103,7 @@ function getCurrentRefreshToken(req: Request, mode: AuthenticationMode): string router.post( '/refresh', asyncHandler(async (req, res, next) => { - const accountability: Accountability = { - ip: getIPFromReq(req), - role: null, - }; + const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req) }); const userAgent = req.get('user-agent')?.substring(0, 1024); if (userAgent) accountability.userAgent = userAgent; @@ -156,10 +154,7 @@ router.post( router.post( '/logout', asyncHandler(async (req, res, next) => { - const accountability: Accountability = { - ip: getIPFromReq(req), - role: null, - }; + const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req) }); const userAgent = req.get('user-agent')?.substring(0, 1024); if (userAgent) accountability.userAgent = userAgent; @@ -203,10 +198,7 @@ router.post( throw new InvalidPayloadError({ reason: `"email" field is required` }); } - const accountability: Accountability = { - ip: getIPFromReq(req), - role: null, - }; + const accountability: Accountability = 
createDefaultAccountability({ ip: getIPFromReq(req) }); const userAgent = req.get('user-agent')?.substring(0, 1024); if (userAgent) accountability.userAgent = userAgent; @@ -242,10 +234,7 @@ router.post( throw new InvalidPayloadError({ reason: `"password" field is required` }); } - const accountability: Accountability = { - ip: getIPFromReq(req), - role: null, - }; + const accountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req) }); const userAgent = req.get('user-agent')?.substring(0, 1024); if (userAgent) accountability.userAgent = userAgent; diff --git a/api/src/controllers/permissions.ts b/api/src/controllers/permissions.ts index 1c24c18556..1f6a38684b 100644 --- a/api/src/controllers/permissions.ts +++ b/api/src/controllers/permissions.ts @@ -1,11 +1,13 @@ -import { ErrorCode, isDirectusError } from '@directus/errors'; +import { ErrorCode, ForbiddenError, isDirectusError } from '@directus/errors'; import type { PrimaryKey } from '@directus/types'; import express from 'express'; +import getDatabase from '../database/index.js'; import { respond } from '../middleware/respond.js'; import useCollection from '../middleware/use-collection.js'; import { validateBatch } from '../middleware/validate-batch.js'; +import { fetchAccountabilityCollectionAccess } from '../permissions/modules/fetch-accountability-collection-access/fetch-accountability-collection-access.js'; import { MetaService } from '../services/meta.js'; -import { PermissionsService } from '../services/permissions/index.js'; +import { PermissionsService } from '../services/permissions.js'; import asyncHandler from '../utils/async-handler.js'; import { sanitizeQuery } from '../utils/sanitize-query.js'; @@ -86,6 +88,22 @@ const readHandler = asyncHandler(async (req, res, next) => { router.get('/', validateBatch('read'), readHandler, respond); router.search('/', validateBatch('read'), readHandler, respond); +router.get( + '/me', + asyncHandler(async (req, res, next) => { + if 
(!req.accountability?.user && !req.accountability?.role) throw new ForbiddenError(); + + const result = await fetchAccountabilityCollectionAccess(req.accountability, { + schema: req.schema, + knex: getDatabase(), + }); + + res.locals['payload'] = { data: result }; + return next(); + }), + respond, +); + router.get( '/:pk', asyncHandler(async (req, res, next) => { diff --git a/api/src/controllers/policies.ts b/api/src/controllers/policies.ts new file mode 100644 index 0000000000..e567af9461 --- /dev/null +++ b/api/src/controllers/policies.ts @@ -0,0 +1,229 @@ +import { ErrorCode, ForbiddenError, isDirectusError } from '@directus/errors'; +import type { PrimaryKey } from '@directus/types'; +import express from 'express'; +import getDatabase from '../database/index.js'; +import { respond } from '../middleware/respond.js'; +import useCollection from '../middleware/use-collection.js'; +import { validateBatch } from '../middleware/validate-batch.js'; +import { fetchAccountabilityPolicyGlobals } from '../permissions/modules/fetch-accountability-policy-globals/fetch-accountability-policy-globals.js'; +import { MetaService } from '../services/meta.js'; +import { PoliciesService } from '../services/policies.js'; +import asyncHandler from '../utils/async-handler.js'; +import { sanitizeQuery } from '../utils/sanitize-query.js'; + +const router = express.Router(); + +router.use(useCollection('directus_policies')); + +router.post( + '/', + asyncHandler(async (req, res, next) => { + const service = new PoliciesService({ + accountability: req.accountability, + schema: req.schema, + }); + + const savedKeys: PrimaryKey[] = []; + + if (Array.isArray(req.body)) { + const keys = await service.createMany(req.body); + savedKeys.push(...keys); + } else { + const key = await service.createOne(req.body); + savedKeys.push(key); + } + + try { + if (Array.isArray(req.body)) { + const items = await service.readMany(savedKeys, req.sanitizedQuery); + res.locals['payload'] = { data: items }; + } 
else { + const item = await service.readOne(savedKeys[0]!, req.sanitizedQuery); + res.locals['payload'] = { data: item }; + } + } catch (error: any) { + if (isDirectusError(error, ErrorCode.Forbidden)) { + return next(); + } + + throw error; + } + + return next(); + }), + respond, +); + +const readHandler = asyncHandler(async (req, res, next) => { + const service = new PoliciesService({ + accountability: req.accountability, + schema: req.schema, + }); + + const metaService = new MetaService({ + accountability: req.accountability, + schema: req.schema, + }); + + let result; + + if (req.body.keys) { + result = await service.readMany(req.body.keys, req.sanitizedQuery); + } else { + result = await service.readByQuery(req.sanitizedQuery); + } + + const meta = await metaService.getMetaForQuery('directus_policies', req.sanitizedQuery); + + res.locals['payload'] = { data: result, meta }; + return next(); +}); + +router.get('/', validateBatch('read'), readHandler, respond); +router.search('/', validateBatch('read'), readHandler, respond); + +router.get( + '/me/globals', + asyncHandler(async (req, res, next) => { + try { + if (!req.accountability?.user && !req.accountability?.role) throw new ForbiddenError(); + + const result = await fetchAccountabilityPolicyGlobals(req.accountability, { + schema: req.schema, + knex: getDatabase(), + }); + + res.locals['payload'] = { data: result }; + } catch (error: any) { + if (isDirectusError(error, ErrorCode.Forbidden)) { + res.locals['payload'] = { data: { app_access: false } }; + return next(); + } + + throw error; + } + + return next(); + }), + respond, +); + +router.get( + '/:pk', + asyncHandler(async (req, res, next) => { + if (req.path.endsWith('me')) return next(); + + const service = new PoliciesService({ + accountability: req.accountability, + schema: req.schema, + }); + + const record = await service.readOne(req.params['pk']!, req.sanitizedQuery); + + res.locals['payload'] = { data: record }; + return next(); + }), + respond, 
+); + +router.patch( + '/', + validateBatch('update'), + asyncHandler(async (req, res, next) => { + const service = new PoliciesService({ + accountability: req.accountability, + schema: req.schema, + }); + + let keys: PrimaryKey[] = []; + + if (Array.isArray(req.body)) { + keys = await service.updateBatch(req.body); + } else if (req.body.keys) { + keys = await service.updateMany(req.body.keys, req.body.data); + } else { + const sanitizedQuery = sanitizeQuery(req.body.query, req.accountability); + keys = await service.updateByQuery(sanitizedQuery, req.body.data); + } + + try { + const result = await service.readMany(keys, req.sanitizedQuery); + res.locals['payload'] = { data: result }; + } catch (error: any) { + if (isDirectusError(error, ErrorCode.Forbidden)) { + return next(); + } + + throw error; + } + + return next(); + }), + respond, +); + +router.patch( + '/:pk', + asyncHandler(async (req, res, next) => { + const service = new PoliciesService({ + accountability: req.accountability, + schema: req.schema, + }); + + const primaryKey = await service.updateOne(req.params['pk']!, req.body); + + try { + const item = await service.readOne(primaryKey, req.sanitizedQuery); + res.locals['payload'] = { data: item || null }; + } catch (error: any) { + if (isDirectusError(error, ErrorCode.Forbidden)) { + return next(); + } + + throw error; + } + + return next(); + }), + respond, +); + +router.delete( + '/', + validateBatch('delete'), + asyncHandler(async (req, _res, next) => { + const service = new PoliciesService({ + accountability: req.accountability, + schema: req.schema, + }); + + if (Array.isArray(req.body)) { + await service.deleteMany(req.body); + } else if (req.body.keys) { + await service.deleteMany(req.body.keys); + } else { + const sanitizedQuery = sanitizeQuery(req.body.query, req.accountability); + await service.deleteByQuery(sanitizedQuery); + } + + return next(); + }), + respond, +); + +router.delete( + '/:pk', + asyncHandler(async (req, _res, next) => { + 
const service = new PoliciesService({ + accountability: req.accountability, + schema: req.schema, + }); + + await service.deleteOne(req.params['pk']!); + + return next(); + }), + respond, +); + +export default router; diff --git a/api/src/controllers/roles.ts b/api/src/controllers/roles.ts index 1bdb4e213b..6233f1b385 100644 --- a/api/src/controllers/roles.ts +++ b/api/src/controllers/roles.ts @@ -1,4 +1,4 @@ -import { ErrorCode, isDirectusError } from '@directus/errors'; +import { ErrorCode, ForbiddenError, isDirectusError } from '@directus/errors'; import type { PrimaryKey } from '@directus/types'; import express from 'express'; import { respond } from '../middleware/respond.js'; @@ -73,6 +73,36 @@ const readHandler = asyncHandler(async (req, res, next) => { router.get('/', validateBatch('read'), readHandler, respond); router.search('/', validateBatch('read'), readHandler, respond); +router.get( + '/me', + asyncHandler(async (req, res, next) => { + if (!req.accountability?.user && !req.accountability?.role) throw new ForbiddenError(); + + const service = new RolesService({ + accountability: req.accountability, + schema: req.schema, + }); + + const query = { ...req.sanitizedQuery, limit: -1 }; + + try { + const roles = await service.readMany(req.accountability.roles, query); + + res.locals['payload'] = { data: roles || null }; + } catch (error: any) { + if (isDirectusError(error, ErrorCode.Forbidden)) { + res.locals['payload'] = { data: req.accountability.roles.map((id) => ({ id })) }; + return next(); + } + + throw error; + } + + return next(); + }), + respond, +); + router.get( '/:pk', asyncHandler(async (req, res, next) => { diff --git a/api/src/controllers/tus.ts b/api/src/controllers/tus.ts index 22ac71f22c..613ecc1613 100644 --- a/api/src/controllers/tus.ts +++ b/api/src/controllers/tus.ts @@ -1,12 +1,12 @@ +import type { PermissionsAction } from '@directus/types'; import { Router } from 'express'; +import { RESUMABLE_UPLOADS } from '../constants.js'; 
+import getDatabase from '../database/index.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; +import { createTusServer } from '../services/tus/index.js'; +import asyncHandler from '../utils/async-handler.js'; import { getSchema } from '../utils/get-schema.js'; import { scheduleSynchronizedJob, validateCron } from '../utils/schedule.js'; -import { createTusServer } from '../services/tus/index.js'; -import { AuthorizationService } from '../services/authorization.js'; -import asyncHandler from '../utils/async-handler.js'; -import type { PermissionsAction } from '@directus/types'; -import { ForbiddenError } from '@directus/errors'; -import { RESUMABLE_UPLOADS } from '../constants.js'; const mapAction = (method: string): PermissionsAction => { switch (method) { @@ -22,30 +22,20 @@ const mapAction = (method: string): PermissionsAction => { }; const checkFileAccess = asyncHandler(async (req, _res, next) => { - const auth = new AuthorizationService({ - accountability: req.accountability, - schema: req.schema, - }); - - if (!req.accountability?.admin) { + if (req.accountability) { const action = mapAction(req.method); - if (action === 'create') { - // checkAccess doesn't seem to work as expected for "create" actions - const hasPermission = Boolean( - req.accountability?.permissions?.find((permission) => { - return permission.collection === 'directus_files' && permission.action === action; - }), - ); - - if (!hasPermission) throw new ForbiddenError(); - } else { - try { - await auth.checkAccess(action, 'directus_files'); - } catch (e) { - throw new ForbiddenError(); - } - } + await validateAccess( + { + action, + collection: 'directus_files', + accountability: req.accountability, + }, + { + schema: req.schema, + knex: getDatabase(), + }, + ); } return next(); diff --git a/api/src/controllers/users.ts b/api/src/controllers/users.ts index 99ed401604..49a20a54a9 100644 --- a/api/src/controllers/users.ts +++ 
b/api/src/controllers/users.ts @@ -5,7 +5,7 @@ import { InvalidPayloadError, isDirectusError, } from '@directus/errors'; -import type { PrimaryKey, RegisterUserInput, Role } from '@directus/types'; +import type { PrimaryKey, RegisterUserInput } from '@directus/types'; import express from 'express'; import Joi from 'joi'; import checkRateLimit from '../middleware/rate-limiter-registration.js'; @@ -14,7 +14,6 @@ import useCollection from '../middleware/use-collection.js'; import { validateBatch } from '../middleware/validate-batch.js'; import { AuthenticationService } from '../services/authentication.js'; import { MetaService } from '../services/meta.js'; -import { RolesService } from '../services/roles.js'; import { TFAService } from '../services/tfa.js'; import { UsersService } from '../services/users.js'; import asyncHandler from '../utils/async-handler.js'; @@ -376,38 +375,6 @@ router.post( throw new InvalidPayloadError({ reason: `"otp" is required` }); } - // Override permissions only when enforce TFA is enabled in role - if (req.accountability.role) { - const rolesService = new RolesService({ - schema: req.schema, - }); - - const role = (await rolesService.readOne(req.accountability.role)) as Role; - - if (role && role.enforce_tfa) { - const existingPermission = await req.accountability.permissions?.find( - (p) => p.collection === 'directus_users' && p.action === 'update', - ); - - if (existingPermission) { - existingPermission.fields = ['tfa_secret']; - existingPermission.permissions = { id: { _eq: req.accountability.user } }; - existingPermission.presets = null; - existingPermission.validation = null; - } else { - (req.accountability.permissions || (req.accountability.permissions = [])).push({ - action: 'update', - collection: 'directus_users', - fields: ['tfa_secret'], - permissions: { id: { _eq: req.accountability.user } }, - presets: null, - role: req.accountability.role, - validation: null, - }); - } - } - } - const service = new TFAService({ 
accountability: req.accountability, schema: req.schema, @@ -431,38 +398,6 @@ router.post( throw new InvalidPayloadError({ reason: `"otp" is required` }); } - // Override permissions only when enforce TFA is enabled in role - if (req.accountability.role) { - const rolesService = new RolesService({ - schema: req.schema, - }); - - const role = (await rolesService.readOne(req.accountability.role)) as Role; - - if (role && role.enforce_tfa) { - const existingPermission = await req.accountability.permissions?.find( - (p) => p.collection === 'directus_users' && p.action === 'update', - ); - - if (existingPermission) { - existingPermission.fields = ['tfa_secret']; - existingPermission.permissions = { id: { _eq: req.accountability.user } }; - existingPermission.presets = null; - existingPermission.validation = null; - } else { - (req.accountability.permissions || (req.accountability.permissions = [])).push({ - action: 'update', - collection: 'directus_users', - fields: ['tfa_secret'], - permissions: { id: { _eq: req.accountability.user } }, - presets: null, - role: req.accountability.role, - validation: null, - }); - } - } - } - const service = new TFAService({ accountability: req.accountability, schema: req.schema, diff --git a/api/src/database/get-ast-from-query/get-ast-from-query.ts b/api/src/database/get-ast-from-query/get-ast-from-query.ts new file mode 100644 index 0000000000..7ff89f7b7b --- /dev/null +++ b/api/src/database/get-ast-from-query/get-ast-from-query.ts @@ -0,0 +1,121 @@ +/** + * Generate an AST based on a given collection and query + */ + +import type { Accountability, Query, SchemaOverview } from '@directus/types'; +import type { Knex } from 'knex'; +import { cloneDeep, uniq } from 'lodash-es'; +import { fetchAllowedFields } from '../../permissions/modules/fetch-allowed-fields/fetch-allowed-fields.js'; +import type { AST } from '../../types/index.js'; +import { parseFields } from './lib/parse-fields.js'; + +export interface GetAstFromQueryOptions { + 
collection: string; + query: Query; + accountability: Accountability | null; +} + +export interface GetAstFromQueryContext { + knex: Knex; + schema: SchemaOverview; +} + +export async function getAstFromQuery(options: GetAstFromQueryOptions, context: GetAstFromQueryContext): Promise { + options.query = cloneDeep(options.query); + + const ast: AST = { + type: 'root', + name: options.collection, + query: options.query, + children: [], + cases: [], + }; + + let fields = ['*']; + + if (options.query.fields) { + fields = options.query.fields; + } + + /** + * When using aggregate functions, you can't have any other regular fields + * selected. This makes sure you never end up in a non-aggregate fields selection error + */ + if (Object.keys(options.query.aggregate || {}).length > 0) { + fields = []; + } + + /** + * Similarly, when grouping on a specific field, you can't have other non-aggregated fields. + * The group query will override the fields query + */ + if (options.query.group) { + fields = options.query.group; + } + + fields = uniq(fields); + + const deep = options.query.deep || {}; + + // Prevent fields/deep from showing up in the query object in further use + delete options.query.fields; + delete options.query.deep; + + if (!options.query.sort) { + // We'll default to the primary key for the standard sort output + let sortField: string | null = context.schema.collections[options.collection]!.primary; + + // If a custom manual sort field is configured, use that + if (context.schema.collections[options.collection]?.sortField) { + sortField = context.schema.collections[options.collection]!.sortField as string; + } + + if (options.accountability && options.accountability.admin === false) { + // Verify that the user has access to the sort field + + const allowedFields = await fetchAllowedFields( + { + collection: options.collection, + action: 'read', + accountability: options.accountability, + }, + context, + ); + + if (allowedFields.length === 0) { + sortField = 
null; + } else if (allowedFields.includes('*') === false && allowedFields.includes(sortField) === false) { + // If the sort field is not allowed, default to the first allowed field + sortField = allowedFields[0]!; + } + } + + // When group by is used, default to the first column provided in the group by clause + if (options.query.group?.[0]) { + sortField = options.query.group[0]; + } + + if (sortField) { + options.query.sort = [sortField]; + } + } + + // When no group by is supplied, but an aggregate function is used, only a single row will be + // returned. In those cases, we'll ignore the sort field altogether + if (options.query.aggregate && Object.keys(options.query.aggregate).length && !options.query.group?.[0]) { + delete options.query.sort; + } + + ast.children = await parseFields( + { + parentCollection: options.collection, + fields, + query: options.query, + deep, + accountability: options.accountability, + }, + context, + ); + + return ast; +} diff --git a/api/src/database/get-ast-from-query/lib/convert-wildcards.ts b/api/src/database/get-ast-from-query/lib/convert-wildcards.ts new file mode 100644 index 0000000000..3574c03ef8 --- /dev/null +++ b/api/src/database/get-ast-from-query/lib/convert-wildcards.ts @@ -0,0 +1,110 @@ +import type { Accountability, Query, SchemaOverview } from '@directus/types'; +import type { Knex } from 'knex'; +import { cloneDeep } from 'lodash-es'; +import { fetchAllowedFields } from '../../../permissions/modules/fetch-allowed-fields/fetch-allowed-fields.js'; +import { getRelation } from '../utils/get-relation.js'; + +export interface ConvertWildcardsOptions { + parentCollection: string; + fields: string[]; + query: Query; + accountability: Accountability | null; +} + +export interface ConvertWildCardsContext { + schema: SchemaOverview; + knex: Knex; +} + +export async function convertWildcards(options: ConvertWildcardsOptions, context: ConvertWildCardsContext) { + const fields = cloneDeep(options.fields); + + const 
fieldsInCollection = Object.entries(context.schema.collections[options.parentCollection]!.fields).map( + ([name]) => name, + ); + + let allowedFields: string[] | null = fieldsInCollection; + + if (options.accountability && options.accountability.admin === false) { + allowedFields = await fetchAllowedFields( + { + collection: options.parentCollection, + action: 'read', + accountability: options.accountability, + }, + context, + ); + } + + if (!allowedFields || allowedFields.length === 0) return []; + + // In case of full read permissions + if (allowedFields[0] === '*') allowedFields = fieldsInCollection; + + for (let index = 0; index < fields.length; index++) { + const fieldKey = fields[index]!; + + if (fieldKey.includes('*') === false) continue; + + if (fieldKey === '*') { + const aliases = Object.keys(options.query.alias ?? {}); + + // Set to all fields in collection + if (allowedFields.includes('*')) { + fields.splice(index, 1, ...fieldsInCollection, ...aliases); + } else { + // Set to all allowed fields + const allowedAliases = aliases.filter((fieldKey) => { + const name = options.query.alias![fieldKey]!; + return allowedFields!.includes(name); + }); + + fields.splice(index, 1, ...allowedFields, ...allowedAliases); + } + } + + // Swap *.* case for *,.*,.* + if (fieldKey.includes('.') && fieldKey.split('.')[0] === '*') { + const parts = fieldKey.split('.'); + + const relationalFields = allowedFields.includes('*') + ? context.schema.relations + .filter( + (relation) => + relation.collection === options.parentCollection || + relation.related_collection === options.parentCollection, + ) + .map((relation) => { + const isMany = relation.collection === options.parentCollection; + return isMany ? 
relation.field : relation.meta?.one_field; + }) + : allowedFields.filter((fieldKey) => !!getRelation(context.schema, options.parentCollection, fieldKey)); + + const nonRelationalFields = allowedFields.filter((fieldKey) => relationalFields.includes(fieldKey) === false); + + const aliasFields = Object.keys(options.query.alias ?? {}).map((fieldKey) => { + const name = options.query.alias![fieldKey]; + + if (relationalFields.includes(name)) { + return `${fieldKey}.${parts.slice(1).join('.')}`; + } + + return fieldKey; + }); + + fields.splice( + index, + 1, + ...[ + ...relationalFields.map((relationalField) => { + return `${relationalField}.${parts.slice(1).join('.')}`; + }), + ...nonRelationalFields, + ...aliasFields, + ], + ); + } + } + + return fields; +} diff --git a/api/src/database/get-ast-from-query/lib/parse-fields.ts b/api/src/database/get-ast-from-query/lib/parse-fields.ts new file mode 100644 index 0000000000..4d8be36e09 --- /dev/null +++ b/api/src/database/get-ast-from-query/lib/parse-fields.ts @@ -0,0 +1,277 @@ +import { REGEX_BETWEEN_PARENS } from '@directus/constants'; +import type { Accountability, Query, SchemaOverview } from '@directus/types'; +import type { Knex } from 'knex'; +import { isEmpty } from 'lodash-es'; +import { fetchPermissions } from '../../../permissions/lib/fetch-permissions.js'; +import { fetchPolicies } from '../../../permissions/lib/fetch-policies.js'; +import type { FieldNode, FunctionFieldNode, NestedCollectionNode } from '../../../types/index.js'; +import { getRelationType } from '../../../utils/get-relation-type.js'; +import { getDeepQuery } from '../utils/get-deep-query.js'; +import { getRelatedCollection } from '../utils/get-related-collection.js'; +import { getRelation } from '../utils/get-relation.js'; +import { convertWildcards } from './convert-wildcards.js'; + +interface CollectionScope { + [collectionScope: string]: string[]; +} + +export interface ParseFieldsOptions { + accountability: Accountability | null; + 
parentCollection: string; + fields: string[] | null; + query: Query; + deep?: Record; +} + +export interface ParseFieldsContext { + schema: SchemaOverview; + knex: Knex; +} + +export async function parseFields( + options: ParseFieldsOptions, + context: ParseFieldsContext, +): Promise<[] | (NestedCollectionNode | FieldNode | FunctionFieldNode)[]> { + let { fields } = options; + if (!fields) return []; + + fields = await convertWildcards( + { + fields, + parentCollection: options.parentCollection, + query: options.query, + accountability: options.accountability, + }, + context, + ); + + if (!fields || !Array.isArray(fields)) return []; + + const children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[] = []; + + const policies = + options.accountability && options.accountability.admin === false + ? await fetchPolicies(options.accountability, context) + : null; + + const relationalStructure: Record = Object.create(null); + + for (const fieldKey of fields) { + let name = fieldKey; + + if (options.query.alias) { + // check for field alias (is one of the key) + if (name in options.query.alias) { + name = options.query.alias[fieldKey]!; + } + } + + const isRelational = + name.includes('.') || + // We'll always treat top level o2m fields as a related item. 
This is an alias field, otherwise it won't return + // anything + !!context.schema.relations.find( + (relation) => relation.related_collection === options.parentCollection && relation.meta?.one_field === name, + ); + + if (isRelational) { + // field is relational + const parts = fieldKey.split('.'); + + let rootField = parts[0]!; + let collectionScope: string | null = null; + + // a2o related collection scoped field selector `fields=sections.section_id:headings.title` + if (rootField.includes(':')) { + const [key, scope] = rootField.split(':'); + rootField = key!; + collectionScope = scope!; + } + + if (rootField in relationalStructure === false) { + if (collectionScope) { + relationalStructure[rootField] = { [collectionScope]: [] }; + } else { + relationalStructure[rootField] = []; + } + } + + if (parts.length > 1) { + const childKey = parts.slice(1).join('.'); + + if (collectionScope) { + if (collectionScope in relationalStructure[rootField]! === false) { + (relationalStructure[rootField] as CollectionScope)[collectionScope] = []; + } + + (relationalStructure[rootField] as CollectionScope)[collectionScope]!.push(childKey); + } else { + (relationalStructure[rootField] as string[]).push(childKey); + } + } + } else { + if (name.includes('(') && name.includes(')')) { + const columnName = name.match(REGEX_BETWEEN_PARENS)![1]!; + const foundField = context.schema.collections[options.parentCollection]!.fields[columnName]; + + if (foundField && foundField.type === 'alias') { + const foundRelation = context.schema.relations.find( + (relation) => + relation.related_collection === options.parentCollection && relation.meta?.one_field === columnName, + ); + + if (foundRelation) { + children.push({ + type: 'functionField', + name, + fieldKey, + query: {}, + relatedCollection: foundRelation.collection, + whenCase: [], + cases: [], + }); + + continue; + } + } + } + + if (name.includes(':')) { + const [key, scope] = name.split(':') as [string, string]; + + if (key in 
relationalStructure === false) { + relationalStructure[key] = { [scope]: [] }; + } else if (scope in (relationalStructure[key] as CollectionScope) === false) { + (relationalStructure[key] as CollectionScope)[scope] = []; + } + + continue; + } + + children.push({ type: 'field', name, fieldKey, whenCase: [] }); + } + } + + for (const [fieldKey, nestedFields] of Object.entries(relationalStructure)) { + let fieldName = fieldKey; + + if (options.query.alias && fieldKey in options.query.alias) { + fieldName = options.query.alias[fieldKey]!; + } + + const relatedCollection = getRelatedCollection(context.schema, options.parentCollection, fieldName); + const relation = getRelation(context.schema, options.parentCollection, fieldName); + + if (!relation) continue; + + const relationType = getRelationType({ + relation, + collection: options.parentCollection, + field: fieldName, + }); + + if (!relationType) continue; + + let child: NestedCollectionNode | null = null; + + if (relationType === 'a2o') { + const allowedCollections = relation.meta!.one_allowed_collections!; + + child = { + type: 'a2o', + names: allowedCollections, + children: {}, + query: {}, + relatedKey: {}, + parentKey: context.schema.collections[options.parentCollection]!.primary, + fieldKey: fieldKey, + relation: relation, + cases: {}, + whenCase: [], + }; + + for (const relatedCollection of allowedCollections) { + child.children[relatedCollection] = await parseFields( + { + parentCollection: relatedCollection, + fields: Array.isArray(nestedFields) + ? 
nestedFields + : (nestedFields as CollectionScope)[relatedCollection] || [], + query: options.query, + deep: options.deep?.[`${fieldKey}:${relatedCollection}`], + accountability: options.accountability, + }, + context, + ); + + child.query[relatedCollection] = getDeepQuery(options.deep?.[`${fieldKey}:${relatedCollection}`] || {}); + + child.relatedKey[relatedCollection] = context.schema.collections[relatedCollection]!.primary; + } + } else if (relatedCollection) { + if (options.accountability && options.accountability.admin === false && policies) { + const permissions = await fetchPermissions( + { + action: 'read', + collections: [relatedCollection], + policies: policies, + accountability: options.accountability, + }, + context, + ); + + // Skip related collection if no permissions + if (permissions.length === 0) { + continue; + } + } + + // update query alias for children parseFields + const deepAlias = getDeepQuery(options.deep?.[fieldKey] || {})?.['alias']; + if (!isEmpty(deepAlias)) options.query.alias = deepAlias; + + child = { + type: relationType, + name: relatedCollection, + fieldKey: fieldKey, + parentKey: context.schema.collections[options.parentCollection]!.primary, + relatedKey: context.schema.collections[relatedCollection]!.primary, + relation: relation, + query: getDeepQuery(options.deep?.[fieldKey] || {}), + children: await parseFields( + { + parentCollection: relatedCollection, + fields: nestedFields as string[], + query: options.query, + deep: options.deep?.[fieldKey] || {}, + accountability: options.accountability, + }, + context, + ), + cases: [], + whenCase: [], + }; + + if (relationType === 'o2m' && !child!.query.sort) { + child!.query.sort = [relation.meta?.sort_field || context.schema.collections[relation.collection]!.primary]; + } + } + + if (child) { + children.push(child); + } + } + + // Deduplicate any children fields that are included both as a regular field, and as a nested m2o field + const nestedCollectionNodes = 
children.filter((childNode) => childNode.type !== 'field'); + + return children.filter((childNode) => { + const existsAsNestedRelational = !!nestedCollectionNodes.find( + (nestedCollectionNode) => childNode.fieldKey === nestedCollectionNode.fieldKey, + ); + + if (childNode.type === 'field' && existsAsNestedRelational) return false; + + return true; + }); +} diff --git a/api/src/database/get-ast-from-query/utils/get-deep-query.ts b/api/src/database/get-ast-from-query/utils/get-deep-query.ts new file mode 100644 index 0000000000..9049ebd6e5 --- /dev/null +++ b/api/src/database/get-ast-from-query/utils/get-deep-query.ts @@ -0,0 +1,21 @@ +import { mapKeys, omitBy } from 'lodash-es'; + +/** + * Convert Deep query object to regular query object by ignoring all nested fields and returning the + * `_` prefixed fields as top level query fields + * + * @example + * + * ```js + * getDeepQuery({ + * _sort: ['a'] + * }); + * // => { sort: ['a'] } + * ``` + */ +export function getDeepQuery(query: Record): Record { + return mapKeys( + omitBy(query, (_value, key) => key.startsWith('_') === false), + (_value, key) => key.substring(1), + ); +} diff --git a/api/src/database/get-ast-from-query/utils/get-related-collection.ts b/api/src/database/get-ast-from-query/utils/get-related-collection.ts new file mode 100644 index 0000000000..ddd5ec466a --- /dev/null +++ b/api/src/database/get-ast-from-query/utils/get-related-collection.ts @@ -0,0 +1,18 @@ +import type { SchemaOverview } from '@directus/types'; +import { getRelation } from './get-relation.js'; + +export function getRelatedCollection(schema: SchemaOverview, collection: string, field: string): string | null { + const relation = getRelation(schema, collection, field); + + if (!relation) return null; + + if (relation.collection === collection && relation.field === field) { + return relation.related_collection || null; + } + + if (relation.related_collection === collection && relation.meta?.one_field === field) { + return 
relation.collection || null; + } + + return null; +} diff --git a/api/src/database/get-ast-from-query/utils/get-relation.ts b/api/src/database/get-ast-from-query/utils/get-relation.ts new file mode 100644 index 0000000000..9ad28f0d77 --- /dev/null +++ b/api/src/database/get-ast-from-query/utils/get-relation.ts @@ -0,0 +1,12 @@ +import type { SchemaOverview } from '@directus/types'; + +export function getRelation(schema: SchemaOverview, collection: string, field: string) { + const relation = schema.relations.find((relation) => { + return ( + (relation.collection === collection && relation.field === field) || + (relation.related_collection === collection && relation.meta?.one_field === field) + ); + }); + + return relation; +} diff --git a/api/src/database/helpers/fn/types.ts b/api/src/database/helpers/fn/types.ts index c8741e9f27..e8da279ba1 100644 --- a/api/src/database/helpers/fn/types.ts +++ b/api/src/database/helpers/fn/types.ts @@ -1,4 +1,4 @@ -import type { Query, SchemaOverview } from '@directus/types'; +import type { Filter, Query, SchemaOverview } from '@directus/types'; import type { Knex } from 'knex'; import { applyFilter, generateAlias } from '../../../utils/apply-query.js'; import type { AliasMap } from '../../../utils/get-column-path.js'; @@ -7,6 +7,7 @@ import { DatabaseHelper } from '../types.js'; export type FnHelperOptions = { type: string | undefined; query: Query | undefined; + cases: Filter[] | undefined; originalCollectionName: string | undefined; }; @@ -66,6 +67,7 @@ export abstract class FnHelper extends DatabaseHelper { options.query.filter, relation.collection, aliasMap, + options.cases ?? 
[], ).query; } diff --git a/api/src/database/helpers/geometry/dialects/mssql.ts b/api/src/database/helpers/geometry/dialects/mssql.ts index f6e504dfb3..27129c6180 100644 --- a/api/src/database/helpers/geometry/dialects/mssql.ts +++ b/api/src/database/helpers/geometry/dialects/mssql.ts @@ -20,8 +20,9 @@ export class GeometryHelperMSSQL extends GeometryHelper { return table.specificType(field.field, 'geometry'); } - override asText(table: string, column: string): Knex.Raw { - return this.knex.raw('??.??.STAsText() as ??', [table, column, column]); + override asText(table: string, column: string, alias: string | false): Knex.Raw { + if (alias) return this.knex.raw('??.??.STAsText() as ??', [table, column, alias]); + return this.knex.raw('??.??.STAsText()', [table, column]); } override fromText(text: string): Knex.Raw { diff --git a/api/src/database/helpers/geometry/dialects/mysql.ts b/api/src/database/helpers/geometry/dialects/mysql.ts index 4ff1040baa..a2dd70d3d6 100644 --- a/api/src/database/helpers/geometry/dialects/mysql.ts +++ b/api/src/database/helpers/geometry/dialects/mysql.ts @@ -5,7 +5,7 @@ export class GeometryHelperMySQL extends GeometryHelper { override collect(table: string, column: string): Knex.Raw { return this.knex.raw( `concat('geometrycollection(', group_concat(? separator ', '), ')'`, - this.asText(table, column), + this.asText(table, column, column), ); } diff --git a/api/src/database/helpers/geometry/dialects/oracle.ts b/api/src/database/helpers/geometry/dialects/oracle.ts index c33daf07fa..294e3c18fc 100644 --- a/api/src/database/helpers/geometry/dialects/oracle.ts +++ b/api/src/database/helpers/geometry/dialects/oracle.ts @@ -20,8 +20,9 @@ export class GeometryHelperOracle extends GeometryHelper { return table.specificType(field.field, 'sdo_geometry'); } - override asText(table: string, column: string): Knex.Raw { - return this.knex.raw('sdo_util.to_wktgeometry(??.??) 
as ??', [table, column, column]); + override asText(table: string, column: string, alias: string | false): Knex.Raw { + if (alias) return this.knex.raw('sdo_util.to_wktgeometry(??.??) as ??', [table, column, alias]); + return this.knex.raw('sdo_util.to_wktgeometry(??.??)', [table, column]); } asGeoJSON(table: string, column: string): Knex.Raw { @@ -43,6 +44,6 @@ export class GeometryHelperOracle extends GeometryHelper { } override collect(table: string, column: string): Knex.Raw { - return this.knex.raw(`concat('geometrycollection(', listagg(?, ', '), ')'`, this.asText(table, column)); + return this.knex.raw(`concat('geometrycollection(', listagg(?, ', '), ')'`, this.asText(table, column, column)); } } diff --git a/api/src/database/helpers/geometry/types.ts b/api/src/database/helpers/geometry/types.ts index 9fe081fa46..b82830b1c9 100644 --- a/api/src/database/helpers/geometry/types.ts +++ b/api/src/database/helpers/geometry/types.ts @@ -22,8 +22,9 @@ export abstract class GeometryHelper extends DatabaseHelper { return table.specificType(field.field, type); } - asText(table: string, column: string): Knex.Raw { - return this.knex.raw('st_astext(??.??) as ??', [table, column, column]); + asText(table: string, column: string, alias: string | false): Knex.Raw { + if (alias) return this.knex.raw('st_astext(??.??) 
as ??', [table, column, alias]); + return this.knex.raw('st_astext(??.??)', [table, column]); } fromText(text: string): Knex.Raw { diff --git a/api/src/database/helpers/schema/dialects/cockroachdb.ts b/api/src/database/helpers/schema/dialects/cockroachdb.ts index e6c0a45248..91ed8a0bfb 100644 --- a/api/src/database/helpers/schema/dialects/cockroachdb.ts +++ b/api/src/database/helpers/schema/dialects/cockroachdb.ts @@ -1,7 +1,8 @@ import type { KNEX_TYPES } from '@directus/constants'; -import type { Options } from '../types.js'; +import type { Options, Sql } from '../types.js'; import { SchemaHelper } from '../types.js'; import { useEnv } from '@directus/env'; +import { preprocessBindings } from '../utils/preprocess-bindings.js'; const env = useEnv(); @@ -38,4 +39,8 @@ export class SchemaHelperCockroachDb extends SchemaHelper { return null; } } + + override preprocessBindings(queryParams: Sql): Sql { + return preprocessBindings(queryParams, { format: (index) => `$${index + 1}` }); + } } diff --git a/api/src/database/helpers/schema/dialects/mssql.ts b/api/src/database/helpers/schema/dialects/mssql.ts index 1b716283a7..c6ed9e8433 100644 --- a/api/src/database/helpers/schema/dialects/mssql.ts +++ b/api/src/database/helpers/schema/dialects/mssql.ts @@ -1,5 +1,6 @@ import type { Knex } from 'knex'; -import { SchemaHelper } from '../types.js'; +import { SchemaHelper, type Sql } from '../types.js'; +import { preprocessBindings } from '../utils/preprocess-bindings.js'; export class SchemaHelperMSSQL extends SchemaHelper { override applyLimit(rootQuery: Knex.QueryBuilder, limit: number): void { @@ -30,4 +31,8 @@ export class SchemaHelperMSSQL extends SchemaHelper { return null; } } + + override preprocessBindings(queryParams: Sql): Sql { + return preprocessBindings(queryParams, { format: (index) => `@p${index}` }); + } } diff --git a/api/src/database/helpers/schema/dialects/oracle.ts b/api/src/database/helpers/schema/dialects/oracle.ts index b473d4b079..52ee76162c 100644 --- 
a/api/src/database/helpers/schema/dialects/oracle.ts +++ b/api/src/database/helpers/schema/dialects/oracle.ts @@ -1,7 +1,8 @@ import type { KNEX_TYPES } from '@directus/constants'; import type { Field, Relation, Type } from '@directus/types'; -import type { Options } from '../types.js'; +import type { Options, Sql } from '../types.js'; import { SchemaHelper } from '../types.js'; +import { preprocessBindings } from '../utils/preprocess-bindings.js'; export class SchemaHelperOracle extends SchemaHelper { override async changeToType( @@ -50,4 +51,8 @@ export class SchemaHelperOracle extends SchemaHelper { return null; } } + + override preprocessBindings(queryParams: Sql): Sql { + return preprocessBindings(queryParams, { format: (index) => `:${index + 1}` }); + } } diff --git a/api/src/database/helpers/schema/dialects/postgres.ts b/api/src/database/helpers/schema/dialects/postgres.ts index 9f8262274a..993bcd8f5f 100644 --- a/api/src/database/helpers/schema/dialects/postgres.ts +++ b/api/src/database/helpers/schema/dialects/postgres.ts @@ -1,5 +1,6 @@ import { useEnv } from '@directus/env'; -import { SchemaHelper } from '../types.js'; +import { SchemaHelper, type Sql } from '../types.js'; +import { preprocessBindings } from '../utils/preprocess-bindings.js'; const env = useEnv(); @@ -13,4 +14,8 @@ export class SchemaHelperPostgres extends SchemaHelper { return null; } } + + override preprocessBindings(queryParams: Sql): Sql { + return preprocessBindings(queryParams, { format: (index) => `$${index + 1}` }); + } } diff --git a/api/src/database/helpers/schema/types.ts b/api/src/database/helpers/schema/types.ts index 9da933b758..1d9136ab13 100644 --- a/api/src/database/helpers/schema/types.ts +++ b/api/src/database/helpers/schema/types.ts @@ -7,6 +7,11 @@ import { DatabaseHelper } from '../types.js'; export type Options = { nullable?: boolean; default?: any; length?: number }; +export type Sql = { + sql: string; + bindings: readonly Knex.Value[]; +}; + export abstract class 
SchemaHelper extends DatabaseHelper { isOneOfClients(clients: DatabaseClient[]): boolean { return clients.includes(getDatabaseClient(this.knex)); @@ -146,4 +151,8 @@ export abstract class SchemaHelper extends DatabaseHelper { async getDatabaseSize(): Promise { return null; } + + preprocessBindings(queryParams: Sql): Sql { + return queryParams; + } } diff --git a/api/src/database/helpers/schema/utils/preprocess-bindings.test.ts b/api/src/database/helpers/schema/utils/preprocess-bindings.test.ts new file mode 100644 index 0000000000..7841b35fd6 --- /dev/null +++ b/api/src/database/helpers/schema/utils/preprocess-bindings.test.ts @@ -0,0 +1,37 @@ +import { test, expect } from 'vitest'; +import { preprocessBindings } from './preprocess-bindings.js'; + +const format = (index: number) => `$${index + 1}`; + +test('Returns an escaped question mark, so it stays escaped', () => { + expect(preprocessBindings(`SELECT * FROM table WHERE column = "\\?"`, { format }).sql).toEqual( + 'SELECT * FROM table WHERE column = "\\?"', + ); + + expect(preprocessBindings(`SELECT * FROM table WHERE column = "\\\\\\?"`, { format }).sql).toEqual( + 'SELECT * FROM table WHERE column = "\\\\\\?"', + ); +}); + +test('Replaces question marks with $1, $2, etc.', () => { + const bindings = preprocessBindings( + { sql: `SELECT * FROM table WHERE column = ? LIMIT ?`, bindings: [1, 100] }, + { format }, + ); + + expect(bindings.sql).toEqual('SELECT * FROM table WHERE column = $1 LIMIT $2'); + expect(bindings.bindings).toEqual([1, 100]); +}); + +test('Replaces question marks with $1, $2, etc. and skips duplicates', () => { + const bindings = preprocessBindings( + { + sql: `SELECT * FROM table WHERE column = ? AND other = ? 
LIMIT ?`, + bindings: [10, 'foo', 10], + }, + { format }, + ); + + expect(bindings.sql).toEqual('SELECT * FROM table WHERE column = $1 AND other = $2 LIMIT $1'); + expect(bindings.bindings).toEqual([10, 'foo']); +}); diff --git a/api/src/database/helpers/schema/utils/preprocess-bindings.ts b/api/src/database/helpers/schema/utils/preprocess-bindings.ts new file mode 100644 index 0000000000..94563bf7f2 --- /dev/null +++ b/api/src/database/helpers/schema/utils/preprocess-bindings.ts @@ -0,0 +1,46 @@ +import { isString } from 'lodash-es'; +import type { Sql } from '../types.js'; + +export type PreprocessBindingsOptions = { + format(index: number): string; +}; + +export function preprocessBindings( + queryParams: (Partial & Pick) | string, + options: PreprocessBindingsOptions, +) { + const query: Sql = { bindings: [], ...(isString(queryParams) ? { sql: queryParams } : queryParams) }; + + const bindingIndices: number[] = new Array(query.bindings.length); + + for (let i = 0; i < query.bindings.length; i++) { + const binding = query.bindings[i]; + const prevIndex = query.bindings.findIndex((b, j) => j < i && b === binding); + + if (prevIndex !== -1) { + bindingIndices[i] = prevIndex; + } else { + bindingIndices[i] = i; + } + } + + let matchIndex = 0; + let currentBindingIndex = 0; + + const sql = query.sql.replace(/(\\*)(\?)/g, function (_, escapes) { + if (escapes.length % 2) { + // Return an escaped question mark, so it stays escaped + return `${'\\'.repeat(escapes.length)}?`; + } else { + const bindingIndex = + bindingIndices[matchIndex] === matchIndex ? 
currentBindingIndex++ : bindingIndices[matchIndex]!; + + matchIndex++; + return options.format(bindingIndex); + } + }); + + const bindings = query.bindings.filter((_, i) => bindingIndices[i] === i); + + return { ...query, sql, bindings }; +} diff --git a/api/src/database/index.ts b/api/src/database/index.ts index 2799c68e03..3ce8039458 100644 --- a/api/src/database/index.ts +++ b/api/src/database/index.ts @@ -1,6 +1,7 @@ import { useEnv } from '@directus/env'; import type { SchemaInspector } from '@directus/schema'; import { createInspector } from '@directus/schema'; +import { isObject } from '@directus/utils'; import fse from 'fs-extra'; import type { Knex } from 'knex'; import knex from 'knex'; @@ -143,6 +144,11 @@ export function getDatabase(): Knex { } if (client === 'mysql') { + // Remove the conflicting `filename` option, defined by default in the Docker Image + if (isObject(knexConfig.connection)) delete knexConfig.connection['filename']; + + Object.assign(knexConfig, { client: 'mysql2' }); + poolConfig.afterCreate = async (conn: any, callback: any) => { logger.trace('Retrieving database version'); const run = promisify(conn.query.bind(conn)); @@ -243,7 +249,7 @@ export function getDatabaseClient(database?: Knex): DatabaseClient { database = database ?? 
getDatabase(); switch (database.client.constructor.name) { - case 'Client_MySQL': + case 'Client_MySQL2': return 'mysql'; case 'Client_PG': return 'postgres'; diff --git a/api/src/database/migrations/20240806A-permissions-policies.ts b/api/src/database/migrations/20240806A-permissions-policies.ts new file mode 100644 index 0000000000..4bd2d1ce5a --- /dev/null +++ b/api/src/database/migrations/20240806A-permissions-policies.ts @@ -0,0 +1,430 @@ +import { processChunk, toBoolean } from '@directus/utils'; +import type { Knex } from 'knex'; +import { flatten, intersection, isEqual, merge, omit, uniq } from 'lodash-es'; +import { randomUUID } from 'node:crypto'; +import { fetchPermissions } from '../../permissions/lib/fetch-permissions.js'; +import { fetchPolicies } from '../../permissions/lib/fetch-policies.js'; +import { fetchRolesTree } from '../../permissions/lib/fetch-roles-tree.js'; +import { getSchema } from '../../utils/get-schema.js'; + +import type { LogicalFilterAND, LogicalFilterOR, Permission } from '@directus/types'; + +type RoleAccess = { + app_access: boolean; + admin_access: boolean; + ip_access: string | null; + enforce_tfa: boolean; +}; + +// Adapted from https://github.com/directus/directus/blob/141b8adbf4dd8e06530a7929f34e3fc68a522053/api/src/utils/merge-permissions.ts#L4 +export function mergePermissions(strategy: 'and' | 'or', ...permissions: Permission[][]) { + const allPermissions = flatten(permissions); + + const mergedPermissions = allPermissions + .reduce((acc, val) => { + const key = `${val.collection}__${val.action}`; + const current = acc.get(key); + acc.set(key, current ? 
mergePermission(strategy, current, val) : val); + return acc; + }, new Map()) + .values(); + + return Array.from(mergedPermissions); +} + +export function mergePermission( + strategy: 'and' | 'or', + currentPerm: Permission, + newPerm: Permission, +): Omit { + const logicalKey = `_${strategy}` as keyof LogicalFilterOR | keyof LogicalFilterAND; + + let { permissions, validation, fields, presets } = currentPerm; + + if (newPerm.permissions) { + if (currentPerm.permissions && Object.keys(currentPerm.permissions)[0] === logicalKey) { + permissions = { + [logicalKey]: [ + ...(currentPerm.permissions as LogicalFilterOR & LogicalFilterAND)[logicalKey], + newPerm.permissions, + ], + } as LogicalFilterAND | LogicalFilterOR; + } else if (currentPerm.permissions) { + // Empty {} supersedes other permissions in _OR merge + if (strategy === 'or' && (isEqual(currentPerm.permissions, {}) || isEqual(newPerm.permissions, {}))) { + permissions = {}; + } else { + permissions = { + [logicalKey]: [currentPerm.permissions, newPerm.permissions], + } as LogicalFilterAND | LogicalFilterOR; + } + } else { + permissions = { + [logicalKey]: [newPerm.permissions], + } as LogicalFilterAND | LogicalFilterOR; + } + } + + if (newPerm.validation) { + if (currentPerm.validation && Object.keys(currentPerm.validation)[0] === logicalKey) { + validation = { + [logicalKey]: [ + ...(currentPerm.validation as LogicalFilterOR & LogicalFilterAND)[logicalKey], + newPerm.validation, + ], + } as LogicalFilterAND | LogicalFilterOR; + } else if (currentPerm.validation) { + // Empty {} supersedes other validations in _OR merge + if (strategy === 'or' && (isEqual(currentPerm.validation, {}) || isEqual(newPerm.validation, {}))) { + validation = {}; + } else { + validation = { + [logicalKey]: [currentPerm.validation, newPerm.validation], + } as LogicalFilterAND | LogicalFilterOR; + } + } else { + validation = { + [logicalKey]: [newPerm.validation], + } as LogicalFilterAND | LogicalFilterOR; + } + } + + if 
(newPerm.fields) { + if (Array.isArray(currentPerm.fields) && strategy === 'or') { + fields = uniq([...currentPerm.fields, ...newPerm.fields]); + } else if (Array.isArray(currentPerm.fields) && strategy === 'and') { + fields = intersection(currentPerm.fields, newPerm.fields); + } else { + fields = newPerm.fields; + } + + if (fields.includes('*')) fields = ['*']; + } + + if (newPerm.presets) { + presets = merge({}, presets, newPerm.presets); + } + + return omit( + { + ...currentPerm, + permissions, + validation, + fields, + presets, + }, + ['id', 'system'], + ); +} + +async function fetchRoleAccess(roles: string[], context: { knex: Knex }) { + const roleAccess: RoleAccess = { + admin_access: false, + app_access: false, + ip_access: null, + enforce_tfa: false, + }; + + const accessRows = await context + .knex('directus_access') + .select( + 'directus_policies.id', + 'directus_policies.admin_access', + 'directus_policies.app_access', + 'directus_policies.ip_access', + 'directus_policies.enforce_tfa', + ) + .where('role', 'in', roles) + .leftJoin('directus_policies', 'directus_policies.id', 'directus_access.policy'); + + const ipAccess = new Set(); + + for (const { admin_access, app_access, ip_access, enforce_tfa } of accessRows) { + roleAccess.admin_access ||= toBoolean(admin_access); + roleAccess.app_access ||= toBoolean(app_access); + roleAccess.enforce_tfa ||= toBoolean(enforce_tfa); + + if (ip_access && ip_access.length) { + ip_access.split(',').forEach((ip: string) => ipAccess.add(ip)); + } + } + + if (ipAccess.size > 0) { + roleAccess.ip_access = Array.from(ipAccess).join(','); + } + + return roleAccess; +} + +/** + * The public role used to be `null`, we gotta create a single new policy for the permissions + * previously attached to the public role (marked through `role = null`). 
+ */ +const PUBLIC_POLICY_ID = 'abf8a154-5b1c-4a46-ac9c-7300570f4f17'; + +export async function up(knex: Knex) { + ///////////////////////////////////////////////////////////////////////////////////////////////// + // If the policies table already exists the migration has already run + if (await knex.schema.hasTable('directus_policies')) { + return; + } + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Create new policies table that mirrors previous Roles + + await knex.schema.createTable('directus_policies', (table) => { + table.uuid('id').primary(); + table.string('name', 100).notNullable(); + table.string('icon', 64).notNullable().defaultTo('badge'); + table.text('description'); + table.text('ip_access'); + table.boolean('enforce_tfa').defaultTo(false).notNullable(); + table.boolean('admin_access').defaultTo(false).notNullable(); + table.boolean('app_access').defaultTo(false).notNullable(); + }); + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Copy over all existing roles into new policies + + const roles = await knex + .select('id', 'name', 'icon', 'description', 'ip_access', 'enforce_tfa', 'admin_access', 'app_access') + .from('directus_roles'); + + if (roles.length > 0) { + await processChunk(roles, 100, async (chunk) => { + await knex('directus_policies').insert(chunk); + }); + } + + await knex + .insert({ + id: PUBLIC_POLICY_ID, + name: '$t:public_label', + icon: 'public', + description: '$t:public_description', + app_access: false, + }) + .into('directus_policies'); + + // Change the admin policy description to $t:admin_policy_description + await knex('directus_policies') + .update({ + description: '$t:admin_policy_description', + }) + .where('description', 'LIKE', '$t:admin_description'); + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Remove access control + add nesting to roles + + 
await knex.schema.alterTable('directus_roles', (table) => { + table.dropColumn('ip_access'); + table.dropColumn('enforce_tfa'); + table.dropColumn('admin_access'); + table.dropColumn('app_access'); + + table.uuid('parent').references('directus_roles.id'); + }); + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Link permissions to policies instead of roles + + await knex.schema.alterTable('directus_permissions', (table) => { + table.uuid('policy').references('directus_policies.id').onDelete('CASCADE'); + // Drop the foreign key constraint here in order to update `null` role to public policy ID + table.dropForeign('role'); + }); + + await knex('directus_permissions') + .update({ + role: PUBLIC_POLICY_ID, + }) + .whereNull('role'); + + await knex('directus_permissions').update({ + policy: knex.ref('role'), + }); + + await knex.schema.alterTable('directus_permissions', (table) => { + table.dropColumns('role'); + table.dropNullable('policy'); + }); + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Setup junction table between roles/users and policies + + // This could be an A2O style setup with a collection/item field rather than individual foreign + // keys, but we want to be able to show the reverse-relationship on the individual policies as + // well, which would require the O2A type to exist in Directus which currently doesn't. + // Shouldn't be the end of the world here, as we know we're only attaching policies to two other + // collections.
+ + await knex.schema.createTable('directus_access', (table) => { + table.uuid('id').primary(); + table.uuid('role').references('directus_roles.id').nullable().onDelete('CASCADE'); + table.uuid('user').references('directus_users.id').nullable().onDelete('CASCADE'); + table.uuid('policy').references('directus_policies.id').notNullable().onDelete('CASCADE'); + table.integer('sort'); + }); + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Attach policies to existing roles for backwards compatibility + + const policyAttachments = roles.map((role) => ({ + id: randomUUID(), + role: role.id, + user: null, + policy: role.id, + sort: 1, + })); + + await processChunk(policyAttachments, 100, async (chunk) => { + await knex('directus_access').insert(chunk); + }); + + await knex('directus_access').insert({ + id: randomUUID(), + role: null, + user: null, + policy: PUBLIC_POLICY_ID, + sort: 1, + }); +} + +export async function down(knex: Knex) { + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Reinstate access control fields on directus roles + + await knex.schema.alterTable('directus_roles', (table) => { + table.text('ip_access'); + table.boolean('enforce_tfa').defaultTo(false).notNullable(); + table.boolean('admin_access').defaultTo(false).notNullable(); + table.boolean('app_access').defaultTo(true).notNullable(); + }); + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Copy policy access control rules back to roles + + const originalPermissions = await knex + .select('id') + .from('directus_permissions') + .whereNot({ policy: PUBLIC_POLICY_ID }); + + await knex.schema.alterTable('directus_permissions', (table) => { + table.uuid('role').nullable(); + table.setNullable('policy'); + }); + + const context = { knex, schema: await getSchema() }; + + // fetch all roles + const roles: Array<{ id: string | null }> = 
await knex.select('id').from('directus_roles'); + + // simulate Public Role + roles.push({ id: null }); + + // role permissions to be inserted once all processing is completed + const rolePermissions: Array | { role: string | null }> = []; + + for (const role of roles) { + const roleTree = await fetchRolesTree(role.id, knex); + + let roleAccess = null; + + if (role.id !== null) { + roleAccess = await fetchRoleAccess(roleTree, context); + await knex('directus_roles').update(roleAccess).where({ id: role.id }); + } + + if (roleAccess === null || !roleAccess.admin_access) { + // fetch all of the roles policies + const policies = await fetchPolicies({ roles: roleTree, user: null, ip: null }, context); + + // fetch all of the policies permissions + const rawPermissions = await fetchPermissions( + { + accountability: { role: null, roles: roleTree, user: null, app: roleAccess?.app_access || false }, + policies, + bypassDynamicVariableProcessing: true, + }, + context, + ); + + // merge all permissions to single version (v10) and save for later use + mergePermissions('or', rawPermissions).forEach((permission) => { + // System permissions are automatically populated + if (permission.system) { + return; + } + + // convert merged permissions to storage ready format + if (Array.isArray(permission.fields)) { + permission.fields = permission.fields.join(','); + } + + if (permission.permissions) { + permission.permissions = JSON.stringify(permission.permissions); + } + + if (permission.validation) { + permission.validation = JSON.stringify(permission.validation); + } + + if (permission.presets) { + permission.presets = JSON.stringify(permission.presets); + } + + rolePermissions.push({ role: role.id, ...omit(permission, ['id', 'policy']) }); + }); + } + } + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Remove role nesting support + + await knex.schema.alterTable('directus_roles', (table) => { + table.dropForeign('parent'); + 
table.dropColumn('parent'); + }); + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Drop all permissions that are only attached to a user + + // TODO query all policies that are attached to a user and delete their permissions, + // since we don't know where to put them now and it'll cause a foreign key problem + // as soon as we reference directus_roles in directus_permissions again + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Drop policy attachments + + await knex.schema.dropTable('directus_access'); + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Reattach permissions to roles instead of policies + + await knex('directus_permissions') + .update({ + role: null, + }) + .where({ role: PUBLIC_POLICY_ID }); + + // remove all v11 permissions + await processChunk(originalPermissions, 100, async (chunk) => { + await knex('directus_permissions').delete(chunk); + }); + + // insert all v10 permissions + await processChunk(rolePermissions, 100, async (chunk) => { + await knex('directus_permissions').insert(chunk); + }); + + await knex.schema.alterTable('directus_permissions', (table) => { + table.uuid('role').references('directus_roles.id').alter(); + table.dropForeign('policy'); + table.dropColumn('policy'); + }); + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // Drop policies table + + await knex.schema.dropTable('directus_policies'); +} diff --git a/api/src/database/run-ast.ts b/api/src/database/run-ast.ts deleted file mode 100644 index 086917e0d4..0000000000 --- a/api/src/database/run-ast.ts +++ /dev/null @@ -1,650 +0,0 @@ -import { useEnv } from '@directus/env'; -import type { Item, Query, SchemaOverview } from '@directus/types'; -import { toArray } from '@directus/utils'; -import type { Knex } from 'knex'; -import { clone, cloneDeep, isNil,
merge, pick, uniq } from 'lodash-es'; -import { PayloadService } from '../services/payload.js'; -import type { AST, FieldNode, FunctionFieldNode, M2ONode, NestedCollectionNode } from '../types/ast.js'; -import { applyFunctionToColumnName } from '../utils/apply-function-to-column-name.js'; -import applyQuery, { applyLimit, applySort, generateAlias, type ColumnSortRecord } from '../utils/apply-query.js'; -import { getCollectionFromAlias } from '../utils/get-collection-from-alias.js'; -import type { AliasMap } from '../utils/get-column-path.js'; -import { getColumn } from '../utils/get-column.js'; -import { parseFilterKey } from '../utils/parse-filter-key.js'; -import { getHelpers } from './helpers/index.js'; -import getDatabase from './index.js'; - -type RunASTOptions = { - /** - * Query override for the current level - */ - query?: AST['query']; - - /** - * Knex instance - */ - knex?: Knex; - - /** - * Whether or not the current execution is a nested dataset in another AST - */ - nested?: boolean; - - /** - * Whether or not to strip out non-requested required fields automatically (eg IDs / FKs) - */ - stripNonRequested?: boolean; -}; - -/** - * Execute a given AST using Knex. Returns array of items based on requested AST. 
- */ -export default async function runAST( - originalAST: AST | NestedCollectionNode, - schema: SchemaOverview, - options?: RunASTOptions, -): Promise { - const ast = cloneDeep(originalAST); - - const knex = options?.knex || getDatabase(); - - if (ast.type === 'a2o') { - const results: { [collection: string]: null | Item | Item[] } = {}; - - for (const collection of ast.names) { - results[collection] = await run(collection, ast.children[collection]!, ast.query[collection]!); - } - - return results; - } else { - return await run(ast.name, ast.children, options?.query || ast.query); - } - - async function run( - collection: string, - children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[], - query: Query, - ) { - const env = useEnv(); - - // Retrieve the database columns to select in the current AST - const { fieldNodes, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel( - schema, - collection, - children, - query, - ); - - // The actual knex query builder instance. This is a promise that resolves with the raw items from the db - const dbQuery = await getDBQuery(schema, knex, collection, fieldNodes, query); - - const rawItems: Item | Item[] = await dbQuery; - - if (!rawItems) return null; - - // Run the items through the special transforms - const payloadService = new PayloadService(collection, { knex, schema }); - let items: null | Item | Item[] = await payloadService.processValues('read', rawItems, query.alias ?? 
{}); - - if (!items || (Array.isArray(items) && items.length === 0)) return items; - - // Apply the `_in` filters to the nested collection batches - const nestedNodes = applyParentFilters(schema, nestedCollectionNodes, items); - - for (const nestedNode of nestedNodes) { - let nestedItems: Item[] | null = []; - - if (nestedNode.type === 'o2m') { - let hasMore = true; - - let batchCount = 0; - - while (hasMore) { - const node = merge({}, nestedNode, { - query: { - limit: env['RELATIONAL_BATCH_SIZE'], - offset: batchCount * (env['RELATIONAL_BATCH_SIZE'] as number), - page: null, - }, - }); - - nestedItems = (await runAST(node, schema, { knex, nested: true })) as Item[] | null; - - if (nestedItems) { - items = mergeWithParentItems(schema, nestedItems, items!, nestedNode)!; - } - - if (!nestedItems || nestedItems.length < (env['RELATIONAL_BATCH_SIZE'] as number)) { - hasMore = false; - } - - batchCount++; - } - } else { - const node = merge({}, nestedNode, { - query: { limit: -1 }, - }); - - nestedItems = (await runAST(node, schema, { knex, nested: true })) as Item[] | null; - - if (nestedItems) { - // Merge all fetched nested records with the parent items - items = mergeWithParentItems(schema, nestedItems, items!, nestedNode)!; - } - } - } - - // During the fetching of data, we have to inject a couple of required fields for the child nesting - // to work (primary / foreign keys) even if they're not explicitly requested. 
After all fetching - // and nesting is done, we parse through the output structure, and filter out all non-requested - // fields - if (options?.nested !== true && options?.stripNonRequested !== false) { - items = removeTemporaryFields(schema, items, originalAST, primaryKeyField); - } - - return items; - } -} - -async function parseCurrentLevel( - schema: SchemaOverview, - collection: string, - children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[], - query: Query, -) { - const primaryKeyField = schema.collections[collection]!.primary; - const columnsInCollection = Object.keys(schema.collections[collection]!.fields); - - const columnsToSelectInternal: string[] = []; - const nestedCollectionNodes: NestedCollectionNode[] = []; - - for (const child of children) { - if (child.type === 'field' || child.type === 'functionField') { - const { fieldName } = parseFilterKey(child.name); - - if (columnsInCollection.includes(fieldName)) { - columnsToSelectInternal.push(child.fieldKey); - } - - continue; - } - - if (!child.relation) continue; - - if (child.type === 'm2o') { - columnsToSelectInternal.push(child.relation.field); - } - - if (child.type === 'a2o') { - columnsToSelectInternal.push(child.relation.field); - columnsToSelectInternal.push(child.relation.meta!.one_collection_field!); - } - - nestedCollectionNodes.push(child); - } - - const isAggregate = (query.group || (query.aggregate && Object.keys(query.aggregate).length > 0)) ?? false; - - /** Always fetch primary key in case there's a nested relation that needs it. 
Aggregate payloads - * can't have nested relational fields - */ - if (isAggregate === false && columnsToSelectInternal.includes(primaryKeyField) === false) { - columnsToSelectInternal.push(primaryKeyField); - } - - /** Make sure select list has unique values */ - const columnsToSelect = [...new Set(columnsToSelectInternal)]; - - const fieldNodes = columnsToSelect.map( - (column: string) => - children.find( - (childNode) => - (childNode.type === 'field' || childNode.type === 'functionField') && childNode.fieldKey === column, - ) ?? { - type: 'field', - name: column, - fieldKey: column, - }, - ) as FieldNode[]; - - return { fieldNodes, nestedCollectionNodes, primaryKeyField }; -} - -function getColumnPreprocessor(knex: Knex, schema: SchemaOverview, table: string) { - const helpers = getHelpers(knex); - - return function (fieldNode: FieldNode | FunctionFieldNode | M2ONode): Knex.Raw { - let alias = undefined; - - if (fieldNode.name !== fieldNode.fieldKey) { - alias = fieldNode.fieldKey; - } - - let field; - - if (fieldNode.type === 'field' || fieldNode.type === 'functionField') { - const { fieldName } = parseFilterKey(fieldNode.name); - field = schema.collections[table]!.fields[fieldName]; - } else { - field = schema.collections[fieldNode.relation.collection]!.fields[fieldNode.relation.field]; - } - - if (field?.type?.startsWith('geometry')) { - return helpers.st.asText(table, field.field); - } - - if (fieldNode.type === 'functionField') { - return getColumn(knex, table, fieldNode.name, alias, schema, { query: fieldNode.query }); - } - - return getColumn(knex, table, fieldNode.name, alias, schema); - }; -} - -async function getDBQuery( - schema: SchemaOverview, - knex: Knex, - table: string, - fieldNodes: (FieldNode | FunctionFieldNode)[], - query: Query, -): Promise { - const env = useEnv(); - const preProcess = getColumnPreprocessor(knex, schema, table); - const queryCopy = clone(query); - const helpers = getHelpers(knex); - - queryCopy.limit = typeof 
queryCopy.limit === 'number' ? queryCopy.limit : Number(env['QUERY_LIMIT_DEFAULT']); - - // Queries with aggregates and groupBy will not have duplicate result - if (queryCopy.aggregate || queryCopy.group) { - const flatQuery = knex.select(fieldNodes.map(preProcess)).from(table); - return await applyQuery(knex, table, flatQuery, queryCopy, schema).query; - } - - const primaryKey = schema.collections[table]!.primary; - const aliasMap: AliasMap = Object.create(null); - let dbQuery = knex.from(table); - let sortRecords: ColumnSortRecord[] | undefined; - const innerQuerySortRecords: { alias: string; order: 'asc' | 'desc' }[] = []; - let hasMultiRelationalSort: boolean | undefined; - - if (queryCopy.sort) { - const sortResult = applySort(knex, schema, dbQuery, queryCopy, table, aliasMap, true); - - if (sortResult) { - sortRecords = sortResult.sortRecords; - hasMultiRelationalSort = sortResult.hasMultiRelationalSort; - } - } - - const { hasMultiRelationalFilter } = applyQuery(knex, table, dbQuery, queryCopy, schema, { - aliasMap, - isInnerQuery: true, - hasMultiRelationalSort, - }); - - const needsInnerQuery = hasMultiRelationalSort || hasMultiRelationalFilter; - - if (needsInnerQuery) { - dbQuery.select(`${table}.${primaryKey}`).distinct(); - } else { - dbQuery.select(fieldNodes.map(preProcess)); - } - - if (sortRecords) { - // Clears the order if any, eg: from MSSQL offset - dbQuery.clear('order'); - - if (needsInnerQuery) { - let orderByString = ''; - const orderByFields: Knex.Raw[] = []; - - sortRecords.map((sortRecord) => { - if (orderByString.length !== 0) { - orderByString += ', '; - } - - const sortAlias = `sort_${generateAlias()}`; - - if (sortRecord.column.includes('.')) { - const [alias, field] = sortRecord.column.split('.'); - const originalCollectionName = getCollectionFromAlias(alias!, aliasMap); - dbQuery.select(getColumn(knex, alias!, field!, sortAlias, schema, { originalCollectionName })); - - orderByString += `?? 
${sortRecord.order}`; - orderByFields.push(getColumn(knex, alias!, field!, false, schema, { originalCollectionName })); - } else { - dbQuery.select(getColumn(knex, table, sortRecord.column, sortAlias, schema)); - - orderByString += `?? ${sortRecord.order}`; - orderByFields.push(getColumn(knex, table, sortRecord.column, false, schema)); - } - - innerQuerySortRecords.push({ alias: sortAlias, order: sortRecord.order }); - }); - - if (hasMultiRelationalSort) { - dbQuery = helpers.schema.applyMultiRelationalSort( - knex, - dbQuery, - table, - primaryKey, - orderByString, - orderByFields, - ); - - // Start order by with directus_row_number. The directus_row_number is derived from a window function that - // is ordered by the sort fields within every primary key partition. That ensures that the result with the - // row number = 1 is the top-most row of every partition, according to the selected sort fields. - // Since the only relevant result is the first row of this partition, adding the directus_row_number to the - // order by here ensures that all rows with a directus_row_number = 1 show up first in the inner query result, - // and are correctly truncated by the limit, but not earlier. - orderByString = `?? 
asc, ${orderByString}`; - orderByFields.unshift(knex.ref('directus_row_number')); - } - - dbQuery.orderByRaw(orderByString, orderByFields); - } else { - sortRecords.map((sortRecord) => { - if (sortRecord.column.includes('.')) { - const [alias, field] = sortRecord.column.split('.'); - - sortRecord.column = getColumn(knex, alias!, field!, false, schema, { - originalCollectionName: getCollectionFromAlias(alias!, aliasMap), - }) as any; - } else { - sortRecord.column = getColumn(knex, table, sortRecord.column, false, schema) as any; - } - }); - - dbQuery.orderBy(sortRecords); - } - } - - if (!needsInnerQuery) return dbQuery; - - const wrapperQuery = knex - .select(fieldNodes.map(preProcess)) - .from(table) - .innerJoin(knex.raw('??', dbQuery.as('inner')), `${table}.${primaryKey}`, `inner.${primaryKey}`); - - if (sortRecords) { - innerQuerySortRecords.map((innerQuerySortRecord) => { - wrapperQuery.orderBy(`inner.${innerQuerySortRecord.alias}`, innerQuerySortRecord.order); - }); - - if (hasMultiRelationalSort) { - wrapperQuery.where('inner.directus_row_number', '=', 1); - applyLimit(knex, wrapperQuery, queryCopy.limit); - } - } - - return wrapperQuery; -} - -function applyParentFilters( - schema: SchemaOverview, - nestedCollectionNodes: NestedCollectionNode[], - parentItem: Item | Item[], -) { - const parentItems = toArray(parentItem); - - for (const nestedNode of nestedCollectionNodes) { - if (!nestedNode.relation) continue; - - if (nestedNode.type === 'm2o') { - const foreignField = schema.collections[nestedNode.relation.related_collection!]!.primary; - const foreignIds = uniq(parentItems.map((res) => res[nestedNode.relation.field])).filter((id) => !isNil(id)); - - merge(nestedNode, { query: { filter: { [foreignField]: { _in: foreignIds } } } }); - } else if (nestedNode.type === 'o2m') { - const relatedM2OisFetched = !!nestedNode.children.find((child) => { - return child.type === 'field' && child.name === nestedNode.relation.field; - }); - - if (relatedM2OisFetched === 
false) { - nestedNode.children.push({ - type: 'field', - name: nestedNode.relation.field, - fieldKey: nestedNode.relation.field, - }); - } - - if (nestedNode.relation.meta?.sort_field) { - nestedNode.children.push({ - type: 'field', - name: nestedNode.relation.meta.sort_field, - fieldKey: nestedNode.relation.meta.sort_field, - }); - } - - const foreignField = nestedNode.relation.field; - const foreignIds = uniq(parentItems.map((res) => res[nestedNode.parentKey])).filter((id) => !isNil(id)); - - merge(nestedNode, { query: { filter: { [foreignField]: { _in: foreignIds } } } }); - } else if (nestedNode.type === 'a2o') { - const keysPerCollection: { [collection: string]: (string | number)[] } = {}; - - for (const parentItem of parentItems) { - const collection = parentItem[nestedNode.relation.meta!.one_collection_field!]; - if (!keysPerCollection[collection]) keysPerCollection[collection] = []; - keysPerCollection[collection]!.push(parentItem[nestedNode.relation.field]); - } - - for (const relatedCollection of nestedNode.names) { - const foreignField = nestedNode.relatedKey[relatedCollection]!; - const foreignIds = uniq(keysPerCollection[relatedCollection]); - - merge(nestedNode, { - query: { [relatedCollection]: { filter: { [foreignField]: { _in: foreignIds } }, limit: foreignIds.length } }, - }); - } - } - } - - return nestedCollectionNodes; -} - -function mergeWithParentItems( - schema: SchemaOverview, - nestedItem: Item | Item[], - parentItem: Item | Item[], - nestedNode: NestedCollectionNode, -) { - const env = useEnv(); - const nestedItems = toArray(nestedItem); - const parentItems = clone(toArray(parentItem)); - - if (nestedNode.type === 'm2o') { - for (const parentItem of parentItems) { - const itemChild = nestedItems.find((nestedItem) => { - return ( - nestedItem[schema.collections[nestedNode.relation.related_collection!]!.primary] == - parentItem[nestedNode.relation.field] - ); - }); - - parentItem[nestedNode.fieldKey] = itemChild || null; - } - } else if 
(nestedNode.type === 'o2m') { - for (const parentItem of parentItems) { - if (!parentItem[nestedNode.fieldKey]) parentItem[nestedNode.fieldKey] = [] as Item[]; - - const itemChildren = nestedItems.filter((nestedItem) => { - if (nestedItem === null) return false; - if (Array.isArray(nestedItem[nestedNode.relation.field])) return true; - - return ( - nestedItem[nestedNode.relation.field] == - parentItem[schema.collections[nestedNode.relation.related_collection!]!.primary] || - nestedItem[nestedNode.relation.field]?.[ - schema.collections[nestedNode.relation.related_collection!]!.primary - ] == parentItem[schema.collections[nestedNode.relation.related_collection!]!.primary] - ); - }); - - parentItem[nestedNode.fieldKey].push(...itemChildren); - - const limit = nestedNode.query.limit ?? Number(env['QUERY_LIMIT_DEFAULT']); - - if (nestedNode.query.page && nestedNode.query.page > 1) { - parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(limit * (nestedNode.query.page - 1)); - } - - if (nestedNode.query.offset && nestedNode.query.offset >= 0) { - parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(nestedNode.query.offset); - } - - if (limit !== -1) { - parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(0, limit); - } - - parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].sort((a: Item, b: Item) => { - // This is pre-filled in get-ast-from-query - const sortField = nestedNode.query.sort![0]!; - let column = sortField; - let order: 'asc' | 'desc' = 'asc'; - - if (sortField.startsWith('-')) { - column = sortField.substring(1); - order = 'desc'; - } - - if (a[column] === b[column]) return 0; - if (a[column] === null) return 1; - if (b[column] === null) return -1; - - if (order === 'asc') { - return a[column] < b[column] ? -1 : 1; - } else { - return a[column] < b[column] ? 
1 : -1; - } - }); - } - } else if (nestedNode.type === 'a2o') { - for (const parentItem of parentItems) { - if (!nestedNode.relation.meta?.one_collection_field) { - parentItem[nestedNode.fieldKey] = null; - continue; - } - - const relatedCollection = parentItem[nestedNode.relation.meta.one_collection_field]; - - if (!(nestedItem as Record)[relatedCollection]) { - parentItem[nestedNode.fieldKey] = null; - continue; - } - - const itemChild = (nestedItem as Record)[relatedCollection]!.find((nestedItem) => { - return nestedItem[nestedNode.relatedKey[relatedCollection]!] == parentItem[nestedNode.fieldKey]; - }); - - parentItem[nestedNode.fieldKey] = itemChild || null; - } - } - - return Array.isArray(parentItem) ? parentItems : parentItems[0]; -} - -function removeTemporaryFields( - schema: SchemaOverview, - rawItem: Item | Item[], - ast: AST | NestedCollectionNode, - primaryKeyField: string, - parentItem?: Item, -): null | Item | Item[] { - const rawItems = cloneDeep(toArray(rawItem)); - const items: Item[] = []; - - if (ast.type === 'a2o') { - const fields: Record = {}; - const nestedCollectionNodes: Record = {}; - - for (const relatedCollection of ast.names) { - if (!fields[relatedCollection]) fields[relatedCollection] = []; - if (!nestedCollectionNodes[relatedCollection]) nestedCollectionNodes[relatedCollection] = []; - - for (const child of ast.children[relatedCollection]!) { - if (child.type === 'field' || child.type === 'functionField') { - fields[relatedCollection]!.push(child.name); - } else { - fields[relatedCollection]!.push(child.fieldKey); - nestedCollectionNodes[relatedCollection]!.push(child); - } - } - } - - for (const rawItem of rawItems) { - const relatedCollection: string = parentItem?.[ast.relation.meta!.one_collection_field!]; - - if (rawItem === null || rawItem === undefined) return rawItem; - - let item = rawItem; - - for (const nestedNode of nestedCollectionNodes[relatedCollection]!) 
{ - item[nestedNode.fieldKey] = removeTemporaryFields( - schema, - item[nestedNode.fieldKey], - nestedNode, - schema.collections[nestedNode.relation.collection]!.primary, - item, - ); - } - - const fieldsWithFunctionsApplied = fields[relatedCollection]!.map((field) => applyFunctionToColumnName(field)); - - item = - fields[relatedCollection]!.length > 0 ? pick(rawItem, fieldsWithFunctionsApplied) : rawItem[primaryKeyField]; - - items.push(item); - } - } else { - const fields: string[] = []; - const nestedCollectionNodes: NestedCollectionNode[] = []; - - for (const child of ast.children) { - fields.push(child.fieldKey); - - if (child.type !== 'field' && child.type !== 'functionField') { - nestedCollectionNodes.push(child); - } - } - - // Make sure any requested aggregate fields are included - if (ast.query?.aggregate) { - for (const [operation, aggregateFields] of Object.entries(ast.query.aggregate)) { - if (!fields) continue; - - if (operation === 'count' && aggregateFields.includes('*')) fields.push('count'); - - fields.push(...aggregateFields.map((field) => `${operation}.${field}`)); - } - } - - for (const rawItem of rawItems) { - if (rawItem === null || rawItem === undefined) return rawItem; - - let item = rawItem; - - for (const nestedNode of nestedCollectionNodes) { - item[nestedNode.fieldKey] = removeTemporaryFields( - schema, - item[nestedNode.fieldKey], - nestedNode, - nestedNode.type === 'm2o' - ? schema.collections[nestedNode.relation.related_collection!]!.primary - : schema.collections[nestedNode.relation.collection]!.primary, - item, - ); - } - - const fieldsWithFunctionsApplied = fields.map((field) => applyFunctionToColumnName(field)); - - item = fields.length > 0 ? pick(rawItem, fieldsWithFunctionsApplied) : rawItem[primaryKeyField]; - - items.push(item); - } - } - - return Array.isArray(rawItem) ? 
items : items[0]!; -} diff --git a/api/src/database/run-ast/lib/get-db-query.ts b/api/src/database/run-ast/lib/get-db-query.ts new file mode 100644 index 0000000000..3fd6547e17 --- /dev/null +++ b/api/src/database/run-ast/lib/get-db-query.ts @@ -0,0 +1,301 @@ +import { useEnv } from '@directus/env'; +import type { Filter, Query, SchemaOverview } from '@directus/types'; +import type { Knex } from 'knex'; +import { cloneDeep } from 'lodash-es'; +import type { FieldNode, FunctionFieldNode, O2MNode } from '../../../types/ast.js'; +import type { ColumnSortRecord } from '../../../utils/apply-query.js'; +import applyQuery, { applyLimit, applySort, generateAlias } from '../../../utils/apply-query.js'; +import { getCollectionFromAlias } from '../../../utils/get-collection-from-alias.js'; +import type { AliasMap } from '../../../utils/get-column-path.js'; +import { getColumn } from '../../../utils/get-column.js'; +import { getHelpers } from '../../helpers/index.js'; +import { applyCaseWhen } from '../utils/apply-case-when.js'; +import { getColumnPreprocessor } from '../utils/get-column-pre-processor.js'; +import { getNodeAlias } from '../utils/get-field-alias.js'; +import { getInnerQueryColumnPreProcessor } from '../utils/get-inner-query-column-pre-processor.js'; +import { withPreprocessBindings } from '../utils/with-preprocess-bindings.js'; + +export function getDBQuery( + schema: SchemaOverview, + knex: Knex, + table: string, + fieldNodes: (FieldNode | FunctionFieldNode)[], + o2mNodes: O2MNode[], + query: Query, + cases: Filter[], +): Knex.QueryBuilder { + const aliasMap: AliasMap = Object.create(null); + const env = useEnv(); + const preProcess = getColumnPreprocessor(knex, schema, table, cases, aliasMap); + const queryCopy = cloneDeep(query); + const helpers = getHelpers(knex); + + const hasCaseWhen = + o2mNodes.some((node) => node.whenCase && node.whenCase.length > 0) || + fieldNodes.some((node) => node.whenCase && node.whenCase.length > 0); + + queryCopy.limit = typeof 
queryCopy.limit === 'number' ? queryCopy.limit : Number(env['QUERY_LIMIT_DEFAULT']); + + // Queries with aggregates and groupBy will not have duplicate result + if (queryCopy.aggregate || queryCopy.group) { + const flatQuery = knex.from(table); + + // Map the group fields to their respective field nodes + const groupWhenCases = hasCaseWhen + ? queryCopy.group?.map((field) => fieldNodes.find(({ fieldKey }) => fieldKey === field)?.whenCase ?? []) + : undefined; + + const dbQuery = applyQuery(knex, table, flatQuery, queryCopy, schema, cases, { aliasMap, groupWhenCases }).query; + + flatQuery.select(fieldNodes.map((node) => preProcess(node))); + + withPreprocessBindings(knex, dbQuery); + + return dbQuery; + } + + const primaryKey = schema.collections[table]!.primary; + let dbQuery = knex.from(table); + let sortRecords: ColumnSortRecord[] | undefined; + const innerQuerySortRecords: { alias: string; order: 'asc' | 'desc' }[] = []; + let hasMultiRelationalSort: boolean | undefined; + + if (queryCopy.sort) { + const sortResult = applySort(knex, schema, dbQuery, queryCopy, table, aliasMap, true); + + if (sortResult) { + sortRecords = sortResult.sortRecords; + hasMultiRelationalSort = sortResult.hasMultiRelationalSort; + } + } + + const { hasMultiRelationalFilter } = applyQuery(knex, table, dbQuery, queryCopy, schema, cases, { + aliasMap, + isInnerQuery: true, + hasMultiRelationalSort, + }); + + const needsInnerQuery = hasMultiRelationalSort || hasMultiRelationalFilter; + + if (needsInnerQuery) { + dbQuery.select(`${table}.${primaryKey}`); + + // Only add distinct if there are no case/when constructs, since otherwise we rely on group by + if (!hasCaseWhen) dbQuery.distinct(); + } else { + dbQuery.select(fieldNodes.map((node) => preProcess(node))); + + // Add flags for o2m fields with case/when to the let the DB to the partial item permissions + dbQuery.select( + o2mNodes + .filter((node) => node.whenCase && node.whenCase.length > 0) + .map((node) => { + const columnCases = 
node.whenCase!.map((index) => cases[index]!); + return applyCaseWhen( + { + column: knex.raw(1), + columnCases, + aliasMap, + cases, + table, + alias: node.fieldKey, + }, + { knex, schema }, + ); + }), + ); + } + + if (sortRecords) { + // Clears the order if any, eg: from MSSQL offset + dbQuery.clear('order'); + + if (needsInnerQuery) { + let orderByString = ''; + const orderByFields: Knex.Raw[] = []; + + sortRecords.map((sortRecord) => { + if (orderByString.length !== 0) { + orderByString += ', '; + } + + const sortAlias = `sort_${generateAlias()}`; + + if (sortRecord.column.includes('.')) { + const [alias, field] = sortRecord.column.split('.'); + const originalCollectionName = getCollectionFromAlias(alias!, aliasMap); + dbQuery.select(getColumn(knex, alias!, field!, sortAlias, schema, { originalCollectionName })); + + orderByString += `?? ${sortRecord.order}`; + orderByFields.push(getColumn(knex, alias!, field!, false, schema, { originalCollectionName })); + } else { + dbQuery.select(getColumn(knex, table, sortRecord.column, sortAlias, schema)); + + orderByString += `?? ${sortRecord.order}`; + orderByFields.push(getColumn(knex, table, sortRecord.column, false, schema)); + } + + innerQuerySortRecords.push({ alias: sortAlias, order: sortRecord.order }); + }); + + if (hasMultiRelationalSort) { + dbQuery = helpers.schema.applyMultiRelationalSort( + knex, + dbQuery, + table, + primaryKey, + orderByString, + orderByFields, + ); + + // Start order by with directus_row_number. The directus_row_number is derived from a window function that + // is ordered by the sort fields within every primary key partition. That ensures that the result with the + // row number = 1 is the top-most row of every partition, according to the selected sort fields. 
+ // Since the only relevant result is the first row of this partition, adding the directus_row_number to the + // order by here ensures that all rows with a directus_row_number = 1 show up first in the inner query result, + // and are correctly truncated by the limit, but not earlier. + orderByString = `?? asc, ${orderByString}`; + orderByFields.unshift(knex.ref('directus_row_number')); + } + + dbQuery.orderByRaw(orderByString, orderByFields); + } else { + sortRecords.map((sortRecord) => { + if (sortRecord.column.includes('.')) { + const [alias, field] = sortRecord.column.split('.'); + + sortRecord.column = getColumn(knex, alias!, field!, false, schema, { + originalCollectionName: getCollectionFromAlias(alias!, aliasMap), + }) as any; + } else { + sortRecord.column = getColumn(knex, table, sortRecord.column, false, schema) as any; + } + }); + + dbQuery.orderBy(sortRecords); + } + } + + if (!needsInnerQuery) return dbQuery; + + const innerCaseWhenAliasPrefix = generateAlias(); + + if (hasCaseWhen) { + /* If there are cases, we need to employ a trick in order to evaluate the case/when structure in the inner query, + while passing the result of the evaluation to the outer query. The case/when needs to be evaluated in the inner + query since only there all joined in tables, that might be required for the case/when, are available. + + The problem is, that the resulting columns can not be directly selected in the inner query, + as a `SELECT DISTINCT` does not work for all datatypes in all vendors. + + So instead of having an inner query which might look like this: + + SELECT DISTINCT ..., + CASE WHEN THEN END AS + + a group-by query is generated. + + Another problem is that all not all rows with the same primary key are guaranteed to have the same value for + the columns with the case/when, so we to `or` those together, but counting the number of flags in a group by + operation. This way the flag is set to > 0 if any of the rows in the group allows access to the column. 
+ + The inner query only evaluates the condition and passes up or-ed flag, that is used in the wrapper query to select + the actual column: + + SELECT ..., + COUNT (CASE WHEN THEN 1 END) AS _ + ... + GROUP BY + + Then, in the wrapper query there is no need to evaluate the condition again, but instead rely on the flag: + + SELECT ..., + CASE WHEN `inner`._ > 0 THEN END AS + */ + + const innerPreprocess = getInnerQueryColumnPreProcessor( + knex, + schema, + table, + cases, + aliasMap, + innerCaseWhenAliasPrefix, + ); + + // To optimize the query we avoid having unnecessary columns in the inner query, that don't have a caseWhen, since + // they are selected in the outer query directly + dbQuery.select(fieldNodes.map(innerPreprocess).filter((x) => x !== null)); + + // In addition to the regular columns select a flag that indicates if a user has access to o2m related field + // based on the case/when of that field. + dbQuery.select(o2mNodes.map(innerPreprocess).filter((x) => x !== null)); + + const groupByFields = [knex.raw('??.??', [table, primaryKey])]; + + if (hasMultiRelationalSort) { + // Sort fields that are not directly in the table the primary key is from need to be included in the group + // by clause, otherwise this causes problems on some DBs + groupByFields.push(...innerQuerySortRecords.map(({ alias }) => knex.raw('??', alias))); + } + + dbQuery.groupBy(groupByFields); + } + + const wrapperQuery = knex + .from(table) + .innerJoin(knex.raw('??', dbQuery.as('inner')), `${table}.${primaryKey}`, `inner.${primaryKey}`); + + if (!hasCaseWhen) { + // No need for case/when in the wrapper query, just select the preprocessed columns + wrapperQuery.select(fieldNodes.map((node) => preProcess(node))); + } else { + // This applies a simplified case/when construct in the wrapper query, that only looks at flag > 1 + + // Distinguish between column with and without case/when and handle them differently + const plainColumns = fieldNodes.filter((fieldNode) => 
!fieldNode.whenCase || fieldNode.whenCase.length === 0); + const whenCaseColumns = fieldNodes.filter((fieldNode) => fieldNode.whenCase && fieldNode.whenCase.length > 0); + + // Select the plain columns + wrapperQuery.select(plainColumns.map((node) => preProcess(node))); + + // Select the case/when columns based on the flag from the inner query + wrapperQuery.select( + whenCaseColumns.map((fieldNode) => { + const alias = getNodeAlias(fieldNode); + + const innerAlias = `${innerCaseWhenAliasPrefix}_${alias}`; + + // Preprocess the column without the case/when, since that is applied in a simpler fashion in the select + const column = preProcess({ ...fieldNode, whenCase: [] }, { noAlias: true }); + + return knex.raw(`CASE WHEN ??.?? > 0 THEN ?? END as ??`, ['inner', innerAlias, column, alias]); + }), + ); + + // Pass the flags of o2m fields up through the wrapper query + wrapperQuery.select( + o2mNodes + .filter((node) => node.whenCase && node.whenCase.length > 0) + .map((node) => { + const alias = node.fieldKey; + + const innerAlias = `${innerCaseWhenAliasPrefix}_${alias}`; + + return knex.raw(`CASE WHEN ??.?? 
> 0 THEN 1 END as ??`, ['inner', innerAlias, alias]); + }), + ); + } + + if (sortRecords) { + innerQuerySortRecords.map((innerQuerySortRecord) => { + wrapperQuery.orderBy(`inner.${innerQuerySortRecord.alias}`, innerQuerySortRecord.order); + }); + + if (hasMultiRelationalSort) { + wrapperQuery.where('inner.directus_row_number', '=', 1); + applyLimit(knex, wrapperQuery, queryCopy.limit); + } + } + + return wrapperQuery; +} diff --git a/api/src/database/run-ast/lib/parse-current-level.ts b/api/src/database/run-ast/lib/parse-current-level.ts new file mode 100644 index 0000000000..98cb1e0e63 --- /dev/null +++ b/api/src/database/run-ast/lib/parse-current-level.ts @@ -0,0 +1,67 @@ +import type { Query, SchemaOverview } from '@directus/types'; +import type { FieldNode, FunctionFieldNode, NestedCollectionNode } from '../../../types/ast.js'; +import { parseFilterKey } from '../../../utils/parse-filter-key.js'; + +export async function parseCurrentLevel( + schema: SchemaOverview, + collection: string, + children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[], + query: Query, +) { + const primaryKeyField = schema.collections[collection]!.primary; + const columnsInCollection = Object.keys(schema.collections[collection]!.fields); + + const columnsToSelectInternal: string[] = []; + const nestedCollectionNodes: NestedCollectionNode[] = []; + + for (const child of children) { + if (child.type === 'field' || child.type === 'functionField') { + const { fieldName } = parseFilterKey(child.name); + + if (columnsInCollection.includes(fieldName)) { + columnsToSelectInternal.push(child.fieldKey); + } + + continue; + } + + if (!child.relation) continue; + + if (child.type === 'm2o') { + columnsToSelectInternal.push(child.relation.field); + } + + if (child.type === 'a2o') { + columnsToSelectInternal.push(child.relation.field); + columnsToSelectInternal.push(child.relation.meta!.one_collection_field!); + } + + nestedCollectionNodes.push(child); + } + + const isAggregate = 
(query.group || (query.aggregate && Object.keys(query.aggregate).length > 0)) ?? false; + + /** Always fetch primary key in case there's a nested relation that needs it. Aggregate payloads + * can't have nested relational fields + */ + if (isAggregate === false && columnsToSelectInternal.includes(primaryKeyField) === false) { + columnsToSelectInternal.push(primaryKeyField); + } + + /** Make sure select list has unique values */ + const columnsToSelect = [...new Set(columnsToSelectInternal)]; + + const fieldNodes = columnsToSelect.map( + (column: string) => + children.find( + (childNode) => + (childNode.type === 'field' || childNode.type === 'functionField') && childNode.fieldKey === column, + ) ?? { + type: 'field', + name: column, + fieldKey: column, + }, + ) as FieldNode[]; + + return { fieldNodes, nestedCollectionNodes, primaryKeyField }; +} diff --git a/api/src/database/run-ast/run-ast.ts b/api/src/database/run-ast/run-ast.ts new file mode 100644 index 0000000000..1f28f0afd3 --- /dev/null +++ b/api/src/database/run-ast/run-ast.ts @@ -0,0 +1,153 @@ +import { useEnv } from '@directus/env'; +import type { Filter, Item, Query, SchemaOverview } from '@directus/types'; +import { cloneDeep, merge } from 'lodash-es'; +import { PayloadService } from '../../services/payload.js'; +import type { AST, FieldNode, FunctionFieldNode, NestedCollectionNode, O2MNode } from '../../types/ast.js'; +import getDatabase from '../index.js'; +import { getDBQuery } from './lib/get-db-query.js'; +import { parseCurrentLevel } from './lib/parse-current-level.js'; +import type { RunASTOptions } from './types.js'; +import { applyParentFilters } from './utils/apply-parent-filters.js'; +import { mergeWithParentItems } from './utils/merge-with-parent-items.js'; +import { removeTemporaryFields } from './utils/remove-temporary-fields.js'; + +/** + * Execute a given AST using Knex. Returns array of items based on requested AST. 
+ */ +export async function runAst( + originalAST: AST | NestedCollectionNode, + schema: SchemaOverview, + options?: RunASTOptions, +): Promise { + const ast = cloneDeep(originalAST); + + const knex = options?.knex || getDatabase(); + + if (ast.type === 'a2o') { + const results: { [collection: string]: null | Item | Item[] } = {}; + + for (const collection of ast.names) { + results[collection] = await run( + collection, + ast.children[collection]!, + ast.query[collection]!, + ast.cases[collection] ?? [], + ); + } + + return results; + } else { + return await run(ast.name, ast.children, options?.query || ast.query, ast.cases); + } + + async function run( + collection: string, + children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[], + query: Query, + cases: Filter[], + ) { + const env = useEnv(); + + // Retrieve the database columns to select in the current AST + const { fieldNodes, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel( + schema, + collection, + children, + query, + ); + + const o2mNodes = nestedCollectionNodes.filter((node): node is O2MNode => node.type === 'o2m'); + + // The actual knex query builder instance. This is a promise that resolves with the raw items from the db + const dbQuery = getDBQuery(schema, knex, collection, fieldNodes, o2mNodes, query, cases); + + const rawItems: Item | Item[] = await dbQuery; + + if (!rawItems) return null; + + // Run the items through the special transforms + const payloadService = new PayloadService(collection, { knex, schema }); + let items: null | Item | Item[] = await payloadService.processValues('read', rawItems, query.alias ?? 
{}); + + if (!items || (Array.isArray(items) && items.length === 0)) return items; + + // Apply the `_in` filters to the nested collection batches + const nestedNodes = applyParentFilters(schema, nestedCollectionNodes, items); + + for (const nestedNode of nestedNodes) { + let nestedItems: Item[] | null = []; + + if (nestedNode.type === 'o2m') { + let hasMore = true; + + let batchCount = 0; + + // If a nested node has a whenCase it indicates that the user might not be able to access the field for all items. + // In that case the queried item includes a flag under the fieldKey that is populated in the db and indicates + // if the user has access to that field for that specific item. + const hasWhenCase = nestedNode.whenCase && nestedNode.whenCase.length > 0; + let fieldAllowed: boolean | boolean[] = true; + + if (hasWhenCase) { + // Extract flag and remove field from item, so it can be populated with the actual items + if (Array.isArray(items)) { + fieldAllowed = []; + + for (const item of items) { + fieldAllowed.push(!!item[nestedNode.fieldKey]); + delete item[nestedNode.fieldKey]; + } + } else { + fieldAllowed = !!items[nestedNode.fieldKey]; + delete items[nestedNode.fieldKey]; + } + } + + while (hasMore) { + const node = merge({}, nestedNode, { + query: { + limit: env['RELATIONAL_BATCH_SIZE'], + offset: batchCount * (env['RELATIONAL_BATCH_SIZE'] as number), + page: null, + }, + }); + + nestedItems = (await runAst(node, schema, { knex, nested: true })) as Item[] | null; + + if (nestedItems) { + items = mergeWithParentItems(schema, nestedItems, items!, nestedNode, fieldAllowed)!; + } + + if (!nestedItems || nestedItems.length < (env['RELATIONAL_BATCH_SIZE'] as number)) { + hasMore = false; + } + + batchCount++; + } + } else { + const node = merge({}, nestedNode, { + query: { limit: -1 }, + }); + + nestedItems = (await runAst(node, schema, { knex, nested: true })) as Item[] | null; + + if (nestedItems) { + // Merge all fetched nested records with the parent items + 
items = mergeWithParentItems(schema, nestedItems, items!, nestedNode, true)!; + } + } + } + + // During the fetching of data, we have to inject a couple of required fields for the child nesting + // to work (primary / foreign keys) even if they're not explicitly requested. After all fetching + // and nesting is done, we parse through the output structure, and filter out all non-requested + // fields + // The field allowed flags injected in `getDBQuery` are already removed while processing the nested nodes in + // the previous step. + if (options?.nested !== true && options?.stripNonRequested !== false) { + items = removeTemporaryFields(schema, items, originalAST, primaryKeyField); + } + + return items; + } +} diff --git a/api/src/database/run-ast/types.ts b/api/src/database/run-ast/types.ts new file mode 100644 index 0000000000..8d1e86ead6 --- /dev/null +++ b/api/src/database/run-ast/types.ts @@ -0,0 +1,24 @@ +import type { Knex } from 'knex'; +import type { AST } from '../../types/ast.js'; + +export interface RunASTOptions { + /** + * Query override for the current level + */ + query?: AST['query']; + + /** + * Knex instance + */ + knex?: Knex; + + /** + * Whether or not the current execution is a nested dataset in another AST + */ + nested?: boolean; + + /** + * Whether or not to strip out non-requested required fields automatically (eg IDs / FKs) + */ + stripNonRequested?: boolean; +} diff --git a/api/src/database/run-ast/utils/apply-case-when.ts b/api/src/database/run-ast/utils/apply-case-when.ts new file mode 100644 index 0000000000..b2f605417c --- /dev/null +++ b/api/src/database/run-ast/utils/apply-case-when.ts @@ -0,0 +1,58 @@ +import type { Filter, SchemaOverview } from '@directus/types'; +import type { Knex } from 'knex'; +import { applyFilter } from '../../../utils/apply-query.js'; +import type { AliasMap } from '../../../utils/get-column-path.js'; + +export interface ApplyCaseWhenOptions { + column: Knex.Raw; + columnCases: Filter[]; + table: string; + 
cases: Filter[]; + aliasMap: AliasMap; + alias?: string; +} + +export interface ApplyCaseWhenContext { + knex: Knex; + schema: SchemaOverview; +} + +export function applyCaseWhen( + { columnCases, table, aliasMap, cases, column, alias }: ApplyCaseWhenOptions, + { knex, schema }: ApplyCaseWhenContext, +): Knex.Raw { + const caseQuery = knex.queryBuilder(); + + applyFilter(knex, schema, caseQuery, { _or: columnCases }, table, aliasMap, cases); + + const compiler = knex.client.queryCompiler(caseQuery); + + const sqlParts = []; + + // Only empty filters, so no where was generated, skip it + if (!compiler.grouped.where) return column; + + for (const statement of compiler.grouped.where) { + const val = compiler[statement.type](statement); + + if (val) { + if (sqlParts.length > 0) { + sqlParts.push(statement.bool); + } + + sqlParts.push(val); + } + } + + const sql = sqlParts.join(' '); + const bindings = [...caseQuery.toSQL().bindings, column]; + + let rawCase = `(CASE WHEN ${sql} THEN ?? END)`; + + if (alias) { + rawCase += ' AS ??'; + bindings.push(alias); + } + + return knex.raw(rawCase, bindings); +} diff --git a/api/src/database/run-ast/utils/apply-parent-filters.ts b/api/src/database/run-ast/utils/apply-parent-filters.ts new file mode 100644 index 0000000000..e214c45745 --- /dev/null +++ b/api/src/database/run-ast/utils/apply-parent-filters.ts @@ -0,0 +1,69 @@ +import type { Item, SchemaOverview } from '@directus/types'; +import { toArray } from '@directus/utils'; +import { isNil, merge, uniq } from 'lodash-es'; +import type { NestedCollectionNode } from '../../../types/ast.js'; + +export function applyParentFilters( + schema: SchemaOverview, + nestedCollectionNodes: NestedCollectionNode[], + parentItem: Item | Item[], +) { + const parentItems = toArray(parentItem); + + for (const nestedNode of nestedCollectionNodes) { + if (!nestedNode.relation) continue; + + if (nestedNode.type === 'm2o') { + const foreignField = 
schema.collections[nestedNode.relation.related_collection!]!.primary; + const foreignIds = uniq(parentItems.map((res) => res[nestedNode.relation.field])).filter((id) => !isNil(id)); + + merge(nestedNode, { query: { filter: { [foreignField]: { _in: foreignIds } } } }); + } else if (nestedNode.type === 'o2m') { + const relatedM2OisFetched = !!nestedNode.children.find((child) => { + return child.type === 'field' && child.name === nestedNode.relation.field; + }); + + if (relatedM2OisFetched === false) { + nestedNode.children.push({ + type: 'field', + name: nestedNode.relation.field, + fieldKey: nestedNode.relation.field, + whenCase: [], + }); + } + + if (nestedNode.relation.meta?.sort_field) { + nestedNode.children.push({ + type: 'field', + name: nestedNode.relation.meta.sort_field, + fieldKey: nestedNode.relation.meta.sort_field, + whenCase: [], + }); + } + + const foreignField = nestedNode.relation.field; + const foreignIds = uniq(parentItems.map((res) => res[nestedNode.parentKey])).filter((id) => !isNil(id)); + + merge(nestedNode, { query: { filter: { [foreignField]: { _in: foreignIds } } } }); + } else if (nestedNode.type === 'a2o') { + const keysPerCollection: { [collection: string]: (string | number)[] } = {}; + + for (const parentItem of parentItems) { + const collection = parentItem[nestedNode.relation.meta!.one_collection_field!]; + if (!keysPerCollection[collection]) keysPerCollection[collection] = []; + keysPerCollection[collection]!.push(parentItem[nestedNode.relation.field]); + } + + for (const relatedCollection of nestedNode.names) { + const foreignField = nestedNode.relatedKey[relatedCollection]!; + const foreignIds = uniq(keysPerCollection[relatedCollection]); + + merge(nestedNode, { + query: { [relatedCollection]: { filter: { [foreignField]: { _in: foreignIds } }, limit: foreignIds.length } }, + }); + } + } + } + + return nestedCollectionNodes; +} diff --git a/api/src/database/run-ast/utils/get-column-pre-processor.ts 
b/api/src/database/run-ast/utils/get-column-pre-processor.ts new file mode 100644 index 0000000000..47e87daeb7 --- /dev/null +++ b/api/src/database/run-ast/utils/get-column-pre-processor.ts @@ -0,0 +1,86 @@ +import type { Filter, SchemaOverview } from '@directus/types'; +import type { Knex } from 'knex'; +import type { FieldNode, FunctionFieldNode, M2ONode } from '../../../types/ast.js'; +import { joinFilterWithCases } from '../../../utils/apply-query.js'; +import type { AliasMap } from '../../../utils/get-column-path.js'; +import { getColumn } from '../../../utils/get-column.js'; +import { parseFilterKey } from '../../../utils/parse-filter-key.js'; +import { getHelpers } from '../../helpers/index.js'; +import { applyCaseWhen } from './apply-case-when.js'; +import { getNodeAlias } from './get-field-alias.js'; + +interface NodePreProcessOptions { + /** Don't assign an alias to the column but instead return the column as is */ + noAlias?: boolean; +} + +export function getColumnPreprocessor( + knex: Knex, + schema: SchemaOverview, + table: string, + cases: Filter[], + aliasMap: AliasMap, +) { + const helpers = getHelpers(knex); + + return function ( + fieldNode: FieldNode | FunctionFieldNode | M2ONode, + options?: NodePreProcessOptions, + ): Knex.Raw { + // Don't assign an alias to the column expression if the field has a whenCase + // (since the alias will be assigned in applyCaseWhen) or if the noAlias option is set + const hasWhenCase = fieldNode.whenCase && fieldNode.whenCase.length > 0; + const noAlias = options?.noAlias || hasWhenCase; + const alias = getNodeAlias(fieldNode); + + const rawColumnAlias = noAlias ? 
false : alias; + + let field; + + if (fieldNode.type === 'field' || fieldNode.type === 'functionField') { + const { fieldName } = parseFilterKey(fieldNode.name); + field = schema.collections[table]!.fields[fieldName]; + } else { + field = schema.collections[fieldNode.relation.collection]!.fields[fieldNode.relation.field]; + } + + let column; + + if (field?.type?.startsWith('geometry')) { + column = helpers.st.asText(table, field.field, rawColumnAlias); + } else if (fieldNode.type === 'functionField') { + // Include the field cases in the functionField query filter + column = getColumn(knex, table, fieldNode.name, rawColumnAlias, schema, { + query: { + ...fieldNode.query, + filter: joinFilterWithCases(fieldNode.query.filter, fieldNode.cases), + }, + cases: fieldNode.cases, + }); + } else { + column = getColumn(knex, table, fieldNode.name, rawColumnAlias, schema); + } + + if (hasWhenCase) { + const columnCases: Filter[] = []; + + for (const index of fieldNode.whenCase) { + columnCases.push(cases[index]!); + } + + column = applyCaseWhen( + { + column, + columnCases, + aliasMap, + cases, + table, + alias, + }, + { knex, schema }, + ); + } + + return column; + }; +} diff --git a/api/src/database/run-ast/utils/get-field-alias.ts b/api/src/database/run-ast/utils/get-field-alias.ts new file mode 100644 index 0000000000..0389ed5541 --- /dev/null +++ b/api/src/database/run-ast/utils/get-field-alias.ts @@ -0,0 +1,6 @@ +import type { FieldNode, FunctionFieldNode, M2ONode, O2MNode } from '../../../types/index.js'; +import { applyFunctionToColumnName } from '../../../utils/apply-function-to-column-name.js'; + +export function getNodeAlias(node: FieldNode | FunctionFieldNode | M2ONode | O2MNode) { + return applyFunctionToColumnName(node.fieldKey); +} diff --git a/api/src/database/run-ast/utils/get-inner-query-column-pre-processor.ts b/api/src/database/run-ast/utils/get-inner-query-column-pre-processor.ts new file mode 100644 index 0000000000..ea60d060aa --- /dev/null +++ 
b/api/src/database/run-ast/utils/get-inner-query-column-pre-processor.ts @@ -0,0 +1,43 @@ +import type { Filter, SchemaOverview } from '@directus/types'; +import type { Knex } from 'knex'; +import type { FieldNode, FunctionFieldNode, M2ONode, O2MNode } from '../../../types/index.js'; +import type { AliasMap } from '../../../utils/get-column-path.js'; +import { applyCaseWhen } from './apply-case-when.js'; +import { getNodeAlias } from './get-field-alias.js'; + +export function getInnerQueryColumnPreProcessor( + knex: Knex, + schema: SchemaOverview, + table: string, + cases: Filter[], + aliasMap: AliasMap, + aliasPrefix: string, +) { + return function (fieldNode: FieldNode | FunctionFieldNode | M2ONode | O2MNode): Knex.Raw | null { + const alias = getNodeAlias(fieldNode); + + if (fieldNode.whenCase && fieldNode.whenCase.length > 0) { + const columnCases: Filter[] = []; + + for (const index of fieldNode.whenCase) { + columnCases.push(cases[index]!); + } + + // Don't pass in the alias as we need to wrap the whole case/when in a count() an alias that + const caseWhen = applyCaseWhen( + { + column: knex.raw(1), + columnCases, + aliasMap, + cases, + table, + }, + { knex, schema }, + ); + + return knex.raw('COUNT(??) 
AS ??', [caseWhen, `${aliasPrefix}_${alias}`]); + } + + return null; + }; +} diff --git a/api/src/database/run-ast/utils/merge-with-parent-items.ts b/api/src/database/run-ast/utils/merge-with-parent-items.ts new file mode 100644 index 0000000000..7bcc87a674 --- /dev/null +++ b/api/src/database/run-ast/utils/merge-with-parent-items.ts @@ -0,0 +1,112 @@ +import { useEnv } from '@directus/env'; +import type { Item, SchemaOverview } from '@directus/types'; +import { toArray } from '@directus/utils'; +import { clone, isArray } from 'lodash-es'; +import type { NestedCollectionNode } from '../../../types/ast.js'; + +export function mergeWithParentItems( + schema: SchemaOverview, + nestedItem: Item | Item[], + parentItem: Item | Item[], + nestedNode: NestedCollectionNode, + fieldAllowed: boolean | boolean[], +) { + const env = useEnv(); + const nestedItems = toArray(nestedItem); + const parentItems = clone(toArray(parentItem)); + + if (nestedNode.type === 'm2o') { + for (const parentItem of parentItems) { + const itemChild = nestedItems.find((nestedItem) => { + return ( + nestedItem[schema.collections[nestedNode.relation.related_collection!]!.primary] == + parentItem[nestedNode.relation.field] + ); + }); + + parentItem[nestedNode.fieldKey] = itemChild || null; + } + } else if (nestedNode.type === 'o2m') { + for (const [index, parentItem] of parentItems.entries()) { + if (fieldAllowed === false || (isArray(fieldAllowed) && !fieldAllowed[index])) { + parentItem[nestedNode.fieldKey] = null; + continue; + } + + if (!parentItem[nestedNode.fieldKey]) parentItem[nestedNode.fieldKey] = [] as Item[]; + + const itemChildren = nestedItems.filter((nestedItem) => { + if (nestedItem === null) return false; + if (Array.isArray(nestedItem[nestedNode.relation.field])) return true; + + return ( + nestedItem[nestedNode.relation.field] == + parentItem[schema.collections[nestedNode.relation.related_collection!]!.primary] || + nestedItem[nestedNode.relation.field]?.[ + 
schema.collections[nestedNode.relation.related_collection!]!.primary + ] == parentItem[schema.collections[nestedNode.relation.related_collection!]!.primary] + ); + }); + + parentItem[nestedNode.fieldKey].push(...itemChildren); + + const limit = nestedNode.query.limit ?? Number(env['QUERY_LIMIT_DEFAULT']); + + if (nestedNode.query.page && nestedNode.query.page > 1) { + parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(limit * (nestedNode.query.page - 1)); + } + + if (nestedNode.query.offset && nestedNode.query.offset >= 0) { + parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(nestedNode.query.offset); + } + + if (limit !== -1) { + parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(0, limit); + } + + parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].sort((a: Item, b: Item) => { + // This is pre-filled in get-ast-from-query + const sortField = nestedNode.query.sort![0]!; + let column = sortField; + let order: 'asc' | 'desc' = 'asc'; + + if (sortField.startsWith('-')) { + column = sortField.substring(1); + order = 'desc'; + } + + if (a[column] === b[column]) return 0; + if (a[column] === null) return 1; + if (b[column] === null) return -1; + + if (order === 'asc') { + return a[column] < b[column] ? -1 : 1; + } else { + return a[column] < b[column] ? 1 : -1; + } + }); + } + } else if (nestedNode.type === 'a2o') { + for (const parentItem of parentItems) { + if (!nestedNode.relation.meta?.one_collection_field) { + parentItem[nestedNode.fieldKey] = null; + continue; + } + + const relatedCollection = parentItem[nestedNode.relation.meta.one_collection_field]; + + if (!(nestedItem as Record)[relatedCollection]) { + parentItem[nestedNode.fieldKey] = null; + continue; + } + + const itemChild = (nestedItem as Record)[relatedCollection]!.find((nestedItem) => { + return nestedItem[nestedNode.relatedKey[relatedCollection]!] 
== parentItem[nestedNode.fieldKey]; + }); + + parentItem[nestedNode.fieldKey] = itemChild || null; + } + } + + return Array.isArray(parentItem) ? parentItems : parentItems[0]; +} diff --git a/api/src/database/run-ast/utils/remove-temporary-fields.ts b/api/src/database/run-ast/utils/remove-temporary-fields.ts new file mode 100644 index 0000000000..75f5d2efa6 --- /dev/null +++ b/api/src/database/run-ast/utils/remove-temporary-fields.ts @@ -0,0 +1,108 @@ +import type { Item, SchemaOverview } from '@directus/types'; +import { toArray } from '@directus/utils'; +import { cloneDeep, pick } from 'lodash-es'; +import type { AST, NestedCollectionNode } from '../../../types/ast.js'; +import { applyFunctionToColumnName } from '../../../utils/apply-function-to-column-name.js'; + +export function removeTemporaryFields( + schema: SchemaOverview, + rawItem: Item | Item[], + ast: AST | NestedCollectionNode, + primaryKeyField: string, + parentItem?: Item, +): null | Item | Item[] { + const rawItems = cloneDeep(toArray(rawItem)); + const items: Item[] = []; + + if (ast.type === 'a2o') { + const fields: Record = {}; + const nestedCollectionNodes: Record = {}; + + for (const relatedCollection of ast.names) { + if (!fields[relatedCollection]) fields[relatedCollection] = []; + if (!nestedCollectionNodes[relatedCollection]) nestedCollectionNodes[relatedCollection] = []; + + for (const child of ast.children[relatedCollection]!) { + if (child.type === 'field' || child.type === 'functionField') { + fields[relatedCollection]!.push(child.name); + } else { + fields[relatedCollection]!.push(child.fieldKey); + nestedCollectionNodes[relatedCollection]!.push(child); + } + } + } + + for (const rawItem of rawItems) { + const relatedCollection: string = parentItem?.[ast.relation.meta!.one_collection_field!]; + + if (rawItem === null || rawItem === undefined) return rawItem; + + let item = rawItem; + + for (const nestedNode of nestedCollectionNodes[relatedCollection]!) 
{ + item[nestedNode.fieldKey] = removeTemporaryFields( + schema, + item[nestedNode.fieldKey], + nestedNode, + schema.collections[nestedNode.relation.collection]!.primary, + item, + ); + } + + const fieldsWithFunctionsApplied = fields[relatedCollection]!.map((field) => applyFunctionToColumnName(field)); + + item = + fields[relatedCollection]!.length > 0 ? pick(rawItem, fieldsWithFunctionsApplied) : rawItem[primaryKeyField]; + + items.push(item); + } + } else { + const fields: string[] = []; + const nestedCollectionNodes: NestedCollectionNode[] = []; + + for (const child of ast.children) { + fields.push(child.fieldKey); + + if (child.type !== 'field' && child.type !== 'functionField') { + nestedCollectionNodes.push(child); + } + } + + // Make sure any requested aggregate fields are included + if (ast.query?.aggregate) { + for (const [operation, aggregateFields] of Object.entries(ast.query.aggregate)) { + if (!fields) continue; + + if (operation === 'count' && aggregateFields.includes('*')) fields.push('count'); + + fields.push(...aggregateFields.map((field) => `${operation}.${field}`)); + } + } + + for (const rawItem of rawItems) { + if (rawItem === null || rawItem === undefined) return rawItem; + + let item = rawItem; + + for (const nestedNode of nestedCollectionNodes) { + item[nestedNode.fieldKey] = removeTemporaryFields( + schema, + item[nestedNode.fieldKey], + nestedNode, + nestedNode.type === 'm2o' + ? schema.collections[nestedNode.relation.related_collection!]!.primary + : schema.collections[nestedNode.relation.collection]!.primary, + item, + ); + } + + const fieldsWithFunctionsApplied = fields.map((field) => applyFunctionToColumnName(field)); + + item = fields.length > 0 ? pick(rawItem, fieldsWithFunctionsApplied) : rawItem[primaryKeyField]; + + items.push(item); + } + } + + return Array.isArray(rawItem) ? 
items : items[0]!; +} diff --git a/api/src/database/run-ast/utils/with-preprocess-bindings.ts b/api/src/database/run-ast/utils/with-preprocess-bindings.ts new file mode 100644 index 0000000000..73475f9740 --- /dev/null +++ b/api/src/database/run-ast/utils/with-preprocess-bindings.ts @@ -0,0 +1,21 @@ +import type { Knex } from 'knex'; +import { getHelpers } from '../../helpers/index.js'; + +export function withPreprocessBindings(knex: Knex, dbQuery: Knex.QueryBuilder) { + const schemaHelper = getHelpers(knex).schema; + + dbQuery.client = new Proxy(dbQuery.client, { + get(target, prop, receiver) { + if (prop === 'query') { + return (connection: any, queryParam: any) => { + return Reflect.get(target, prop, receiver).bind(target)( + connection, + schemaHelper.preprocessBindings(queryParam), + ); + }; + } + + return Reflect.get(target, prop, receiver); + }, + }); +} diff --git a/api/src/flows.ts b/api/src/flows.ts index a836b82984..2c69858a4c 100644 --- a/api/src/flows.ts +++ b/api/src/flows.ts @@ -2,10 +2,11 @@ import { Action } from '@directus/constants'; import { useEnv } from '@directus/env'; import { ForbiddenError } from '@directus/errors'; import type { OperationHandler } from '@directus/extensions'; +import { isSystemCollection } from '@directus/system-data'; import type { Accountability, ActionHandler, FilterHandler, Flow, Operation, SchemaOverview } from '@directus/types'; import { applyOptionsData, getRedactedString, isValidJSON, parseJSON, toArray } from '@directus/utils'; import type { Knex } from 'knex'; -import { omit, pick } from 'lodash-es'; +import { pick } from 'lodash-es'; import { get } from 'micromustache'; import { useBus } from './bus/index.js'; import getDatabase from './database/index.js'; @@ -22,7 +23,6 @@ import { JobQueue } from './utils/job-queue.js'; import { mapValuesDeep } from './utils/map-values-deep.js'; import { redactObject } from './utils/redact-object.js'; import { scheduleSynchronizedJob, validateCron } from 
'./utils/schedule.js'; -import { isSystemCollection } from '@directus/system-data'; let flowManager: FlowManager | undefined; @@ -371,7 +371,7 @@ class FlowManager { data: { steps: steps.map((step) => redactObject(step, { values: this.envs }, getRedactedString)), data: redactObject( - omit(keyedData, '$accountability.permissions'), // Permissions is a ton of data, and is just a copy of what's in the directus_permissions table + keyedData, { keys: [ ['**', 'headers', 'authorization'], diff --git a/api/src/middleware/authenticate.test.ts b/api/src/middleware/authenticate.test.ts index 67718ec80b..417ad918d7 100644 --- a/api/src/middleware/authenticate.test.ts +++ b/api/src/middleware/authenticate.test.ts @@ -5,9 +5,23 @@ import type { Knex } from 'knex'; import { afterEach, expect, test, vi } from 'vitest'; import getDatabase from '../database/index.js'; import emitter from '../emitter.js'; +import { createDefaultAccountability } from '../permissions/utils/create-default-accountability.js'; +import { fetchRolesTree } from '../permissions/lib/fetch-roles-tree.js'; +import { fetchGlobalAccess } from '../permissions/modules/fetch-global-access/fetch-global-access.js'; import '../types/express.d.ts'; import { handler } from './authenticate.js'; +const reqGetImplementation = (string: any) => { + switch (string) { + case 'user-agent': + return 'fake-user-agent'; + case 'origin': + return 'fake-origin'; + default: + return null; + } +}; + vi.mock('../database/index'); // This is required because logger uses global env which is imported before the tests run. 
Can be @@ -27,6 +41,9 @@ vi.mock('@directus/env', () => ({ }), })); +vi.mock('../permissions/lib/fetch-roles-tree.js'); +vi.mock('../permissions/modules/fetch-global-access/fetch-global-access.js'); + afterEach(() => { vi.clearAllMocks(); }); @@ -35,7 +52,7 @@ test('Short-circuits when authenticate filter is used', async () => { const req = { ip: '127.0.0.1', cookies: {}, - get: vi.fn(), + get: vi.fn(reqGetImplementation), } as unknown as Request; const res = {} as Response; @@ -55,16 +72,7 @@ test('Uses default public accountability when no token is given', async () => { const req = { ip: '127.0.0.1', cookies: {}, - get: vi.fn((string) => { - switch (string) { - case 'user-agent': - return 'fake-user-agent'; - case 'origin': - return 'fake-origin'; - default: - return null; - } - }), + get: vi.fn(reqGetImplementation), } as unknown as Request; const res = {} as Response; @@ -74,15 +82,13 @@ test('Uses default public accountability when no token is given', async () => { await handler(req, res, next); - expect(req.accountability).toEqual({ - user: null, - role: null, - admin: false, - app: false, - ip: '127.0.0.1', - userAgent: 'fake-user-agent', - origin: 'fake-origin', - }); + expect(req.accountability).toEqual( + createDefaultAccountability({ + ip: '127.0.0.1', + userAgent: 'fake-user-agent', + origin: 'fake-origin', + }), + ); expect(next).toHaveBeenCalledTimes(1); }); @@ -116,27 +122,22 @@ test('Sets accountability to payload contents if valid token is passed', async ( const req = { ip: '127.0.0.1', cookies: {}, - get: vi.fn((string) => { - switch (string) { - case 'user-agent': - return 'fake-user-agent'; - case 'origin': - return 'fake-origin'; - default: - return null; - } - }), + get: vi.fn(reqGetImplementation), token, } as unknown as Request; const res = {} as Response; const next = vi.fn(); + vi.mocked(fetchRolesTree).mockResolvedValue([roleID]); + vi.mocked(fetchGlobalAccess).mockResolvedValue({ app: appAccess, admin: adminAccess }); + await 
handler(req, res, next); expect(req.accountability).toEqual({ user: userID, role: roleID, + roles: [roleID], app: appAccess, admin: adminAccess, share, @@ -169,6 +170,7 @@ test('Sets accountability to payload contents if valid token is passed', async ( expect(req.accountability).toEqual({ user: userID, role: roleID, + roles: [roleID], app: appAccess, admin: adminAccess, share, @@ -193,16 +195,7 @@ test('Throws InvalidCredentialsError when static token is used, but user does no const req = { ip: '127.0.0.1', cookies: {}, - get: vi.fn((string) => { - switch (string) { - case 'user-agent': - return 'fake-user-agent'; - case 'origin': - return 'fake-origin'; - default: - return null; - } - }), + get: vi.fn(reqGetImplementation), token: 'static-token', } as unknown as Request; @@ -217,16 +210,7 @@ test('Sets accountability to user information when static token is used', async const req = { ip: '127.0.0.1', cookies: {}, - get: vi.fn((string) => { - switch (string) { - case 'user-agent': - return 'fake-user-agent'; - case 'origin': - return 'fake-origin'; - default: - return null; - } - }), + get: vi.fn(reqGetImplementation), token: 'static-token', } as unknown as Request; @@ -238,6 +222,7 @@ test('Sets accountability to user information when static token is used', async const expectedAccountability = { user: testUser.id, role: testUser.role, + roles: [testUser.role], app: testUser.app_access, admin: testUser.admin_access, ip: '127.0.0.1', @@ -253,6 +238,9 @@ test('Sets accountability to user information when static token is used', async first: vi.fn().mockResolvedValue(testUser), } as unknown as Knex); + vi.mocked(fetchRolesTree).mockResolvedValue([testUser.role]); + vi.mocked(fetchGlobalAccess).mockResolvedValue({ app: testUser.app_access, admin: testUser.admin_access }); + await handler(req, res, next); expect(req.accountability).toEqual(expectedAccountability); @@ -272,6 +260,9 @@ test('Sets accountability to user information when static token is used', async 
testUser.app_access = '1' as never; expectedAccountability.admin = false; expectedAccountability.app = true; + + vi.mocked(fetchGlobalAccess).mockResolvedValue({ app: true, admin: false }); + await handler(req, res, next); expect(req.accountability).toEqual(expectedAccountability); expect(next).toHaveBeenCalledTimes(1); @@ -283,16 +274,7 @@ test('Invalid session token responds with error and clears the cookie', async () cookies: { directus_session: 'session-token', }, - get: vi.fn((string) => { - switch (string) { - case 'user-agent': - return 'fake-user-agent'; - case 'origin': - return 'fake-origin'; - default: - return null; - } - }), + get: vi.fn(reqGetImplementation), token: 'session-token', } as unknown as Request; @@ -321,16 +303,7 @@ test('Invalid query token responds with error but does not clear the session coo cookies: { directus_session: 'session-token', }, - get: vi.fn((string) => { - switch (string) { - case 'user-agent': - return 'fake-user-agent'; - case 'origin': - return 'fake-origin'; - default: - return null; - } - }), + get: vi.fn(reqGetImplementation), token: 'static-token', } as unknown as Request; diff --git a/api/src/middleware/authenticate.ts b/api/src/middleware/authenticate.ts index bc2710c5af..af44b53cfa 100644 --- a/api/src/middleware/authenticate.ts +++ b/api/src/middleware/authenticate.ts @@ -3,6 +3,7 @@ import type { NextFunction, Request, Response } from 'express'; import { isEqual } from 'lodash-es'; import getDatabase from '../database/index.js'; import emitter from '../emitter.js'; +import { createDefaultAccountability } from '../permissions/utils/create-default-accountability.js'; import asyncHandler from '../utils/async-handler.js'; import { getAccountabilityForToken } from '../utils/get-accountability-for-token.js'; import { getIPFromReq } from '../utils/get-ip-from-req.js'; @@ -16,13 +17,7 @@ import { SESSION_COOKIE_OPTIONS } from '../constants.js'; export const handler = async (req: Request, res: Response, next: 
NextFunction) => { const env = useEnv(); - const defaultAccountability: Accountability = { - user: null, - role: null, - admin: false, - app: false, - ip: getIPFromReq(req), - }; + const defaultAccountability: Accountability = createDefaultAccountability({ ip: getIPFromReq(req) }); const userAgent = req.get('user-agent')?.substring(0, 1024); if (userAgent) defaultAccountability.userAgent = userAgent; diff --git a/api/src/middleware/cache.ts b/api/src/middleware/cache.ts index bc77c14815..892418e5f5 100644 --- a/api/src/middleware/cache.ts +++ b/api/src/middleware/cache.ts @@ -21,7 +21,7 @@ const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next) return next(); } - const key = getCacheKey(req); + const key = await getCacheKey(req); let cachedData; diff --git a/api/src/middleware/check-ip.ts b/api/src/middleware/check-ip.ts deleted file mode 100644 index dbb9abe7da..0000000000 --- a/api/src/middleware/check-ip.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { InvalidIpError } from '@directus/errors'; -import type { RequestHandler } from 'express'; -import getDatabase from '../database/index.js'; -import { useLogger } from '../logger/index.js'; -import asyncHandler from '../utils/async-handler.js'; -import { ipInNetworks } from '../utils/ip-in-networks.js'; - -export const checkIP: RequestHandler = asyncHandler(async (req, _res, next) => { - const database = getDatabase(); - const logger = useLogger(); - - const { role: roleId, ip } = req.accountability!; - - const query = database.select('ip_access').from('directus_roles'); - - if (roleId) { - query.where({ id: roleId }); - } else { - query.whereNull('id'); - } - - const role: { ip_access: string | null } | undefined = await query.first(); - - if (!role?.ip_access) return next(); - - const ipAllowList = role.ip_access.split(',').filter((ip) => ip); - - if (ipAllowList.length > 0) { - if (!ip) throw new InvalidIpError(); - - let allowed; - - try { - allowed = ipInNetworks(ip, ipAllowList); - } catch 
(error) { - logger.warn(`Invalid IP access configuration for role "${roleId}"`); - logger.warn(error); - - throw new InvalidIpError(); - } - - if (!allowed) throw new InvalidIpError(); - } - - return next(); -}); diff --git a/api/src/middleware/get-permissions.ts b/api/src/middleware/get-permissions.ts deleted file mode 100644 index 97267b6988..0000000000 --- a/api/src/middleware/get-permissions.ts +++ /dev/null @@ -1,15 +0,0 @@ -import type { RequestHandler } from 'express'; -import asyncHandler from '../utils/async-handler.js'; -import { getPermissions as getPermissionsUtil } from '../utils/get-permissions.js'; - -const getPermissions: RequestHandler = asyncHandler(async (req, _res, next) => { - if (!req.accountability) { - throw new Error('getPermissions middleware needs to be called after authenticate'); - } - - req.accountability.permissions = await getPermissionsUtil(req.accountability, req.schema); - - return next(); -}); - -export default getPermissions; diff --git a/api/src/middleware/respond.ts b/api/src/middleware/respond.ts index c1f631b12c..1154c85afb 100644 --- a/api/src/middleware/respond.ts +++ b/api/src/middleware/respond.ts @@ -33,7 +33,7 @@ export const respond: RequestHandler = asyncHandler(async (req, res) => { res.locals['cache'] !== false && exceedsMaxSize === false ) { - const key = getCacheKey(req); + const key = await getCacheKey(req); try { await setCacheValue(cache, key, res.locals['payload'], getMilliseconds(env['CACHE_TTL'])); diff --git a/api/src/permissions/cache.ts b/api/src/permissions/cache.ts new file mode 100644 index 0000000000..46e1e8ef11 --- /dev/null +++ b/api/src/permissions/cache.ts @@ -0,0 +1,27 @@ +import { defineCache, type CacheConfig } from '@directus/memory'; +import { redisConfigAvailable, useRedis } from '../redis/index.js'; + +const localOnly = redisConfigAvailable() === false; + +const config: CacheConfig = localOnly + ? 
{ + type: 'local', + maxKeys: 500, + } + : { + type: 'multi', + redis: { + namespace: 'permissions', + redis: useRedis(), + }, + local: { + maxKeys: 100, + }, + }; + +export const useCache = defineCache(config); + +export function clearCache() { + const cache = useCache(); + return cache.clear(); +} diff --git a/api/src/permissions/lib/fetch-permissions.test.ts b/api/src/permissions/lib/fetch-permissions.test.ts new file mode 100644 index 0000000000..07a5213ade --- /dev/null +++ b/api/src/permissions/lib/fetch-permissions.test.ts @@ -0,0 +1,125 @@ +import type { Accountability, Permission } from '@directus/types'; +import { beforeEach, expect, test, vi } from 'vitest'; +import { PermissionsService } from '../../services/permissions.js'; +import type { Context } from '../types.js'; +import { fetchDynamicVariableContext } from '../utils/fetch-dynamic-variable-context.js'; +import { processPermissions } from '../utils/process-permissions.js'; +import { _fetchPermissions as fetchPermissions } from './fetch-permissions.js'; +import { withAppMinimalPermissions } from './with-app-minimal-permissions.js'; + +vi.mock('../../services/permissions.js', () => ({ + PermissionsService: vi.fn(), +})); + +vi.mock('./with-app-minimal-permissions.js'); +vi.mock('../utils/fetch-dynamic-variable-context.js'); +vi.mock('../utils/process-permissions.js'); + +beforeEach(() => { + PermissionsService.prototype.readByQuery = vi.fn(); + + vi.mocked(fetchDynamicVariableContext).mockResolvedValue({}); + + vi.mocked(withAppMinimalPermissions).mockImplementation((_, permissions) => permissions); + vi.mocked(processPermissions).mockImplementation(({ permissions }) => permissions); +}); + +test('Returns permissions read through service sorted by the order of policies', async () => { + const permissions: Permission[] = [ + { policy: 'policy-2' }, + { policy: 'policy-1' }, + { policy: 'policy-1' }, + ] as Permission[]; + + const policies = ['policy-1', 'policy-2'] as string[]; + const collections = 
[] as string[]; + + vi.mocked(PermissionsService.prototype.readByQuery).mockResolvedValue(permissions); + + const res = await fetchPermissions({ action: 'read', policies, collections }, {} as Context); + + expect(res).toStrictEqual([{ policy: 'policy-1' }, { policy: 'policy-1' }, { policy: 'policy-2' }]); + + expect(PermissionsService.prototype.readByQuery).toHaveBeenCalledWith({ + filter: { + _and: [{ policy: { _in: policies } }, { action: { _eq: 'read' } }, { collection: { _in: collections } }], + }, + limit: -1, + }); +}); + +test('Returns all action permissions if action is undefined', async () => { + const permissions: Permission[] = [{ policy: 'policy-1' }] as Permission[]; + const policies = [] as string[]; + const collections = [] as string[]; + + vi.mocked(PermissionsService.prototype.readByQuery).mockResolvedValue(permissions); + + const res = await fetchPermissions({ policies, collections }, {} as Context); + + expect(res).toStrictEqual(permissions); + + expect(PermissionsService.prototype.readByQuery).toHaveBeenCalledWith({ + filter: { + _and: [{ policy: { _in: policies } }, { collection: { _in: collections } }], + }, + limit: -1, + }); +}); + +test('Fetches for all collections when collections filter is undefined', async () => { + const permissions: Permission[] = [{ policy: 'policy-1' }] as Permission[]; + const policies = [] as string[]; + + vi.mocked(PermissionsService.prototype.readByQuery).mockResolvedValue(permissions); + + const res = await fetchPermissions({ action: 'read', policies }, {} as Context); + + expect(res).toStrictEqual(permissions); + + expect(PermissionsService.prototype.readByQuery).toHaveBeenCalledWith({ + filter: { + _and: [{ policy: { _in: policies } }, { action: { _eq: 'read' } }], + }, + limit: -1, + }); +}); + +test('Adds minimal permissions if accountability is passed', async () => { + const permissions: Permission[] = [{ policy: 'policy-1' }] as Permission[]; + const accountability = {} as unknown as Accountability; + 
vi.mocked(PermissionsService.prototype.readByQuery).mockResolvedValue(permissions); + + const res = await fetchPermissions({ accountability, policies: [], action: 'read' }, {} as Context); + + expect(res).toStrictEqual(permissions); + + expect(withAppMinimalPermissions).toHaveBeenCalledWith(accountability, permissions, { + _and: [{ action: { _eq: 'read' } }], + }); +}); + +test('Injects dynamic variables by calling process permissions', async () => { + const permissions: Permission[] = [{ policy: 'policy-1' }] as Permission[]; + const accountability = {} as unknown as Accountability; + vi.mocked(PermissionsService.prototype.readByQuery).mockResolvedValue(permissions); + + const res = await fetchPermissions({ accountability, policies: ['policy-1'], action: 'read' }, {} as Context); + + expect(res).toStrictEqual(permissions); + + expect(fetchDynamicVariableContext).toHaveBeenCalledWith( + { + accountability, + policies: ['policy-1'], + permissions, + }, + {}, + ); + + expect(processPermissions).toHaveBeenCalledWith({ + permissions, + accountability, + permissionsContext: {}, + }); +}); diff --git a/api/src/permissions/lib/fetch-permissions.ts b/api/src/permissions/lib/fetch-permissions.ts new file mode 100644 index 0000000000..464f30af8f --- /dev/null +++ b/api/src/permissions/lib/fetch-permissions.ts @@ -0,0 +1,86 @@ +import type { Accountability, Filter, Permission, PermissionsAction } from '@directus/types'; +import { pick, sortBy } from 'lodash-es'; +import type { Context } from '../types.js'; +import { fetchDynamicVariableContext } from '../utils/fetch-dynamic-variable-context.js'; +import { processPermissions } from '../utils/process-permissions.js'; +import { withCache } from '../utils/with-cache.js'; +import { withAppMinimalPermissions } from './with-app-minimal-permissions.js'; + +export const fetchPermissions = withCache( + 'permissions', + _fetchPermissions, + ({ action, policies, collections, accountability, bypassDynamicVariableProcessing }) => ({ + 
policies, // we assume that policies always come from the same source, so they should be in the same order + ...(action && { action }), + ...(collections && { collections: sortBy(collections) }), + ...(accountability && { accountability: pick(accountability, ['user', 'role', 'roles', 'app']) }), + ...(bypassDynamicVariableProcessing && { bypassDynamicVariableProcessing }), + }), +); + +export interface FetchPermissionsOptions { + action?: PermissionsAction; + policies: string[]; + collections?: string[]; + accountability?: Pick; + bypassDynamicVariableProcessing?: boolean; +} + +export async function _fetchPermissions(options: FetchPermissionsOptions, context: Context) { + const { PermissionsService } = await import('../../services/permissions.js'); + const permissionsService = new PermissionsService(context); + + const filter: Filter = { + _and: [{ policy: { _in: options.policies } }], + }; + + if (options.action) { + filter._and.push({ action: { _eq: options.action } }); + } + + if (options.collections) { + filter._and.push({ collection: { _in: options.collections } }); + } + + let permissions = (await permissionsService.readByQuery({ + filter, + limit: -1, + })) as Permission[]; + + // Sort permissions by their order in the policies array + // This ensures that if a sorted array of policies is passed in the permissions are returned in the same order + // which is necessary for correctly applying the presets in order + permissions = sortBy(permissions, (permission) => options.policies.indexOf(permission.policy!)); + + if (options.accountability && !options.bypassDynamicVariableProcessing) { + // Add app minimal permissions for the request accountability, if applicable. + // Normally this is done in the permissions service readByQuery, but it also needs to do it here + // since the permissions service is created without accountability. + // We call it without the policies filter, since the static minimal app permissions don't have a policy attached. 
+ const permissionsWithAppPermissions = withAppMinimalPermissions(options.accountability ?? null, permissions, { + _and: filter._and.slice(1), + }); + + const permissionsContext = await fetchDynamicVariableContext( + { + accountability: options.accountability, + policies: options.policies, + permissions: permissionsWithAppPermissions, + }, + context, + ); + + // Replace dynamic variables with their actual values + const processedPermissions = processPermissions({ + permissions: permissionsWithAppPermissions, + accountability: options.accountability, + permissionsContext, + }); + + // TODO merge in permissions coming from the share scope + + return processedPermissions; + } + + return permissions; +} diff --git a/api/src/permissions/lib/fetch-policies.test.ts b/api/src/permissions/lib/fetch-policies.test.ts new file mode 100644 index 0000000000..e30d4439f9 --- /dev/null +++ b/api/src/permissions/lib/fetch-policies.test.ts @@ -0,0 +1,185 @@ +import type { Accountability } from '@directus/types'; +import { beforeEach, expect, test, vi } from 'vitest'; +import { AccessService } from '../../services/access.js'; +import type { Context } from '../types.js'; +import { _fetchPolicies as fetchPolicies, type AccessRow } from './fetch-policies.js'; + +vi.mock('../../services/access.js', () => ({ + AccessService: vi.fn(), +})); + +let rows: AccessRow[]; + +beforeEach(() => { + rows = []; + + AccessService.prototype.readByQuery = vi.fn().mockResolvedValue(rows); +}); + +test('Fetches policies for public role and user when user is given without role', async () => { + const acc = { roles: [], user: 'user-a' } as unknown as Accountability; + + const policies = await fetchPolicies(acc, {} as Context); + + expect(AccessService.prototype.readByQuery).toHaveBeenCalledWith({ + filter: { + _or: [ + { user: { _eq: 'user-a' } }, + { + _and: [ + { + role: { + _null: true, + }, + }, + { + user: { + _null: true, + }, + }, + ], + }, + ], + }, + fields: ['policy.id', 'policy.ip_access', 
'role'], + limit: -1, + }); + + expect(policies).toEqual([]); +}); + +test('Fetches policies for public role when no roles and user are given', async () => { + const acc = { roles: [], user: null } as unknown as Accountability; + + const policies = await fetchPolicies(acc, {} as Context); + + expect(AccessService.prototype.readByQuery).toHaveBeenCalledWith({ + filter: { + _and: [ + { + role: { + _null: true, + }, + }, + { + user: { + _null: true, + }, + }, + ], + }, + fields: ['policy.id', 'policy.ip_access', 'role'], + limit: -1, + }); + + expect(policies).toEqual([]); +}); + +test('Fetched policies for user roles', async () => { + const acc = { roles: ['role-a', 'role-b'], user: null } as unknown as Accountability; + + const policies = await fetchPolicies(acc, {} as Context); + + expect(AccessService.prototype.readByQuery).toHaveBeenCalledWith({ + filter: { + role: { + _in: ['role-a', 'role-b'], + }, + }, + fields: ['policy.id', 'policy.ip_access', 'role'], + limit: -1, + }); + + expect(policies).toEqual([]); +}); + +test('Fetches policies for user roles and user if user is passed', async () => { + const acc = { roles: ['role-a', 'role-b'], user: 'user-a' } as unknown as Accountability; + + const policies = await fetchPolicies(acc, {} as Context); + + expect(AccessService.prototype.readByQuery).toHaveBeenCalledWith({ + filter: { + _or: [ + { + user: { + _eq: 'user-a', + }, + }, + { + role: { + _in: ['role-a', 'role-b'], + }, + }, + ], + }, + fields: ['policy.id', 'policy.ip_access', 'role'], + limit: -1, + }); + + expect(policies).toEqual([]); +}); + +test('Filters policies based on ip access on access row', async () => { + const acc = { roles: ['role-a', 'role-b'], user: 'user-a', ip: '127.0.0.5' } as unknown as Accountability; + + rows.push( + { + policy: { + id: 'policy-a', + ip_access: ['127.0.0.0/29'], + }, + role: null, + }, + { + policy: { + id: 'policy-b', + ip_access: ['1.1.1.1/32'], + }, + role: null, + }, + ); + + const policies = await 
fetchPolicies(acc, {} as Context); + + expect(policies).toEqual(['policy-a']); +}); + +test('Sorts policies by priority', async () => { + const acc = { roles: ['role-a', 'role-b'], user: 'user-a' } as unknown as Accountability; + + rows.push( + { + policy: { + id: 'policy-c', + ip_access: null, + }, + role: null, + }, + { + policy: { + id: 'policy-d', + ip_access: null, + }, + role: null, + }, + { + policy: { + id: 'policy-b', + ip_access: null, + }, + role: 'role-b', + }, + { + policy: { + id: 'policy-a', + ip_access: null, + }, + role: 'role-a', + }, + ); + + const policies = await fetchPolicies(acc, {} as Context); + + expect(policies).toEqual(['policy-a', 'policy-b', 'policy-c', 'policy-d']); +}); diff --git a/api/src/permissions/lib/fetch-policies.ts b/api/src/permissions/lib/fetch-policies.ts new file mode 100644 index 0000000000..3aa3808d47 --- /dev/null +++ b/api/src/permissions/lib/fetch-policies.ts @@ -0,0 +1,60 @@ +import type { Accountability, Filter } from '@directus/types'; +import type { Context } from '../types.js'; +import { filterPoliciesByIp } from '../utils/filter-policies-by-ip.js'; +import { withCache } from '../utils/with-cache.js'; + +export interface AccessRow { + policy: { id: string; ip_access: string[] | null }; + role: string | null; +} + +export const fetchPolicies = withCache('policies', _fetchPolicies, ({ roles, user, ip }) => ({ roles, user, ip })); + +/** + * Fetch the policies associated with the current user accountability + */ +export async function _fetchPolicies( + { roles, user, ip }: Pick<Accountability, 'roles' | 'user' | 'ip'>, + context: Context, +): Promise<string[]> { + const { AccessService } = await import('../../services/access.js'); + const accessService = new AccessService(context); + + let roleFilter: Filter; + + if (roles.length === 0) { + // Users without role assumes the Public role permissions along with their attached policies + roleFilter = { _and: [{ role: { _null: true } }, { user: { _null: true } }] }; + } else { + roleFilter = { role: { _in: roles } }; + 
} + + // If the user is not null, we also want to include the policies attached to the user + const filter = user ? { _or: [{ user: { _eq: user } }, roleFilter] } : roleFilter; + + const accessRows = (await accessService.readByQuery({ + filter, + fields: ['policy.id', 'policy.ip_access', 'role'], + limit: -1, + })) as AccessRow[]; + + const filteredAccessRows = filterPoliciesByIp(accessRows, ip); + + /* + * Sort rows by priority (goes bottom up): + * - Parent role policies + * - Child role policies + * - User policies + */ + filteredAccessRows.sort((a, b) => { + if (!a.role && !b.role) return 0; + if (!a.role) return 1; + if (!b.role) return -1; + + return roles.indexOf(a.role) - roles.indexOf(b.role); + }); + + const ids = filteredAccessRows.map(({ policy }) => policy.id); + + return ids; +} diff --git a/api/src/permissions/lib/fetch-roles-tree.test.ts b/api/src/permissions/lib/fetch-roles-tree.test.ts new file mode 100644 index 0000000000..897210a545 --- /dev/null +++ b/api/src/permissions/lib/fetch-roles-tree.test.ts @@ -0,0 +1,53 @@ +import type { Knex } from 'knex'; +import { beforeEach, expect, test, vi } from 'vitest'; +import { _fetchRolesTree } from './fetch-roles-tree.js'; + +let knex: Knex; + +beforeEach(() => { + knex = { + select: vi.fn().mockReturnThis(), + from: vi.fn().mockReturnThis(), + where: vi.fn().mockReturnThis(), + first: vi.fn(), + } as unknown as Knex; + + vi.clearAllMocks(); +}); + +test('Returns empty array if start value is null', async () => { + const roles = await _fetchRolesTree(null, knex); + expect(roles).toEqual([]); +}); + +test('Returns array of all parents in top-down order', async () => { + vi.mocked(knex.first).mockResolvedValueOnce({ id: 'start', parent: 'second' }); + vi.mocked(knex.first).mockResolvedValueOnce({ id: 'second', parent: 'third' }); + vi.mocked(knex.first).mockResolvedValueOnce({ id: 'third', parent: null }); + vi.mocked(knex.first).mockResolvedValueOnce({ id: 'unrelated', parent: null }); + + const roles = 
await _fetchRolesTree('start', knex); + + expect(roles).toEqual(['third', 'second', 'start']); +}); + +test('Exits if parent row is undefined', async () => { + vi.mocked(knex.first).mockResolvedValueOnce({ id: 'start', parent: 'second' }); + vi.mocked(knex.first).mockResolvedValueOnce({ id: 'second', parent: 'third' }); + vi.mocked(knex.first).mockResolvedValueOnce(undefined); + vi.mocked(knex.first).mockResolvedValueOnce({ id: 'unrelated', parent: null }); + + const roles = await _fetchRolesTree('start', knex); + + expect(roles).toEqual(['second', 'start']); +}); + +test('Throws error if infinite recursion occurs', async () => { + vi.mocked(knex.first).mockResolvedValueOnce({ id: 'first', parent: 'second' }); + vi.mocked(knex.first).mockResolvedValueOnce({ id: 'second', parent: 'third' }); + vi.mocked(knex.first).mockResolvedValueOnce({ id: 'third', parent: 'first' }); + + await expect(_fetchRolesTree('first', knex)).rejects.toMatchInlineSnapshot( + `[Error: Recursion encountered: role "third" already exists in tree path "third"->"second"->"first"]`, + ); +}); diff --git a/api/src/permissions/lib/fetch-roles-tree.ts b/api/src/permissions/lib/fetch-roles-tree.ts new file mode 100644 index 0000000000..b551c2c90a --- /dev/null +++ b/api/src/permissions/lib/fetch-roles-tree.ts @@ -0,0 +1,38 @@ +import type { Knex } from 'knex'; +import { withCache } from '../utils/with-cache.js'; + +export const fetchRolesTree = withCache('roles-tree', _fetchRolesTree); + +export async function _fetchRolesTree(start: string | null, knex: Knex): Promise<string[]> { + if (!start) return []; + + let parent: string | null = start; + const roles: string[] = []; + + while (parent) { + const role: { id: string; parent: string | null } | undefined = await knex + .select('id', 'parent') + .from('directus_roles') + .where({ id: parent }) + .first(); + + if (!role) { + break; + } + + roles.push(role.id); + + // Prevent infinite recursion loops + if (role.parent && roles.includes(role.parent) === true) { + 
roles.reverse(); + const rolesStr = roles.map((role) => `"${role}"`).join('->'); + throw new Error(`Recursion encountered: role "${role.id}" already exists in tree path ${rolesStr}`); + } + + parent = role.parent; + } + + roles.reverse(); + + return roles; +} diff --git a/api/src/services/permissions/lib/with-app-minimal-permissions.test.ts b/api/src/permissions/lib/with-app-minimal-permissions.test.ts similarity index 69% rename from api/src/services/permissions/lib/with-app-minimal-permissions.test.ts rename to api/src/permissions/lib/with-app-minimal-permissions.test.ts index b431d0cca6..6f19132acf 100644 --- a/api/src/services/permissions/lib/with-app-minimal-permissions.test.ts +++ b/api/src/permissions/lib/with-app-minimal-permissions.test.ts @@ -1,7 +1,6 @@ import type { Accountability, Permission, Query } from '@directus/types'; import { expect, it, vi } from 'vitest'; -import { filterItems } from '../../../utils/filter-items.js'; -import { mergePermissions } from '../../../utils/merge-permissions.js'; +import { filterItems } from '../../utils/filter-items.js'; import { withAppMinimalPermissions } from './with-app-minimal-permissions.js'; const mocks = vi.hoisted(() => { @@ -11,8 +10,7 @@ const mocks = vi.hoisted(() => { }); vi.mock('@directus/system-data', () => ({ appAccessMinimalPermissions: mocks.appAccessMinimalPermissions })); -vi.mock('../../../utils/filter-items.js'); -vi.mock('../../../utils/merge-permissions.js'); +vi.mock('../../utils/filter-items.js'); it('should not modify permissions if role has no app access', () => { const accountability = { app: false } as Accountability; @@ -28,15 +26,12 @@ it('should merge with filtered app minimal permissions if role has app access', const accountability = { app: true } as Accountability; const permissions: Permission[] = []; const filter: Query['filter'] = null; - const filteredPermissions: Permission[] = []; - const mergedPermissions: Permission[] = []; + const filteredPermissions: Permission[] = [{} 
as Permission]; vi.mocked(filterItems).mockImplementation(() => filteredPermissions); - vi.mocked(mergePermissions).mockImplementation(() => mergedPermissions); const result = withAppMinimalPermissions(accountability, permissions, filter); expect(filterItems).toHaveBeenCalledWith(mocks.appAccessMinimalPermissions, filter); - expect(mergePermissions).toHaveBeenCalledWith('or', permissions, filteredPermissions); - expect(result).toBe(mergedPermissions); + expect(result).toEqual(filteredPermissions); }); diff --git a/api/src/permissions/lib/with-app-minimal-permissions.ts b/api/src/permissions/lib/with-app-minimal-permissions.ts new file mode 100644 index 0000000000..6f919abbbd --- /dev/null +++ b/api/src/permissions/lib/with-app-minimal-permissions.ts @@ -0,0 +1,17 @@ +import { appAccessMinimalPermissions } from '@directus/system-data'; +import type { Accountability, Permission, Query } from '@directus/types'; +import { cloneDeep } from 'lodash-es'; +import { filterItems } from '../../utils/filter-items.js'; + +export function withAppMinimalPermissions( + accountability: Pick<Accountability, 'app'> | null, + permissions: Permission[], + filter: Query['filter'], +): Permission[] { + if (accountability?.app === true) { + const filteredAppMinimalPermissions = cloneDeep(filterItems(appAccessMinimalPermissions, filter)); + return [...permissions, ...filteredAppMinimalPermissions]; + } + + return permissions; +} diff --git a/api/src/permissions/modules/fetch-accountability-collection-access/fetch-accountability-collection-access.test.ts b/api/src/permissions/modules/fetch-accountability-collection-access/fetch-accountability-collection-access.test.ts new file mode 100644 index 0000000000..9306a5a850 --- /dev/null +++ b/api/src/permissions/modules/fetch-accountability-collection-access/fetch-accountability-collection-access.test.ts @@ -0,0 +1,151 @@ +import type { Accountability, Permission, SchemaOverview } from '@directus/types'; +import { vi, test, beforeEach, expect } from 'vitest'; +import { 
fetchPermissions } from '../../lib/fetch-permissions.js'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; +import { fetchAccountabilityCollectionAccess } from './fetch-accountability-collection-access.js'; + +vi.mock('../../lib/fetch-policies.js'); +vi.mock('../../lib/fetch-permissions.js'); + +beforeEach(() => { + vi.clearAllMocks(); + + vi.mocked(fetchPolicies).mockResolvedValue([]); +}); + +test('Returns all permissions for all collections if admin', async () => { + const schema = { + collections: { + 'collection-a': {}, + 'collection-b': {}, + }, + } as unknown as SchemaOverview; + + const result = await fetchAccountabilityCollectionAccess( + { admin: true } as unknown as Accountability, + { schema } as unknown as Context, + ); + + expect(result).toEqual({ + 'collection-a': { + create: { access: 'full', fields: ['*'] }, + read: { access: 'full', fields: ['*'] }, + update: { access: 'full', fields: ['*'] }, + delete: { access: 'full', fields: ['*'] }, + share: { access: 'full', fields: ['*'] }, + }, + 'collection-b': { + create: { access: 'full', fields: ['*'] }, + read: { access: 'full', fields: ['*'] }, + update: { access: 'full', fields: ['*'] }, + delete: { access: 'full', fields: ['*'] }, + share: { access: 'full', fields: ['*'] }, + }, + }); +}); + +test('Returns permissions for collections for accountability if not admin', async () => { + const permissions = [ + { collection: 'collection-a', action: 'read', fields: ['field-a', 'field-b'] }, + { collection: 'collection-b', action: 'update', fields: ['field-c'], permissions: {} }, + ] as unknown as Permission[]; + + vi.mocked(fetchPermissions).mockResolvedValue(permissions); + + const result = await fetchAccountabilityCollectionAccess({} as unknown as Accountability, {} as unknown as Context); + + expect(result).toEqual({ + 'collection-a': { + create: { access: 'none' }, + read: { access: 'full', fields: ['field-a', 'field-b'] }, + update: { 
access: 'none' }, + delete: { access: 'none' }, + share: { access: 'none' }, + }, + 'collection-b': { + create: { access: 'none' }, + read: { access: 'none' }, + update: { access: 'full', fields: ['field-c'] }, + delete: { access: 'none' }, + share: { access: 'none' }, + }, + }); +}); + +test('Returns permissions with partial access if permissions have filters', async () => { + const permissions = [ + { + collection: 'collection-a', + action: 'read', + fields: ['field-a', 'field-b'], + permissions: { + 'field-a': {}, + }, + }, + ] as unknown as Permission[]; + + vi.mocked(fetchPermissions).mockResolvedValue(permissions); + + const result = await fetchAccountabilityCollectionAccess({} as unknown as Accountability, {} as unknown as Context); + + expect(result).toEqual({ + 'collection-a': { + create: { access: 'none' }, + read: { + access: 'partial', + fields: ['field-a', 'field-b'], + }, + update: { access: 'none' }, + delete: { access: 'none' }, + share: { access: 'none' }, + }, + }); +}); + +test('Returns fields with * if any permission has *', async () => { + const permissions = [ + { collection: 'collection-a', action: 'read', fields: ['field-a', 'field-b'] }, + { collection: 'collection-a', action: 'read', fields: ['*'] }, + ] as unknown as Permission[]; + + vi.mocked(fetchPermissions).mockResolvedValue(permissions); + + const result = await fetchAccountabilityCollectionAccess({} as unknown as Accountability, {} as unknown as Context); + + expect(result).toEqual({ + 'collection-a': { + create: { access: 'none' }, + read: { access: 'full', fields: ['*'] }, + update: { access: 'none' }, + delete: { access: 'none' }, + share: { access: 'none' }, + }, + }); +}); + +test('Returns combined presets', async () => { + const permissions = [ + { + collection: 'collection-a', + action: 'read', + fields: ['field-a', 'field-b'], + presets: { 'field-a': 1, 'field-b': 2 }, + }, + { collection: 'collection-a', action: 'read', fields: ['*'], presets: { 'field-a': 3, 'field-c': 4 
} }, + ] as unknown as Permission[]; + + vi.mocked(fetchPermissions).mockResolvedValue(permissions); + + const result = await fetchAccountabilityCollectionAccess({} as unknown as Accountability, {} as unknown as Context); + + expect(result).toEqual({ + 'collection-a': { + create: { access: 'none' }, + read: { access: 'full', fields: ['*'], presets: { 'field-a': 3, 'field-b': 2, 'field-c': 4 } }, + update: { access: 'none' }, + delete: { access: 'none' }, + share: { access: 'none' }, + }, + }); +}); diff --git a/api/src/permissions/modules/fetch-accountability-collection-access/fetch-accountability-collection-access.ts b/api/src/permissions/modules/fetch-accountability-collection-access/fetch-accountability-collection-access.ts new file mode 100644 index 0000000000..5b7f1f15c5 --- /dev/null +++ b/api/src/permissions/modules/fetch-accountability-collection-access/fetch-accountability-collection-access.ts @@ -0,0 +1,80 @@ +import { PERMISSION_ACTIONS } from '@directus/constants'; +import type { Accountability, CollectionAccess, CollectionPermissions } from '@directus/types'; +import { mapValues, uniq } from 'lodash-es'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; + +/** + * Get all permissions + minimal app permissions (if applicable) for the user + role in the current accountability. + * The permissions will be filtered by IP access. 
+ */ +export async function fetchAccountabilityCollectionAccess( + accountability: Pick, + context: Context, +): Promise { + if (accountability.admin) { + return mapValues( + context.schema.collections, + () => + Object.fromEntries( + PERMISSION_ACTIONS.map((action) => [ + action, + { + access: 'full', + fields: ['*'], + }, + ]), + ) as CollectionPermissions, + ); + } + + const policies = await fetchPolicies(accountability, context); + + const permissions = await fetchPermissions({ policies, accountability }, context); + + const infos: CollectionAccess = {}; + + for (const perm of permissions) { + // Ensure that collection is in infos + if (!infos[perm.collection]) { + infos[perm.collection] = { + read: { access: 'none' }, + create: { access: 'none' }, + update: { access: 'none' }, + delete: { access: 'none' }, + share: { access: 'none' }, + }; + } + + // Ensure that action with default values is in collection infos + if (infos[perm.collection]![perm.action]?.access === 'none') { + // If a permissions is iterated over it means that the user has access to it, so set access to 'full' + // Set access to 'full' initially and refine that whenever a permission with filters is encountered + infos[perm.collection]![perm.action]!.access = 'full'; + } + + const info = infos[perm.collection]![perm.action]!; + + // Set access to 'partial' if the permission has filters, which means that the user has conditional access + if (info.access === 'full' && perm.permissions && Object.keys(perm.permissions).length > 0) { + info.access = 'partial'; + } + + if (perm.fields && info.fields?.[0] !== '*') { + info.fields = uniq([...(info.fields || []), ...(perm.fields || [])]); + + if (info.fields.includes('*')) { + info.fields = ['*']; + } + } + + if (perm.presets) { + info.presets = { ...(info.presets ?? {}), ...perm.presets }; + } + } + + // TODO Should fields by null, undefined or and empty array if no access? 
+ + return infos; +} diff --git a/api/src/permissions/modules/fetch-accountability-policy-globals/fetch-accountability-policy-globals.test.ts b/api/src/permissions/modules/fetch-accountability-policy-globals/fetch-accountability-policy-globals.test.ts new file mode 100644 index 0000000000..999883d379 --- /dev/null +++ b/api/src/permissions/modules/fetch-accountability-policy-globals/fetch-accountability-policy-globals.test.ts @@ -0,0 +1,47 @@ +import type { Accountability } from '@directus/types'; +import type { Knex } from 'knex'; +import { beforeEach, vi, test, expect } from 'vitest'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import { fetchAccountabilityPolicyGlobals } from './fetch-accountability-policy-globals.js'; + +vi.mock('../../lib/fetch-policies.js'); + +let knex: Knex; + +beforeEach(() => { + vi.clearAllMocks(); + + knex = { + from: vi.fn().mockReturnThis(), + select: vi.fn().mockReturnThis(), + where: vi.fn().mockReturnThis(), + whereIn: vi.fn().mockReturnThis(), + first: vi.fn(), + } as unknown as Knex; +}); + +test('Return enforce_tfa true if a policy with enforce_tfa is found', async () => { + vi.mocked(knex.first).mockResolvedValue({}); + vi.mocked(fetchPolicies).mockResolvedValue(['policy-a']); + + const result = await fetchAccountabilityPolicyGlobals( + { app: true, admin: false, roles: [], user: '' } as unknown as Accountability, + { knex } as any, + ); + + expect(result).toEqual({ app_access: true, admin_access: false, enforce_tfa: true }); + expect(knex.whereIn).toHaveBeenCalledWith('id', ['policy-a']); +}); + +test('Return enforce_tfa false if no policy with enforce_tfa is found', async () => { + vi.mocked(knex.first).mockResolvedValue(undefined); + vi.mocked(fetchPolicies).mockResolvedValue(['policy-a']); + + const result = await fetchAccountabilityPolicyGlobals( + { app: true, admin: false, roles: [], user: '' } as unknown as Accountability, + { knex } as any, + ); + + expect(result).toEqual({ app_access: true, 
admin_access: false, enforce_tfa: false }); + expect(knex.whereIn).toHaveBeenCalledWith('id', ['policy-a']); +}); diff --git a/api/src/permissions/modules/fetch-accountability-policy-globals/fetch-accountability-policy-globals.ts b/api/src/permissions/modules/fetch-accountability-policy-globals/fetch-accountability-policy-globals.ts new file mode 100644 index 0000000000..289400757e --- /dev/null +++ b/api/src/permissions/modules/fetch-accountability-policy-globals/fetch-accountability-policy-globals.ts @@ -0,0 +1,25 @@ +import type { Accountability, Globals } from '@directus/types'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; + +export async function fetchAccountabilityPolicyGlobals( + accountability: Pick, + context: Context, +): Promise { + const policies = await fetchPolicies(accountability, context); + + // Policies are already filtered down by the accountability IP, so we don't need to check it again + + const result = await context.knex + .select(1) + .from('directus_policies') + .whereIn('id', policies) + .where('enforce_tfa', true) + .first(); + + return { + app_access: accountability.app, + admin_access: accountability.admin, + enforce_tfa: !!result, + }; +} diff --git a/api/src/permissions/modules/fetch-allowed-collections/fetch-allowed-collections.test.ts b/api/src/permissions/modules/fetch-allowed-collections/fetch-allowed-collections.test.ts new file mode 100644 index 0000000000..8f25519a84 --- /dev/null +++ b/api/src/permissions/modules/fetch-allowed-collections/fetch-allowed-collections.test.ts @@ -0,0 +1,67 @@ +import type { Accountability, Permission, SchemaOverview } from '@directus/types'; +import { beforeEach, expect, test, vi } from 'vitest'; +import { PermissionsService } from '../../../services/permissions.js'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from 
'../../types.js'; +import { fetchAllowedCollections } from './fetch-allowed-collections.js'; + +vi.mock('../../../services/permissions.js', () => ({ + PermissionsService: vi.fn(), +})); + +vi.mock('../../lib/fetch-policies.js'); +vi.mock('../../lib/fetch-permissions.js'); + +beforeEach(() => { + vi.clearAllMocks(); + PermissionsService.prototype.readByQuery = vi.fn().mockResolvedValue([]); + vi.mocked(fetchPermissions).mockResolvedValue([]); +}); + +test('Returns all schema keys if user is admin', async () => { + const action = 'read'; + + const accountability = { + admin: true, + } as Accountability; + + const schema = { + collections: { + 'collection-a': {}, + 'collection-b': {}, + }, + } as unknown as SchemaOverview; + + const collections = await fetchAllowedCollections({ action, accountability }, { schema } as Context); + + expect(collections).toEqual(['collection-a', 'collection-b']); +}); + +test('Returns unique collection names for all permissions in given action', async () => { + vi.mocked(fetchPolicies).mockResolvedValue(['policy-a', 'policy-b']); + + vi.mocked(fetchPermissions).mockResolvedValue([ + { collection: 'collection-a' }, + { collection: 'collection-a' }, + { collection: 'collection-b' }, + { collection: 'collection-c' }, + ] as Permission[]); + + const action = 'read'; + + const accountability = { + admin: false, + } as Accountability; + + const schema = { + collections: { + 'collection-a': {}, + 'collection-b': {}, + }, + } as unknown as SchemaOverview; + + const collections = await fetchAllowedCollections({ action, accountability }, { schema } as Context); + + expect(collections).toEqual(['collection-a', 'collection-b', 'collection-c']); +}); diff --git a/api/src/permissions/modules/fetch-allowed-collections/fetch-allowed-collections.ts b/api/src/permissions/modules/fetch-allowed-collections/fetch-allowed-collections.ts new file mode 100644 index 0000000000..5b7d87e8e5 --- /dev/null +++ 
b/api/src/permissions/modules/fetch-allowed-collections/fetch-allowed-collections.ts @@ -0,0 +1,43 @@ +import type { Accountability, PermissionsAction } from '@directus/types'; +import { uniq } from 'lodash-es'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; +import { withCache } from '../../utils/with-cache.js'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; + +export interface FetchAllowedCollectionsOptions { + action: PermissionsAction; + accountability: Pick; +} + +export const fetchAllowedCollections = withCache( + 'allowed-collections', + _fetchAllowedCollections, + ({ action, accountability: { user, role, roles, ip, admin, app } }) => ({ + action, + accountability: { + user, + role, + roles, + ip, + admin, + app, + }, + }), +); + +export async function _fetchAllowedCollections( + { action, accountability }: FetchAllowedCollectionsOptions, + { knex, schema }: Context, +): Promise { + if (accountability.admin) { + return Object.keys(schema.collections); + } + + const policies = await fetchPolicies(accountability, { knex, schema }); + const permissions = await fetchPermissions({ action, policies, accountability }, { knex, schema }); + + const collections = permissions.map(({ collection }) => collection); + + return uniq(collections); +} diff --git a/api/src/permissions/modules/fetch-allowed-field-map/fetch-allowed-field-map.test.ts b/api/src/permissions/modules/fetch-allowed-field-map/fetch-allowed-field-map.test.ts new file mode 100644 index 0000000000..9100d7d55c --- /dev/null +++ b/api/src/permissions/modules/fetch-allowed-field-map/fetch-allowed-field-map.test.ts @@ -0,0 +1,75 @@ +import type { Accountability, SchemaOverview } from '@directus/types'; +import { beforeEach, expect, test, vi } from 'vitest'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; +import { _fetchAllowedFieldMap as fetchAllowedFieldMap } from 
'./fetch-allowed-field-map.js'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; +import type { Permission } from '@directus/types'; + +vi.mock('../../lib/fetch-policies.js'); +vi.mock('../../lib/fetch-permissions.js', () => ({ fetchPermissions: vi.fn() })); + +vi.mock('../../../services/permissions.js', () => ({ + PermissionsService: vi.fn(), +})); + +beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(fetchPermissions).mockResolvedValue([]); +}); + +test('Returns field map of the whole schema if admin is true', async () => { + const accountability = { + admin: true, + } as Accountability; + + const action = 'read'; + + const schema = { + collections: { + 'collection-a': { + fields: { + 'field-a': {}, + 'field-b': {}, + }, + }, + 'collection-b': { + fields: { + 'field-a': {}, + 'field-c': {}, + }, + }, + }, + } as unknown as SchemaOverview; + + const map = await fetchAllowedFieldMap({ accountability, action }, { schema } as Context); + + expect(map).toEqual({ + 'collection-a': ['field-a', 'field-b'], + 'collection-b': ['field-a', 'field-c'], + }); +}); + +test('Returns field map from permissions for given accountability', async () => { + const accountability = { + admin: false, + } as Accountability; + + const action = 'read'; + + vi.mocked(fetchPolicies).mockResolvedValue(['policy-a', 'policy-b']); + + vi.mocked(fetchPermissions).mockResolvedValue([ + { collection: 'collection-a', fields: ['field-a'] }, + { collection: 'collection-a', fields: ['field-b'] }, + { collection: 'collection-b', fields: ['field-a', 'field-c'] }, + { collection: 'collection-b', fields: ['field-b'] }, + ] as Permission[]); + + const map = await fetchAllowedFieldMap({ accountability, action }, {} as Context); + + expect(map).toEqual({ + 'collection-a': ['field-a', 'field-b'], + 'collection-b': ['field-a', 'field-c', 'field-b'], + }); +}); diff --git a/api/src/permissions/modules/fetch-allowed-field-map/fetch-allowed-field-map.ts 
b/api/src/permissions/modules/fetch-allowed-field-map/fetch-allowed-field-map.ts new file mode 100644 index 0000000000..09a9839865 --- /dev/null +++ b/api/src/permissions/modules/fetch-allowed-field-map/fetch-allowed-field-map.ts @@ -0,0 +1,56 @@ +import type { Accountability, PermissionsAction } from '@directus/types'; +import { uniq } from 'lodash-es'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; +import { withCache } from '../../utils/with-cache.js'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; + +export type FieldMap = Record; + +export interface FetchAllowedFieldMapOptions { + accountability: Pick; + action: PermissionsAction; +} + +export const fetchAllowedFieldMap = withCache( + 'allowed-field-map', + _fetchAllowedFieldMap, + ({ action, accountability: { user, role, roles, ip, admin, app } }) => ({ + action, + accountability: { user, role, roles, ip, admin, app }, + }), +); + +export async function _fetchAllowedFieldMap( + { accountability, action }: FetchAllowedFieldMapOptions, + { knex, schema }: Context, +) { + const fieldMap: FieldMap = {}; + + if (accountability.admin) { + for (const [collection, { fields }] of Object.entries(schema.collections)) { + fieldMap[collection] = Object.keys(fields); + } + + return fieldMap; + } + + const policies = await fetchPolicies(accountability, { knex, schema }); + const permissions = await fetchPermissions({ action, policies, accountability }, { knex, schema }); + + for (const { collection, fields } of permissions) { + if (!fieldMap[collection]) { + fieldMap[collection] = []; + } + + if (fields) { + fieldMap[collection]!.push(...fields); + } + } + + for (const [collection, fields] of Object.entries(fieldMap)) { + fieldMap[collection] = uniq(fields); + } + + return fieldMap; +} diff --git a/api/src/permissions/modules/fetch-allowed-fields/fetch-allowed-fields.test.ts 
b/api/src/permissions/modules/fetch-allowed-fields/fetch-allowed-fields.test.ts new file mode 100644 index 0000000000..7dc5a8c5d3 --- /dev/null +++ b/api/src/permissions/modules/fetch-allowed-fields/fetch-allowed-fields.test.ts @@ -0,0 +1,85 @@ +import type { Accountability, Permission, SchemaOverview } from '@directus/types'; +import { beforeEach, expect, test, vi } from 'vitest'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; +import { _fetchAllowedFields as fetchAllowedFields } from './fetch-allowed-fields.js'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; + +vi.mock('../../lib/fetch-policies.js'); +vi.mock('../../lib/fetch-permissions.js'); + +vi.mock('../../../services/permissions.js', () => ({ + PermissionsService: vi.fn(), +})); + +vi.mock('../../../services/access.js', () => ({ + AccessService: vi.fn(), +})); + +beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(fetchPermissions).mockResolvedValue([]); +}); + +test('Returns unique array of all fields that are associated with the permissions for the passed accountability object', async () => { + const acc = {} as Accountability; + const policies = ['policy-a']; + + const permissions = [ + { fields: ['field-a'] }, + { fields: ['field-a', 'field-b'] }, + { fields: ['field-c'] }, + ] as Permission[]; + + const schema = { + collections: { + 'collection-a': { + fields: { + 'field-a': {}, + 'field-b': {}, + 'field-c': {}, + }, + }, + }, + } as unknown as SchemaOverview; + + vi.mocked(fetchPolicies).mockResolvedValue(policies); + vi.mocked(fetchPermissions).mockResolvedValue(permissions); + + const fields = await fetchAllowedFields({ collection: 'collection-a', action: 'read', accountability: acc }, { + schema, + } as Context); + + expect(fields).toEqual(['field-a', 'field-b', 'field-c']); +}); + +test('Removes fields that are not in the schema', async () => { + const acc = {} as Accountability; + const policies = ['policy-a']; + 
+ const permissions = [ + { fields: ['field-a'] }, + { fields: ['field-a', 'field-b'] }, + { fields: ['field-c'] }, + { fields: ['*'] }, + ] as Permission[]; + + const schema = { + collections: { + 'collection-a': { + fields: { + 'field-a': {}, + }, + }, + }, + } as unknown as SchemaOverview; + + vi.mocked(fetchPolicies).mockResolvedValue(policies); + vi.mocked(fetchPermissions).mockResolvedValue(permissions); + + const fields = await fetchAllowedFields({ collection: 'collection-a', action: 'read', accountability: acc }, { + schema, + } as Context); + + expect(fields).toEqual(['field-a', '*']); +}); diff --git a/api/src/permissions/modules/fetch-allowed-fields/fetch-allowed-fields.ts b/api/src/permissions/modules/fetch-allowed-fields/fetch-allowed-fields.ts new file mode 100644 index 0000000000..35bbd72fdb --- /dev/null +++ b/api/src/permissions/modules/fetch-allowed-fields/fetch-allowed-fields.ts @@ -0,0 +1,52 @@ +import type { Accountability, PermissionsAction } from '@directus/types'; +import { uniq } from 'lodash-es'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; +import { withCache } from '../../utils/with-cache.js'; + +export interface FetchAllowedFieldsOptions { + collection: string; + action: PermissionsAction; + accountability: Pick; +} + +export const fetchAllowedFields = withCache( + 'allowed-fields', + _fetchAllowedFields, + ({ action, collection, accountability: { user, role, roles, ip, app } }) => ({ + action, + collection, + accountability: { user, role, roles, ip, app }, + }), +); + +/** + * Look up all fields that are allowed to be used for the given collection and action for the given + * accountability object + * + * Done by looking up all available policies for the current accountability object, and reading all + * permissions that exist for the collection+action+policy combination + */ +export async function 
_fetchAllowedFields( + { accountability, action, collection }: FetchAllowedFieldsOptions, + { knex, schema }: Context, +): Promise { + const policies = await fetchPolicies(accountability, { knex, schema }); + + const permissions = await fetchPermissions( + { action, collections: [collection], policies, accountability }, + { knex, schema }, + ); + + const allowedFields = []; + + for (const { fields } of permissions) { + if (!fields) continue; + allowedFields.push(...fields); + } + + return uniq(allowedFields).filter( + (field) => field === '*' || field in (schema.collections[collection]?.fields ?? {}), + ); +} diff --git a/api/src/permissions/modules/fetch-global-access/fetch-global-access.test.ts b/api/src/permissions/modules/fetch-global-access/fetch-global-access.test.ts new file mode 100644 index 0000000000..42bb580eb9 --- /dev/null +++ b/api/src/permissions/modules/fetch-global-access/fetch-global-access.test.ts @@ -0,0 +1,48 @@ +import type { Accountability } from '@directus/types'; +import type { Knex } from 'knex'; +import { beforeEach, expect, test, vi } from 'vitest'; +import { _fetchGlobalAccess as fetchGlobalAccess } from './fetch-global-access.js'; +import { fetchGlobalAccessForRoles } from './lib/fetch-global-access-for-roles.js'; +import { fetchGlobalAccessForUser } from './lib/fetch-global-access-for-user.js'; + +let knex: Knex; + +vi.mock('./lib/fetch-global-access-for-roles.js'); +vi.mock('./lib/fetch-global-access-for-user.js'); + +beforeEach(() => { + vi.clearAllMocks(); + + knex = {} as unknown as Knex; +}); + +test('Returns result from access for roles when no user is passed', async () => { + const mockRolesAccess = { app: true, admin: true }; + vi.mocked(fetchGlobalAccessForRoles).mockResolvedValue(mockRolesAccess); + + const res = await fetchGlobalAccess({} as Accountability, knex); + + expect(res).toEqual(mockRolesAccess); +}); + +test('Returns highest result if user is passed', async () => { + const mockRolesAccess = { app: true, admin: 
true }; + const mockUserAccess = { app: false, admin: false }; + vi.mocked(fetchGlobalAccessForRoles).mockResolvedValue(mockRolesAccess); + vi.mocked(fetchGlobalAccessForUser).mockResolvedValue(mockUserAccess); + + const res = await fetchGlobalAccess({ user: 'user', roles: [] }, knex); + + expect(res).toEqual({ app: true, admin: true }); +}); + +test('Combines result of role and user', async () => { + const mockRolesAccess = { app: false, admin: true }; + const mockUserAccess = { app: true, admin: false }; + vi.mocked(fetchGlobalAccessForRoles).mockResolvedValue(mockRolesAccess); + vi.mocked(fetchGlobalAccessForUser).mockResolvedValue(mockUserAccess); + + const res = await fetchGlobalAccess({ user: 'user', roles: [] }, knex); + + expect(res).toEqual({ app: true, admin: true }); +}); diff --git a/api/src/permissions/modules/fetch-global-access/fetch-global-access.ts b/api/src/permissions/modules/fetch-global-access/fetch-global-access.ts new file mode 100644 index 0000000000..88c341466f --- /dev/null +++ b/api/src/permissions/modules/fetch-global-access/fetch-global-access.ts @@ -0,0 +1,34 @@ +import type { Accountability } from '@directus/types'; +import type { Knex } from 'knex'; +import { withCache } from '../../utils/with-cache.js'; +import { fetchGlobalAccessForRoles } from './lib/fetch-global-access-for-roles.js'; +import { fetchGlobalAccessForUser } from './lib/fetch-global-access-for-user.js'; +import type { GlobalAccess } from './types.js'; + +export const fetchGlobalAccess = withCache('global-access', _fetchGlobalAccess, ({ user, roles, ip }) => ({ + user, + roles, + ip, +})); + +/** + * Fetch the global access (eg admin/app access) rules for the given roles, or roles+user combination + * + * Will fetch roles and user info separately so they can be cached and reused individually + */ +export async function _fetchGlobalAccess( + accountability: Pick, + knex: Knex, +): Promise { + const access = await fetchGlobalAccessForRoles(accountability, knex); + + if 
(accountability.user !== undefined) { + const userAccess = await fetchGlobalAccessForUser(accountability, knex); + + // If app/admin is already true, keep it true + access.app ||= userAccess.app; + access.admin ||= userAccess.admin; + } + + return access; +} diff --git a/api/src/permissions/modules/fetch-global-access/lib/fetch-global-access-for-roles.test.ts b/api/src/permissions/modules/fetch-global-access/lib/fetch-global-access-for-roles.test.ts new file mode 100644 index 0000000000..38b4498c88 --- /dev/null +++ b/api/src/permissions/modules/fetch-global-access/lib/fetch-global-access-for-roles.test.ts @@ -0,0 +1,33 @@ +import type { Accountability } from '@directus/types'; +import type { Knex } from 'knex'; +import { beforeEach, expect, test, vi } from 'vitest'; +import type { GlobalAccess } from '../types.js'; +import { fetchGlobalAccessForQuery } from '../utils/fetch-global-access-for-query.js'; +import { _fetchGlobalAccessForRoles as fetchGlobalAccessForRoles } from './fetch-global-access-for-roles.js'; + +vi.mock('../utils/fetch-global-access-for-query.js'); + +let knex: Knex; + +beforeEach(() => { + vi.clearAllMocks(); + + knex = { + where: vi.fn(), + } as unknown as Knex; +}); + +test('Returns result of fetchGlobalAccessForQuery with roles query and accountability', async () => { + const mockResult = {} as GlobalAccess; + const mockKnex = {} as Knex.QueryBuilder; + vi.mocked(knex.where).mockReturnValue(mockKnex); + vi.mocked(fetchGlobalAccessForQuery).mockResolvedValue(mockResult); + + const accountability = { roles: ['role-a', 'role-b'] } as Accountability; + + const res = await fetchGlobalAccessForRoles(accountability, knex); + + expect(knex.where).toHaveBeenCalledWith('role', 'in', ['role-a', 'role-b']); + expect(fetchGlobalAccessForQuery).toHaveBeenCalledWith(mockKnex, accountability); + expect(res).toBe(mockResult); +}); diff --git a/api/src/permissions/modules/fetch-global-access/lib/fetch-global-access-for-roles.ts 
b/api/src/permissions/modules/fetch-global-access/lib/fetch-global-access-for-roles.ts new file mode 100644 index 0000000000..24b4af00a5 --- /dev/null +++ b/api/src/permissions/modules/fetch-global-access/lib/fetch-global-access-for-roles.ts @@ -0,0 +1,19 @@ +import type { Accountability } from '@directus/types'; +import type { Knex } from 'knex'; +import { withCache } from '../../../utils/with-cache.js'; +import type { GlobalAccess } from '../types.js'; +import { fetchGlobalAccessForQuery } from '../utils/fetch-global-access-for-query.js'; + +export const fetchGlobalAccessForRoles = withCache( + 'global-access-role', + _fetchGlobalAccessForRoles, + ({ roles, ip }) => ({ roles, ip }), +); + +export async function _fetchGlobalAccessForRoles( + accountability: Pick, + knex: Knex, +): Promise { + const query = knex.where('role', 'in', accountability.roles); + return await fetchGlobalAccessForQuery(query, accountability); +} diff --git a/api/src/permissions/modules/fetch-global-access/lib/fetch-global-access-for-user.test.ts b/api/src/permissions/modules/fetch-global-access/lib/fetch-global-access-for-user.test.ts new file mode 100644 index 0000000000..999e304be4 --- /dev/null +++ b/api/src/permissions/modules/fetch-global-access/lib/fetch-global-access-for-user.test.ts @@ -0,0 +1,33 @@ +import type { Accountability } from '@directus/types'; +import type { Knex } from 'knex'; +import { beforeEach, expect, test, vi } from 'vitest'; +import type { GlobalAccess } from '../types.js'; +import { fetchGlobalAccessForQuery } from '../utils/fetch-global-access-for-query.js'; +import { _fetchGlobalAccessForUser as fetchGlobalAccessForUser } from './fetch-global-access-for-user.js'; + +vi.mock('../utils/fetch-global-access-for-query.js'); + +let knex: Knex; + +beforeEach(() => { + vi.clearAllMocks(); + + knex = { + where: vi.fn(), + } as unknown as Knex; +}); + +test('Returns result of fetchGlobalAccessForQuery with roles and accountability', async () => { + const mockResult = {} 
as GlobalAccess; + const mockKnex = {} as Knex.QueryBuilder; + vi.mocked(knex.where).mockReturnValue(mockKnex); + vi.mocked(fetchGlobalAccessForQuery).mockResolvedValue(mockResult); + + const accountability = { user: 'user-a' } as Accountability; + + const res = await fetchGlobalAccessForUser(accountability, knex); + + expect(knex.where).toHaveBeenCalledWith('user', '=', 'user-a'); + expect(fetchGlobalAccessForQuery).toHaveBeenCalledWith(mockKnex, accountability); + expect(res).toBe(mockResult); +}); diff --git a/api/src/permissions/modules/fetch-global-access/lib/fetch-global-access-for-user.ts b/api/src/permissions/modules/fetch-global-access/lib/fetch-global-access-for-user.ts new file mode 100644 index 0000000000..b3b64bb932 --- /dev/null +++ b/api/src/permissions/modules/fetch-global-access/lib/fetch-global-access-for-user.ts @@ -0,0 +1,18 @@ +import type { Accountability } from '@directus/types'; +import type { Knex } from 'knex'; +import { withCache } from '../../../utils/with-cache.js'; +import type { GlobalAccess } from '../types.js'; +import { fetchGlobalAccessForQuery } from '../utils/fetch-global-access-for-query.js'; + +export const fetchGlobalAccessForUser = withCache('global-access-user', _fetchGlobalAccessForUser, ({ user, ip }) => ({ + user, + ip, +})); + +export async function _fetchGlobalAccessForUser( + accountability: Pick, + knex: Knex, +): Promise { + const query = knex.where('user', '=', accountability.user); + return await fetchGlobalAccessForQuery(query, accountability); +} diff --git a/api/src/permissions/modules/fetch-global-access/types.ts b/api/src/permissions/modules/fetch-global-access/types.ts new file mode 100644 index 0000000000..4c8e8c8074 --- /dev/null +++ b/api/src/permissions/modules/fetch-global-access/types.ts @@ -0,0 +1,4 @@ +export interface GlobalAccess { + app: boolean; + admin: boolean; +} diff --git a/api/src/permissions/modules/fetch-global-access/utils/fetch-global-access-for-query.test.ts 
b/api/src/permissions/modules/fetch-global-access/utils/fetch-global-access-for-query.test.ts new file mode 100644 index 0000000000..a285300566 --- /dev/null +++ b/api/src/permissions/modules/fetch-global-access/utils/fetch-global-access-for-query.test.ts @@ -0,0 +1,96 @@ +import type { Accountability } from '@directus/types'; +import type { Knex } from 'knex'; +import { beforeEach, expect, test, vi } from 'vitest'; +import { fetchGlobalAccessForQuery } from './fetch-global-access-for-query.js'; + +let qb: Knex.QueryBuilder; + +beforeEach(() => { + vi.clearAllMocks(); + + qb = { + select: vi.fn().mockReturnThis(), + from: vi.fn().mockReturnThis(), + leftJoin: vi.fn().mockResolvedValue([]), + } as unknown as Knex.QueryBuilder; +}); + +test('Returns false by default if no access is found', async () => { + const res = await fetchGlobalAccessForQuery(qb, {} as Accountability); + + expect(res).toEqual({ + app: false, + admin: false, + }); +}); + +test('Sets app true if one or more access rows have app access set as true', async () => { + vi.mocked(qb.leftJoin).mockResolvedValue([ + { admin_access: false, app_access: false }, + { admin_access: false, app_access: true }, + { admin_access: false, app_access: false }, + ]); + + const res = await fetchGlobalAccessForQuery(qb, {} as Accountability); + + expect(res).toEqual({ admin: false, app: true }); +}); + +test('Sets admin & app true if one or more access rows have app admin set as true', async () => { + vi.mocked(qb.leftJoin).mockResolvedValue([ + { admin_access: false, app_access: false }, + { admin_access: true, app_access: false }, + { admin_access: false, app_access: false }, + ]); + + const res = await fetchGlobalAccessForQuery(qb, {} as Accountability); + + expect(res).toEqual({ admin: true, app: true }); +}); + +test('Sets app true if one or more access rows have app access set as 1', async () => { + vi.mocked(qb.leftJoin).mockResolvedValue([ + { admin_access: 0, app_access: 0 }, + { admin_access: 0, app_access: 1 
}, + { admin_access: 0, app_access: 0 }, + ]); + + const res = await fetchGlobalAccessForQuery(qb, {} as Accountability); + + expect(res).toEqual({ admin: false, app: true }); +}); + +test('Sets admin & app true if one or more access rows have app admin set as true', async () => { + vi.mocked(qb.leftJoin).mockResolvedValue([ + { admin_access: 0, app_access: 0 }, + { admin_access: 1, app_access: 0 }, + { admin_access: 0, app_access: 0 }, + ]); + + const res = await fetchGlobalAccessForQuery(qb, {} as Accountability); + + expect(res).toEqual({ admin: true, app: true }); +}); + +test('Includes policies that have an ip access restriction that does matches the accountability ip', async () => { + vi.mocked(qb.leftJoin).mockResolvedValue([ + { admin_access: false, app_access: false }, + { admin_access: false, app_access: true, ip_access: '127.0.0.1/24,127.0.0.2' }, + ]); + + const res = await fetchGlobalAccessForQuery(qb, { ip: '127.0.0.5' } as Accountability); + + expect(res).toEqual({ admin: false, app: true }); +}); + +test('Ignores policies that have an ip access restriction that does not match the accountability ip', async () => { + vi.mocked(qb.leftJoin).mockResolvedValue([ + { admin_access: false, app_access: false }, + { admin_access: true, app_access: false, ip_access: '127.0.0.1,127.0.0.2' }, + { admin_access: false, app_access: true, ip_access: '128.0.0.1' }, + ]); + + const res = await fetchGlobalAccessForQuery(qb, { ip: '1.1.1.1' } as Accountability); + + expect(res).toEqual({ admin: false, app: false }); +}); diff --git a/api/src/permissions/modules/fetch-global-access/utils/fetch-global-access-for-query.ts b/api/src/permissions/modules/fetch-global-access/utils/fetch-global-access-for-query.ts new file mode 100644 index 0000000000..8f79d59098 --- /dev/null +++ b/api/src/permissions/modules/fetch-global-access/utils/fetch-global-access-for-query.ts @@ -0,0 +1,46 @@ +import type { Accountability, Policy } from '@directus/types'; +import { toBoolean, toArray } 
from '@directus/utils'; +import type { Knex } from 'knex'; +import { ipInNetworks } from '../../../../utils/ip-in-networks.js'; +import type { GlobalAccess } from '../types.js'; + +type AccessRow = { + admin_access: Policy['admin_access'] | null; + app_access: Policy['app_access'] | null; + ip_access: Policy['ip_access'] | string | null; +}; + +export async function fetchGlobalAccessForQuery( + query: Knex.QueryBuilder, + accountability: Pick<Accountability, 'ip'>, +): Promise<GlobalAccess> { + const globalAccess = { + app: false, + admin: false, + }; + + const accessRows = await query .select( 'directus_policies.admin_access', 'directus_policies.app_access', 'directus_policies.ip_access', ) .from('directus_access') // @NOTE: `where` clause comes from the caller .leftJoin('directus_policies', 'directus_policies.id', 'directus_access.policy'); + + // Additively merge access permissions + for (const { admin_access, app_access, ip_access } of accessRows) { + if (accountability.ip && ip_access) { + // Skip row if IP is not in the allowed networks + const networks = toArray(ip_access); + if (!ipInNetworks(accountability.ip, networks)) continue; + } + + globalAccess.admin ||= toBoolean(admin_access); + globalAccess.app ||= globalAccess.admin || toBoolean(app_access); + if (globalAccess.admin) break; + } + + return globalAccess; +} diff --git a/api/src/permissions/modules/fetch-inconsistent-field-map/fetch-inconsistent-field-map.test.ts b/api/src/permissions/modules/fetch-inconsistent-field-map/fetch-inconsistent-field-map.test.ts new file mode 100644 index 0000000000..86cba2e071 --- /dev/null +++ b/api/src/permissions/modules/fetch-inconsistent-field-map/fetch-inconsistent-field-map.test.ts @@ -0,0 +1,77 @@ +import type { Accountability, Permission, SchemaOverview } from '@directus/types'; +import { beforeEach, expect, test, vi } from 'vitest'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type {
Context } from '../../types.js'; +import { _fetchInconsistentFieldMap as fetchInconsistentFieldMap } from './fetch-inconsistent-field-map.js'; + +vi.mock('../../lib/fetch-policies.js'); +vi.mock('../../lib/fetch-permissions.js', () => ({ fetchPermissions: vi.fn() })); + +vi.mock('../../../services/permissions.js', () => ({ + PermissionsService: vi.fn(), +})); + +beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(fetchPermissions).mockResolvedValue([]); +}); + +test('Returns field map of the whole schema if admin is true', async () => { + const accountability = { + admin: true, + } as Accountability; + + const action = 'read'; + + const schema = { + collections: { + 'collection-a': { + fields: { + 'field-a': {}, + 'field-b': {}, + }, + }, + 'collection-b': { + fields: { + 'field-a': {}, + 'field-c': {}, + }, + }, + }, + } as unknown as SchemaOverview; + + const map = await fetchInconsistentFieldMap({ accountability, action }, { schema } as Context); + + expect(map).toEqual({ + 'collection-a': [], + 'collection-b': [], + }); +}); + +test('Returns field map from permissions for given accountability', async () => { + const accountability = { + admin: false, + } as Accountability; + + const action = 'read'; + + vi.mocked(fetchPolicies).mockResolvedValue([]); + + vi.mocked(fetchPermissions).mockResolvedValue([ + { collection: 'collection-a', fields: ['field-a'] }, + { collection: 'collection-a', fields: ['field-b'] }, + { collection: 'collection-b', fields: ['field-a', 'field-b', 'field-c'] }, + { collection: 'collection-b', fields: ['field-b'] }, + { collection: 'collection-c', fields: [] }, + { collection: 'collection-c', fields: ['field-a'] }, + ] as Permission[]); + + const map = await fetchInconsistentFieldMap({ accountability, action }, {} as Context); + + expect(map).toEqual({ + 'collection-a': ['field-a', 'field-b'], + 'collection-b': ['field-a', 'field-c'], + 'collection-c': ['field-a'], + }); +}); diff --git 
a/api/src/permissions/modules/fetch-inconsistent-field-map/fetch-inconsistent-field-map.ts b/api/src/permissions/modules/fetch-inconsistent-field-map/fetch-inconsistent-field-map.ts new file mode 100644 index 0000000000..2e4d8e2f0f --- /dev/null +++ b/api/src/permissions/modules/fetch-inconsistent-field-map/fetch-inconsistent-field-map.ts @@ -0,0 +1,58 @@ +import type { Accountability, PermissionsAction } from '@directus/types'; +import { uniq, intersection, difference, pick } from 'lodash-es'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; +import { withCache } from '../../utils/with-cache.js'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; + +export type FieldMap = Record<string, string[]>; + +export interface FetchInconsistentFieldMapOptions { + accountability: Pick<Accountability, 'user' | 'role' | 'roles' | 'ip' | 'admin' | 'app'> | null; + action: PermissionsAction; +} + +/** + * Fetch a field map for fields that may or may not be null based on item-by-item permissions. + */ +export const fetchInconsistentFieldMap = withCache( + 'inconsistent-field-map', + _fetchInconsistentFieldMap, + ({ action, accountability }) => ({ + action, + accountability: accountability ?
pick(accountability, ['user', 'role', 'roles', 'ip', 'admin', 'app']) : null, + }), +); + +export async function _fetchInconsistentFieldMap( + { accountability, action }: FetchInconsistentFieldMapOptions, + { knex, schema }: Context, +) { + const fieldMap: FieldMap = {}; + + if (!accountability || accountability.admin) { + for (const collection of Object.keys(schema.collections)) { + fieldMap[collection] = []; + } + + return fieldMap; + } + + const policies = await fetchPolicies(accountability, { knex, schema }); + const permissions = await fetchPermissions({ action, policies, accountability }, { knex, schema }); + + const collections = uniq(permissions.map(({ collection }) => collection)); + + for (const collection of collections) { + const fields: string[][] = permissions + .filter((permission) => permission.collection === collection) + .map((permission) => permission.fields ?? []); + + const availableEverywhere = intersection(...fields); + const availableSomewhere = difference(uniq(fields.flat()), availableEverywhere); + + fieldMap[collection] = availableSomewhere; + } + + return fieldMap; +} diff --git a/api/src/permissions/modules/fetch-policies-ip-access/fetch-policies-ip-access.ts b/api/src/permissions/modules/fetch-policies-ip-access/fetch-policies-ip-access.ts new file mode 100644 index 0000000000..a2e885e902 --- /dev/null +++ b/api/src/permissions/modules/fetch-policies-ip-access/fetch-policies-ip-access.ts @@ -0,0 +1,39 @@ +import type { Accountability } from '@directus/types'; +import { toArray } from '@directus/utils'; +import type { Knex } from 'knex'; +import { withCache } from '../../utils/with-cache.js'; + +export const fetchPoliciesIpAccess = withCache('policies-ip-access', _fetchPoliciesIpAccess, ({ user, roles }) => ({ + user, + roles, +})); + +export async function _fetchPoliciesIpAccess( + accountability: Pick<Accountability, 'user' | 'roles'>, + knex: Knex, +): Promise<string[][]> { + const query = knex('directus_access') + .select({ ip_access: 'directus_policies.ip_access' }) +
.leftJoin('directus_policies', 'directus_access.policy', 'directus_policies.id') + .whereNotNull('directus_policies.ip_access'); + + // No roles and no user means unauthenticated request + if (accountability.roles.length === 0 && !accountability.user) { + query.where({ + role: null, + user: null, + }); + } else { + query.where(function () { + if (accountability.user) { + this.orWhere('directus_access.user', accountability.user); + } + + this.orWhereIn('directus_access.role', accountability.roles); + }); + } + + const rows = await query; + + return rows.filter(({ ip_access }) => ip_access).map(({ ip_access }) => toArray(ip_access)); +} diff --git a/api/src/permissions/modules/process-ast/lib/extract-fields-from-children.test.ts b/api/src/permissions/modules/process-ast/lib/extract-fields-from-children.test.ts new file mode 100644 index 0000000000..e55009b819 --- /dev/null +++ b/api/src/permissions/modules/process-ast/lib/extract-fields-from-children.test.ts @@ -0,0 +1,438 @@ +import type { DeepPartial, SchemaOverview } from '@directus/types'; +import { describe, expect, it } from 'vitest'; +import type { FieldNode, FunctionFieldNode, NestedCollectionNode } from '../../../../types/ast.js'; +import type { FieldMap } from '../types.js'; +import { extractFieldsFromChildren } from './extract-fields-from-children.js'; + +function createFieldMap({ read, other }: Partial<FieldMap> = {}): FieldMap { + return { read: new Map(read), other: new Map(other) }; +} + +describe('Global', () => { + it('Creates FieldMap entry for passed collection', () => { + const fieldMap = createFieldMap(); + + extractFieldsFromChildren('test-collection', [], fieldMap, {} as SchemaOverview, []); + + expect(fieldMap).toEqual( + createFieldMap({ other: new Map([['', { collection: 'test-collection', fields: new Set() }]]) }), + ); + }); + + it('Uses passed path as map key', () => { + const fieldMap = createFieldMap(); + + extractFieldsFromChildren('test-collection', [], fieldMap, {} as SchemaOverview, ['path', 
'to', 'fields']); + + expect(fieldMap).toEqual( + createFieldMap({ other: new Map([['path.to.fields', { collection: 'test-collection', fields: new Set() }]]) }), + ); + }); +}); + +describe('a2o', () => { + it('Extracts children for each related collection with the prefixed path', () => { + const fieldMap = createFieldMap(); + + const children: Partial[] = [ + { + type: 'a2o', + fieldKey: 'test-a2o-a', + children: { + 'test-collection-a': [{ type: 'field', fieldKey: 'test-field-key-a', name: 'test-field-name-a' }], + 'test-collection-b': [{ type: 'field', fieldKey: 'test-field-key-b', name: 'test-field-name-b' }], + }, + relation: { + field: 'test-a2o-a', + }, + query: { + 'test-collection-a': { + filter: { + 'test-filter-field-a': { + _eq: 'irrelevant', + }, + }, + }, + }, + }, + ] as unknown as Partial[]; + + extractFieldsFromChildren( + 'test-collection', + children as NestedCollectionNode[], + fieldMap, + { relations: [] } as unknown as SchemaOverview, + [], + ); + + expect(fieldMap).toEqual( + createFieldMap({ + other: new Map([ + ['', { collection: 'test-collection', fields: new Set(['test-a2o-a']) }], + ['test-a2o-a:test-collection-a', { collection: 'test-collection-a', fields: new Set(['test-field-name-a']) }], + ['test-a2o-a:test-collection-b', { collection: 'test-collection-b', fields: new Set(['test-field-name-b']) }], + ]), + read: new Map([ + [ + 'test-a2o-a:test-collection-a', + { collection: 'test-collection-a', fields: new Set(['test-filter-field-a']) }, + ], + ]), + }), + ); + }); + + it('Extracts fields used in query', () => { + const fieldMap = createFieldMap(); + + const children: Partial[] = [ + { + type: 'a2o', + fieldKey: 'test-a2o-a', + children: { + 'test-collection-a': [{ type: 'field', fieldKey: 'test-field-key-a', name: 'test-field-name-a' }], + 'test-collection-b': [{ type: 'field', fieldKey: 'test-field-key-b', name: 'test-field-name-b' }], + }, + relation: { + field: 'test-a2o-a', + }, + }, + { + type: 'a2o', + fieldKey: 'test-a2o-b', 
+ children: { + 'test-collection-a': [{ type: 'field', fieldKey: 'test-field-key-a2', name: 'test-field-name-a2' }], + 'test-collection-c': [{ type: 'field', fieldKey: 'test-field-key-c', name: 'test-field-name-c' }], + }, + relation: { + field: 'test-a2o-b', + }, + }, + ] as unknown as Partial[]; + + extractFieldsFromChildren( + 'test-collection', + children as NestedCollectionNode[], + fieldMap, + {} as SchemaOverview, + [], + ); + + expect(fieldMap).toEqual( + createFieldMap({ + other: new Map([ + ['', { collection: 'test-collection', fields: new Set(['test-a2o-a', 'test-a2o-b']) }], + ['test-a2o-a:test-collection-a', { collection: 'test-collection-a', fields: new Set(['test-field-name-a']) }], + ['test-a2o-a:test-collection-b', { collection: 'test-collection-b', fields: new Set(['test-field-name-b']) }], + [ + 'test-a2o-b:test-collection-a', + { collection: 'test-collection-a', fields: new Set(['test-field-name-a2']) }, + ], + ['test-a2o-b:test-collection-c', { collection: 'test-collection-c', fields: new Set(['test-field-name-c']) }], + ]), + }), + ); + }); +}); + +describe('m2o', () => { + it('Extract children with correct path', () => { + const fieldMap = createFieldMap(); + + const children: DeepPartial[] = [ + { + type: 'm2o', + fieldKey: 'test-m2o-a', + relation: { + field: 'test-m2o-a', + related_collection: 'test-related-collection-a', + }, + children: [ + { type: 'field', fieldKey: 'test-field-key-a', name: 'test-field-name-a' }, + { type: 'field', fieldKey: 'test-field-key-b', name: 'test-field-name-b' }, + ], + }, + { + type: 'm2o', + fieldKey: 'test-m2o-b', + relation: { + field: 'test-m2o-b', + related_collection: 'test-related-collection-b', + }, + children: [ + { type: 'field', fieldKey: 'test-field-key-a', name: 'test-field-name-a' }, + { type: 'field', fieldKey: 'test-field-key-b', name: 'test-field-name-b' }, + ], + }, + ]; + + extractFieldsFromChildren( + 'test-collection', + children as NestedCollectionNode[], + fieldMap, + {} as 
SchemaOverview, + [], + ); + + expect(fieldMap).toEqual( + createFieldMap({ + other: new Map([ + ['', { collection: 'test-collection', fields: new Set(['test-m2o-a', 'test-m2o-b']) }], + [ + 'test-m2o-a', + { collection: 'test-related-collection-a', fields: new Set(['test-field-name-a', 'test-field-name-b']) }, + ], + [ + 'test-m2o-b', + { collection: 'test-related-collection-b', fields: new Set(['test-field-name-a', 'test-field-name-b']) }, + ], + ]), + }), + ); + }); + + it('Extracts fields used in query', () => { + const fieldMap = createFieldMap(); + + const children: DeepPartial[] = [ + { + type: 'm2o', + fieldKey: 'test-m2o-a', + relation: { + field: 'test-m2o-a', + related_collection: 'test-related-collection-a', + }, + children: [{ type: 'field', fieldKey: 'test-field-key-a', name: 'test-field-name-a' }], + query: { + filter: { + 'test-filter-field-a': { + _eq: 'hi', + }, + }, + }, + }, + ]; + + extractFieldsFromChildren( + 'test-collection', + children as NestedCollectionNode[], + fieldMap, + { relations: [] } as unknown as SchemaOverview, + [], + ); + + expect(fieldMap).toEqual( + createFieldMap({ + other: new Map([ + ['', { collection: 'test-collection', fields: new Set(['test-m2o-a']) }], + ['test-m2o-a', { collection: 'test-related-collection-a', fields: new Set(['test-field-name-a']) }], + ]), + read: new Map([ + ['test-m2o-a', { collection: 'test-related-collection-a', fields: new Set(['test-filter-field-a']) }], + ]), + }), + ); + }); +}); + +describe('o2m', () => { + it('Extract children with correct path', () => { + const fieldMap = createFieldMap(); + + const children: DeepPartial[] = [ + { + type: 'o2m', + fieldKey: 'test-o2m-a', + relation: { + collection: 'test-related-collection-a', + meta: { + one_field: 'test-o2m-a', + }, + }, + children: [ + { type: 'field', fieldKey: 'test-field-key-a', name: 'test-field-name-a' }, + { type: 'field', fieldKey: 'test-field-key-b', name: 'test-field-name-b' }, + ], + }, + { + type: 'o2m', + fieldKey: 
'test-o2m-b', + relation: { + collection: 'test-related-collection-b', + meta: { + one_field: 'test-o2m-b', + }, + }, + children: [ + { type: 'field', fieldKey: 'test-field-key-a', name: 'test-field-name-a' }, + { type: 'field', fieldKey: 'test-field-key-b', name: 'test-field-name-b' }, + ], + }, + ]; + + extractFieldsFromChildren( + 'test-collection', + children as NestedCollectionNode[], + fieldMap, + {} as SchemaOverview, + [], + ); + + expect(fieldMap).toEqual( + createFieldMap({ + other: new Map([ + ['', { collection: 'test-collection', fields: new Set(['test-o2m-a', 'test-o2m-b']) }], + [ + 'test-o2m-a', + { collection: 'test-related-collection-a', fields: new Set(['test-field-name-a', 'test-field-name-b']) }, + ], + [ + 'test-o2m-b', + { collection: 'test-related-collection-b', fields: new Set(['test-field-name-a', 'test-field-name-b']) }, + ], + ]), + }), + ); + }); + + it('Extracts fields used in query', () => { + const fieldMap = createFieldMap(); + + const children: DeepPartial[] = [ + { + type: 'o2m', + fieldKey: 'test-o2m-a', + relation: { + collection: 'test-related-collection-a', + meta: { + one_field: 'test-o2m-a', + }, + }, + children: [{ type: 'field', fieldKey: 'test-field-key-a', name: 'test-field-name-a' }], + query: { + sort: ['-test-sort-field-a'], + }, + }, + ]; + + extractFieldsFromChildren( + 'test-collection', + children as NestedCollectionNode[], + fieldMap, + { relations: [] } as unknown as SchemaOverview, + [], + ); + + expect(fieldMap).toEqual( + createFieldMap({ + other: new Map([ + ['', { collection: 'test-collection', fields: new Set(['test-o2m-a']) }], + ['test-o2m-a', { collection: 'test-related-collection-a', fields: new Set(['test-field-name-a']) }], + ]), + read: new Map([ + ['test-o2m-a', { collection: 'test-related-collection-a', fields: new Set(['test-sort-field-a']) }], + ]), + }), + ); + }); +}); + +describe('functionField', () => { + it('Adds basic function field to field set', () => { + const fieldMap = 
createFieldMap(); + + const children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[] = [ + { + type: 'functionField', + fieldKey: 'year(test-field-key-a)', + name: 'test-field-name-a', + query: {}, + relatedCollection: 'test-related-collection', + }, + ] as FunctionFieldNode[]; + + extractFieldsFromChildren('test-collection', children, fieldMap, {} as SchemaOverview, []); + + expect(fieldMap).toEqual( + createFieldMap({ + other: new Map([ + ['', { collection: 'test-collection', fields: new Set(['test-field-name-a']) }], + ['year(test-field-key-a)', { collection: 'test-related-collection', fields: new Set([]) }], + ]), + }), + ); + }); + + it('Processes query', () => { + const fieldMap = createFieldMap(); + + const children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[] = [ + { + type: 'functionField', + fieldKey: 'year(test-field-key-a)', + name: 'test-field-name-a', + query: { + sort: ['rating'], + }, + relatedCollection: 'test-related-collection', + }, + ] as FunctionFieldNode[]; + + extractFieldsFromChildren( + 'test-collection', + children, + fieldMap, + { relations: [] } as unknown as SchemaOverview, + [], + ); + + expect(fieldMap).toEqual( + createFieldMap({ + other: new Map([ + ['', { collection: 'test-collection', fields: new Set(['test-field-name-a']) }], + ['year(test-field-key-a)', { collection: 'test-related-collection', fields: new Set() }], + ]), + read: new Map([ + ['year(test-field-key-a)', { collection: 'test-related-collection', fields: new Set(['rating']) }], + ]), + }), + ); + }); +}); + +describe('field', () => { + it('Adds basic fields to field set', () => { + const fieldMap = createFieldMap(); + + const children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[] = [ + { type: 'field', fieldKey: 'test-field-key-a', name: 'test-field-name-a' }, + { type: 'field', fieldKey: 'test-field-key-b', name: 'test-field-name-b' }, + ] as FieldNode[]; + + extractFieldsFromChildren('test-collection', children, fieldMap, {} as 
SchemaOverview, []); + + expect(fieldMap).toEqual( + createFieldMap({ + other: new Map([ + ['', { collection: 'test-collection', fields: new Set(['test-field-name-a', 'test-field-name-b']) }], + ]), + }), + ); + }); + + it('Strips functions from field keys', () => { + const fieldMap = createFieldMap(); + + const children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[] = [ + { type: 'field', fieldKey: 'someFn(test-field-key-a)', name: 'test-field-name-a' }, + ] as FieldNode[]; + + extractFieldsFromChildren('test-collection', children, fieldMap, {} as SchemaOverview, []); + + expect(fieldMap).toEqual( + createFieldMap({ + other: new Map([['', { collection: 'test-collection', fields: new Set(['test-field-name-a']) }]]), + }), + ); + }); +}); diff --git a/api/src/permissions/modules/process-ast/lib/extract-fields-from-children.ts b/api/src/permissions/modules/process-ast/lib/extract-fields-from-children.ts new file mode 100644 index 0000000000..c1760723ea --- /dev/null +++ b/api/src/permissions/modules/process-ast/lib/extract-fields-from-children.ts @@ -0,0 +1,61 @@ +import type { SchemaOverview } from '@directus/types'; +import type { FieldNode, FunctionFieldNode, NestedCollectionNode } from '../../../../types/ast.js'; +import { getUnaliasedFieldKey } from '../../../utils/get-unaliased-field-key.js'; +import type { FieldMap, QueryPath } from '../types.js'; +import { formatA2oKey } from '../utils/format-a2o-key.js'; +import { getInfoForPath } from '../utils/get-info-for-path.js'; +import { extractFieldsFromQuery } from './extract-fields-from-query.js'; + +export function extractFieldsFromChildren( + collection: string, + children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[], + fieldMap: FieldMap, + schema: SchemaOverview, + path: QueryPath = [], +) { + const info = getInfoForPath(fieldMap, 'other', path, collection); + + for (const child of children) { + info.fields.add(getUnaliasedFieldKey(child)); + + if (child.type === 'a2o') { + for (const 
[collection, children] of Object.entries(child.children)) { + extractFieldsFromChildren(collection, children, fieldMap, schema, [ + ...path, + formatA2oKey(child.fieldKey, collection), + ]); + } + + if (child.query) { + for (const [collection, query] of Object.entries(child.query)) { + extractFieldsFromQuery(collection, query, fieldMap, schema, [ + ...path, + formatA2oKey(child.fieldKey, collection), + ]); + } + } + } else if (child.type === 'm2o') { + extractFieldsFromChildren(child.relation.related_collection!, child.children, fieldMap, schema, [ + ...path, + child.fieldKey, + ]); + + extractFieldsFromQuery(child.relation.related_collection!, child.query, fieldMap, schema, [ + ...path, + child.fieldKey, + ]); + } else if (child.type === 'o2m') { + extractFieldsFromChildren(child.relation.collection!, child.children, fieldMap, schema, [ + ...path, + child.fieldKey, + ]); + + extractFieldsFromQuery(child.relation.collection!, child.query, fieldMap, schema, [...path, child.fieldKey]); + } else if (child.type === 'functionField') { + // functionFields operate on a related o2m collection, we have to make sure we include a + // no-field read check to the related collection + extractFieldsFromChildren(child.relatedCollection, [], fieldMap, schema, [...path, child.fieldKey]); + extractFieldsFromQuery(child.relatedCollection, child.query, fieldMap, schema, [...path, child.fieldKey]); + } + } +} diff --git a/api/src/permissions/modules/process-ast/lib/extract-fields-from-query.test.ts b/api/src/permissions/modules/process-ast/lib/extract-fields-from-query.test.ts new file mode 100644 index 0000000000..2a82cc0919 --- /dev/null +++ b/api/src/permissions/modules/process-ast/lib/extract-fields-from-query.test.ts @@ -0,0 +1,182 @@ +import type { DeepPartial, Query, SchemaOverview } from '@directus/types'; +import { expect, test } from 'vitest'; +import { extractFieldsFromQuery } from './extract-fields-from-query.js'; + +test('Appends paths used in query to FieldMap', () => { + 
const fieldMap = { read: new Map(), other: new Map() }; + + const query: Query = { + filter: { + author: { + _eq: 1, + }, + }, + sort: ['id'], + }; + + const schema: DeepPartial = { relations: [] }; + + extractFieldsFromQuery('test-collection', query, fieldMap, schema as SchemaOverview); + + expect(fieldMap.read).toEqual(new Map([['', { collection: 'test-collection', fields: new Set(['author', 'id']) }]])); +}); + +test('Appends nested paths based on m2o relational information', () => { + const fieldMap = { read: new Map(), other: new Map() }; + + const query: Query = { + filter: { + author: { + name: { + _eq: 'Rijk', + }, + }, + }, + sort: ['id'], + }; + + const schema: DeepPartial = { + relations: [ + { + collection: 'test-collection', + field: 'author', + related_collection: 'test-collection-authors', + }, + ], + }; + + extractFieldsFromQuery('test-collection', query, fieldMap, schema as SchemaOverview); + + expect(fieldMap.read).toEqual( + new Map([ + ['', { collection: 'test-collection', fields: new Set(['author', 'id']) }], + ['author', { collection: 'test-collection-authors', fields: new Set(['name']) }], + ]), + ); +}); + +test('Appends nested paths based on o2m relational information', () => { + const fieldMap = { read: new Map(), other: new Map() }; + + const query: Query = { + filter: { + categories: { + _some: { + name: { + _eq: 'recipe', + }, + }, + }, + }, + sort: ['id'], + }; + + const schema: DeepPartial = { + relations: [ + { + collection: 'test-collection-categories', + field: 'article', + related_collection: 'test-collection', + meta: { + one_field: 'categories', + }, + }, + ], + }; + + extractFieldsFromQuery('test-collection', query, fieldMap, schema as SchemaOverview); + + expect(fieldMap.read).toEqual( + new Map([ + ['', { collection: 'test-collection', fields: new Set(['categories', 'id']) }], + ['categories', { collection: 'test-collection-categories', fields: new Set(['name']) }], + ]), + ); +}); + +test('Appends nested paths based on 
collection scope in a2o filter', () => { + const fieldMap = { read: new Map(), other: new Map() }; + + const query: Query = { + filter: { + 'item:headings': { + title: { + _eq: 'Hello World', + }, + }, + }, + }; + + const schema: DeepPartial = { + relations: [], + }; + + extractFieldsFromQuery('test-collection', query, fieldMap, schema as SchemaOverview); + + expect(fieldMap.read).toEqual( + new Map([ + ['', { collection: 'test-collection', fields: new Set(['item']) }], + ['item:headings', { collection: 'headings', fields: new Set(['title']) }], + ]), + ); +}); + +test('All together now', () => { + const fieldMap = { read: new Map(), other: new Map() }; + + const query: Query = { + filter: { + _or: [ + { + 'item:headings': { + categories: { + _some: { + created_by: { + name: { + _eq: 'Rijk', + }, + }, + }, + }, + }, + }, + { + 'item:headings': { + status: { + _eq: 'published', + }, + }, + }, + ], + }, + }; + + const schema: DeepPartial = { + relations: [ + { + collection: 'categories', + field: 'heading', + related_collection: 'headings', + meta: { + one_field: 'categories', + }, + }, + { + collection: 'categories', + field: 'created_by', + related_collection: 'authors', + }, + ], + }; + + extractFieldsFromQuery('test-collection', query, fieldMap, schema as SchemaOverview); + + expect(fieldMap.read).toEqual( + new Map([ + ['', { collection: 'test-collection', fields: new Set(['item']) }], + ['item:headings', { collection: 'headings', fields: new Set(['categories', 'status']) }], + ['item:headings.categories', { collection: 'categories', fields: new Set(['created_by']) }], + ['item:headings.categories.created_by', { collection: 'authors', fields: new Set(['name']) }], + ]), + ); +}); diff --git a/api/src/permissions/modules/process-ast/lib/extract-fields-from-query.ts b/api/src/permissions/modules/process-ast/lib/extract-fields-from-query.ts new file mode 100644 index 0000000000..9fd7436b5a --- /dev/null +++ 
b/api/src/permissions/modules/process-ast/lib/extract-fields-from-query.ts @@ -0,0 +1,72 @@ +import type { Query, SchemaOverview } from '@directus/types'; +import { parseFilterKey } from '../../../../utils/parse-filter-key.js'; +import type { CollectionKey, FieldKey, FieldMap } from '../types.js'; +import { extractPathsFromQuery } from '../utils/extract-paths-from-query.js'; +import { findRelatedCollection } from '../utils/find-related-collection.js'; +import { getInfoForPath } from '../utils/get-info-for-path.js'; + +export function extractFieldsFromQuery( + collection: CollectionKey, + query: Query, + fieldMap: FieldMap, + schema: SchemaOverview, + pathPrefix: FieldKey[] = [], +) { + if (!query) return; + + const { paths: otherPaths, readOnlyPaths } = extractPathsFromQuery(query); + + const groupedPaths = { + other: otherPaths, + read: readOnlyPaths, + }; + + for (const [group, paths] of Object.entries(groupedPaths) as [keyof FieldMap, string[][]][]) { + for (const path of paths) { + /** + * Current path stack. For each iteration of the path loop this will be appended with the + * current part we're operating on. So when looping over ['category', 'created_by', 'name'] + * the first iteration it'll be `['category']`, and then `['category', 'created_by']` etc. + */ + const stack = []; + + /** + * Current collection the path part we're operating on lives in. Once we hit a relational + * field, this will be updated to the related collection, so we can follow the relational path + * left to right. 
+ */ + let collectionContext = collection; + + for (const part of path) { + const info = getInfoForPath(fieldMap, group, [...pathPrefix, ...stack], collectionContext); + + // A2o specifier field fetch + if (part.includes(':')) { + const [fieldKey, collection] = part.split(':') as [string, string]; + info.fields.add(fieldKey); + collectionContext = collection; + stack.push(part); + continue; + } + + if (part.startsWith('$FOLLOW(') && part.endsWith(')')) { + // Don't add this implicit relation field to fields, as it will be accounted for in the reverse direction + } else { + const { fieldName } = parseFilterKey(part); + info.fields.add(fieldName); + } + + /** + * Related collection for the current part. Is null when the current field isn't a + * relational field. + */ + const relatedCollection = findRelatedCollection(collectionContext, part, schema); + + if (relatedCollection) { + collectionContext = relatedCollection; + stack.push(part); + } + } + } + } +} diff --git a/api/src/permissions/modules/process-ast/lib/field-map-from-ast.test.ts b/api/src/permissions/modules/process-ast/lib/field-map-from-ast.test.ts new file mode 100644 index 0000000000..5bf0863d61 --- /dev/null +++ b/api/src/permissions/modules/process-ast/lib/field-map-from-ast.test.ts @@ -0,0 +1,108 @@ +import type { DeepPartial, SchemaOverview } from '@directus/types'; +import { expect, test } from 'vitest'; +import type { AST } from '../../../../types/ast.js'; +import { fieldMapFromAst } from './field-map-from-ast.js'; + +test('Extracts fields from children and query', () => { + const ast: DeepPartial = { + type: 'root', + name: 'articles', + query: { + sort: ['-publish_date'], + filter: { + _and: [ + { + categories: { + _some: { + name: { + _eq: 'Recipes', + }, + }, + }, + }, + { + status: { + _eq: 'published', + }, + }, + ], + }, + }, + children: [ + { type: 'field', fieldKey: 'title', name: 'title' }, + { + type: 'm2o', + fieldKey: 'author', + children: [ + { + type: 'field', + fieldKey: 'name', + 
name: 'name', + }, + ], + relation: { + related_collection: 'authors', + field: 'author', + }, + }, + { + type: 'a2o', + fieldKey: 'item', + names: ['headings', 'paragraphs'], + children: { + headings: [{ type: 'field', fieldKey: 'text', name: 'text' }], + paragraphs: [{ type: 'field', fieldKey: 'body', name: 'body' }], + }, + query: { + headings: { + filter: { + status: { + _eq: 'published', + }, + }, + }, + }, + relation: { + field: 'item', + }, + }, + ], + }; + + const schema: DeepPartial = { + relations: [ + { + collection: 'categories', + field: 'article', + related_collection: 'articles', + meta: { + one_field: 'categories', + }, + }, + ], + }; + + const fieldMap = fieldMapFromAst(ast as AST, schema as SchemaOverview); + + const expectedRead = new Map([ + ['', { collection: 'articles', fields: new Set(['status', 'categories', 'publish_date']) }], + ['categories', { collection: 'categories', fields: new Set(['name']) }], + ['item:headings', { collection: 'headings', fields: new Set(['status']) }], + ]); + + const expectedOther = new Map([ + [ + '', + { + collection: 'articles', + fields: new Set(['title', 'author', 'item']), + }, + ], + ['author', { collection: 'authors', fields: new Set(['name']) }], + ['item:headings', { collection: 'headings', fields: new Set(['text']) }], + ['item:paragraphs', { collection: 'paragraphs', fields: new Set(['body']) }], + ]); + + expect(fieldMap.read).toEqual(expectedRead); + expect(fieldMap.other).toEqual(expectedOther); +}); diff --git a/api/src/permissions/modules/process-ast/lib/field-map-from-ast.ts b/api/src/permissions/modules/process-ast/lib/field-map-from-ast.ts new file mode 100644 index 0000000000..330b0960a8 --- /dev/null +++ b/api/src/permissions/modules/process-ast/lib/field-map-from-ast.ts @@ -0,0 +1,14 @@ +import type { SchemaOverview } from '@directus/types'; +import type { AST } from '../../../../types/ast.js'; +import type { FieldMap } from '../types.js'; +import { extractFieldsFromChildren } from 
'./extract-fields-from-children.js'; +import { extractFieldsFromQuery } from './extract-fields-from-query.js'; + +export function fieldMapFromAst(ast: AST, schema: SchemaOverview): FieldMap { + const fieldMap: FieldMap = { read: new Map(), other: new Map() }; + + extractFieldsFromChildren(ast.name, ast.children, fieldMap, schema); + extractFieldsFromQuery(ast.name, ast.query, fieldMap, schema); + + return fieldMap; +} diff --git a/api/src/permissions/modules/process-ast/lib/inject-cases.test.ts b/api/src/permissions/modules/process-ast/lib/inject-cases.test.ts new file mode 100644 index 0000000000..2958e8fc50 --- /dev/null +++ b/api/src/permissions/modules/process-ast/lib/inject-cases.test.ts @@ -0,0 +1,349 @@ +import type { DeepPartial, Permission } from '@directus/types'; +import { beforeAll, expect, test, vi } from 'vitest'; +import type { AST } from '../../../../types/ast.js'; +import { getUnaliasedFieldKey } from '../../../utils/get-unaliased-field-key.js'; +import { injectCases } from './inject-cases.js'; + +vi.mock('../../../utils/get-unaliased-field-key.js'); + +beforeAll(() => { + vi.clearAllMocks(); + + // This just returns the field key, normally the ast would be of a proper type and getUnaliasedFieldKey would work + vi.mocked(getUnaliasedFieldKey).mockImplementation((field) => field.fieldKey); +}); + +test('Injects cases related to ast', () => { + const ast: DeepPartial = { + name: 'test-collection-a', + cases: [], + children: [{ fieldKey: 'test-field-a' }], + }; + + const permissions: DeepPartial[] = [ + { collection: 'test-collection-a', permissions: { status: { _eq: 'published' } }, fields: ['test-field-a'] }, + { collection: 'test-collection-b', permissions: null, fields: [] }, + ]; + + injectCases(ast as AST, permissions as Permission[]); + + expect(ast.cases).toEqual([{ status: { _eq: 'published' } }]); +}); + +test('Ignores cases for fields that are not requested', () => { + const ast: DeepPartial = { + name: 'test-collection-a', + cases: [], + 
children: [{ fieldKey: 'test-field-a' }], + }; + + const permissions: DeepPartial[] = [ + { collection: 'test-collection-a', permissions: { status: { _eq: 'published' } }, fields: ['test-field-a'] }, + { collection: 'test-collection-a', permissions: null, fields: ['not-requested-field'] }, + ]; + + injectCases(ast as AST, permissions as Permission[]); + + expect(ast.cases).toEqual([{ status: { _eq: 'published' } }]); +}); + +test('Adds cases that apply to fields in ast children', () => { + const ast: DeepPartial = { + name: 'test-collection-a', + cases: [], + children: [ + { fieldKey: 'test-field-a', whenCase: [] }, + { fieldKey: 'test-field-b', whenCase: [] }, + { fieldKey: 'test-field-c', whenCase: [] }, + ], + }; + + const permissions: DeepPartial[] = [ + { + collection: 'test-collection-a', + permissions: { status: { _eq: 'published' } }, + fields: ['test-field-a', 'test-field-b'], + }, + { + collection: 'test-collection-a', + permissions: { status: { _eq: 'draft' } }, + fields: ['test-field-b', 'test-field-c'], + }, + ]; + + injectCases(ast as AST, permissions as Permission[]); + + expect(ast.children).toEqual([ + { + fieldKey: 'test-field-a', + whenCase: [0], + }, + { + fieldKey: 'test-field-b', + whenCase: [0, 1], + }, + { + fieldKey: 'test-field-c', + whenCase: [1], + }, + ]); +}); + +test('Ignores cases for fields that are allowed by "all" permissions', () => { + const ast: DeepPartial = { + name: 'test-collection-a', + cases: [], + children: [{ fieldKey: 'test-field-a', whenCase: [] }], + }; + + const permissions: DeepPartial[] = [ + { collection: 'test-collection-a', permissions: { status: { _eq: 'published' } }, fields: ['test-field-a'] }, + + // This permission says you can always get field-a no matter what, so the permission above should be ignored + { collection: 'test-collection-a', permissions: {}, fields: ['test-field-a'] }, + ]; + + injectCases(ast as AST, permissions as Permission[]); + + expect(ast.children).toEqual([ + { + fieldKey: 
'test-field-a', + whenCase: [], // empty as there's a "all" permission + }, + ]); +}); + +// In real life usage, the inject function is called after the AST has already been validated for +// access, so this error should never be thrown in production use +test('Errors out when there are no permissions for the requested fields', () => { + const ast: DeepPartial = { + name: 'test-collection-a', + cases: [], + children: [{ fieldKey: 'test-field-a', whenCase: [] }], + }; + + const permissions: DeepPartial[] = [ + { collection: 'test-collection-a', permissions: { status: { _eq: 'published' } }, fields: ['test-field-b'] }, + ]; + + expect(() => injectCases(ast as AST, permissions as Permission[])).toThrow(); +}); + +test('Adds the cases to each field if affected fields is *', () => { + const ast: DeepPartial = { + name: 'test-collection-a', + cases: [], + children: [ + { fieldKey: 'test-field-a', whenCase: [] }, + { fieldKey: 'test-field-b', whenCase: [] }, + { fieldKey: 'test-field-c', whenCase: [] }, + ], + }; + + const permissions: DeepPartial[] = [ + { + collection: 'test-collection-a', + permissions: { status: { _eq: 'published' } }, + fields: ['*'], + }, + { + collection: 'test-collection-a', + permissions: { status: { _eq: 'draft' } }, + fields: ['test-field-b', 'test-field-c'], + }, + ]; + + injectCases(ast as AST, permissions as Permission[]); + + expect(ast.children).toEqual([ + { + fieldKey: 'test-field-a', + whenCase: [0], + }, + { + fieldKey: 'test-field-b', + whenCase: [0, 1], + }, + { + fieldKey: 'test-field-c', + whenCase: [0, 1], + }, + ]); +}); + +test('Processes m2o children recursively', () => { + const ast: DeepPartial = { + name: 'test-collection-a', + cases: [], + children: [ + { fieldKey: 'test-field-a', whenCase: [] }, + { + type: 'm2o', + fieldKey: 'test-field-b', + relation: { related_collection: 'test-collection-b' }, + children: [ + { fieldKey: 'test-field-related-a', whenCase: [] }, + { fieldKey: 'test-field-related-b', whenCase: [] }, + ], 
+ }, + ], + }; + + const permissions: DeepPartial[] = [ + { + collection: 'test-collection-a', + permissions: { status: { _eq: 'published' } }, + fields: ['*'], + }, + { + collection: 'test-collection-b', + permissions: { status: { _eq: 'draft' } }, + fields: ['test-field-related-a'], + }, + { + collection: 'test-collection-b', + permissions: { status: { _eq: 'under-review' } }, + fields: ['test-field-related-b'], + }, + ]; + + injectCases(ast as AST, permissions as Permission[]); + + expect(ast.children).toEqual([ + { fieldKey: 'test-field-a', whenCase: [0] }, + { + type: 'm2o', + fieldKey: 'test-field-b', + relation: { related_collection: 'test-collection-b' }, + cases: [{ status: { _eq: 'draft' } }, { status: { _eq: 'under-review' } }], + whenCase: [0], + children: [ + { fieldKey: 'test-field-related-a', whenCase: [0] }, + { fieldKey: 'test-field-related-b', whenCase: [1] }, + ], + }, + ]); +}); + +test('Processes o2m children recursively', () => { + const ast: DeepPartial = { + name: 'test-collection-a', + cases: [], + children: [ + { fieldKey: 'test-field-a', whenCase: [] }, + { + type: 'o2m', + fieldKey: 'test-field-b', + relation: { collection: 'test-collection-b' }, + children: [ + { fieldKey: 'test-field-related-a', whenCase: [] }, + { fieldKey: 'test-field-related-b', whenCase: [] }, + ], + }, + ], + }; + + const permissions: DeepPartial[] = [ + { + collection: 'test-collection-a', + permissions: { status: { _eq: 'published' } }, + fields: ['*'], + }, + { + collection: 'test-collection-b', + permissions: { status: { _eq: 'draft' } }, + fields: ['test-field-related-a'], + }, + { + collection: 'test-collection-b', + permissions: { status: { _eq: 'under-review' } }, + fields: ['test-field-related-b'], + }, + ]; + + injectCases(ast as AST, permissions as Permission[]); + + expect(ast.children).toEqual([ + { fieldKey: 'test-field-a', whenCase: [0] }, + { + type: 'o2m', + fieldKey: 'test-field-b', + relation: { collection: 'test-collection-b' }, + cases: [{ 
status: { _eq: 'draft' } }, { status: { _eq: 'under-review' } }], + whenCase: [0], + children: [ + { fieldKey: 'test-field-related-a', whenCase: [0] }, + { fieldKey: 'test-field-related-b', whenCase: [1] }, + ], + }, + ]); +}); + +test('Processes a2o children recursively', () => { + const ast: DeepPartial = { + name: 'test-collection-a', + cases: [], + children: [ + { fieldKey: 'test-field-a', whenCase: [] }, + { + type: 'a2o', + fieldKey: 'test-field-b', + names: ['test-collection-b', 'test-collection-c'], + cases: {}, + children: { + 'test-collection-b': [ + { fieldKey: 'test-field-related-a', whenCase: [] }, + { fieldKey: 'test-field-related-b', whenCase: [] }, + ], + 'test-collection-c': [], + }, + }, + ], + }; + + const permissions: DeepPartial[] = [ + { + collection: 'test-collection-a', + permissions: { status: { _eq: 'published' } }, + fields: ['*'], + }, + { + collection: 'test-collection-b', + permissions: { status: { _eq: 'draft' } }, + fields: ['test-field-related-a'], + }, + { + collection: 'test-collection-b', + permissions: { status: { _eq: 'under-review' } }, + fields: ['test-field-related-b'], + }, + { + collection: 'test-collection-c', + permissions: {}, + fields: ['*'], + }, + ]; + + injectCases(ast as AST, permissions as Permission[]); + + expect(ast.children).toEqual([ + { fieldKey: 'test-field-a', whenCase: [0] }, + { + type: 'a2o', + fieldKey: 'test-field-b', + names: ['test-collection-b', 'test-collection-c'], + cases: { + 'test-collection-b': [{ status: { _eq: 'draft' } }, { status: { _eq: 'under-review' } }], + 'test-collection-c': [{}], + }, + whenCase: [0], + children: { + 'test-collection-b': [ + { fieldKey: 'test-field-related-a', whenCase: [0] }, + { fieldKey: 'test-field-related-b', whenCase: [1] }, + ], + 'test-collection-c': [], + }, + }, + ]); +}); diff --git a/api/src/permissions/modules/process-ast/lib/inject-cases.ts b/api/src/permissions/modules/process-ast/lib/inject-cases.ts new file mode 100644 index 0000000000..4dac736484 
--- /dev/null +++ b/api/src/permissions/modules/process-ast/lib/inject-cases.ts @@ -0,0 +1,128 @@ +import type { Filter, Permission } from '@directus/types'; +import type { AST, FieldNode, FunctionFieldNode, NestedCollectionNode } from '../../../../types/ast.js'; +import { getUnaliasedFieldKey } from '../../../utils/get-unaliased-field-key.js'; +import type { FieldKey } from '../types.js'; +import { dedupeAccess } from '../utils/dedupe-access.js'; +import { hasItemPermissions } from '../utils/has-item-permissions.js'; +import { uniq } from 'lodash-es'; + +/** + * Mutates passed AST + * + * @param ast - Read query AST + * @param permissions - Expected to be filtered down for the policies and action already + */ +export function injectCases(ast: AST, permissions: Permission[]) { + ast.cases = processChildren(ast.name, ast.children, permissions); +} + +function processChildren( + collection: string, + children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[], + permissions: Permission[], +) { + // Use uniq here, since there might be multiple duplications due to aliases or functions + const requestedKeys = uniq(children.map(getUnaliasedFieldKey)); + const { cases, caseMap, allowedFields } = getCases(collection, permissions, requestedKeys); + + // TODO this can be optimized if there is only one rule to skip the whole case/where system, + // since fields that are not allowed at all are already filtered out + + // TODO this can be optimized if all cases are the same for all requested keys, as those should be + // + + for (const child of children) { + const fieldKey = getUnaliasedFieldKey(child); + + const globalWhenCase = caseMap['*']; + const fieldWhenCase = caseMap[fieldKey]; + + // Validation should catch any fields that are attempted to be read that don't have any access control configured. + // When there are no access rules for this field, and no rules for "all" fields `*`, we missed something in the validation + // and should abort. 
+ if (!globalWhenCase && !fieldWhenCase) { + throw new Error(`Cannot extract access permissions for field "${fieldKey}" in collection "${collection}"`); + } + + // The case/when system only needs to take place if no full access is given on this field, + // otherwise we can skip and thus save some query perf overhead + if (!allowedFields.has('*') && !allowedFields.has(fieldKey)) { + // Global and field can't both be undefined as per the error check prior + child.whenCase = [...(globalWhenCase ?? []), ...(fieldWhenCase ?? [])]; + } + + if (child.type === 'm2o') { + child.cases = processChildren(child.relation.related_collection!, child.children, permissions); + } + + if (child.type === 'o2m') { + child.cases = processChildren(child.relation.collection, child.children, permissions); + } + + if (child.type === 'a2o') { + for (const collection of child.names) { + child.cases[collection] = processChildren(collection, child.children[collection] ?? [], permissions); + } + } + + if (child.type === 'functionField') { + const { cases } = getCases(child.relatedCollection, permissions, []); + child.cases = cases; + } + } + + return cases; +} + +function getCases(collection: string, permissions: Permission[], requestedKeys: string[]) { + const permissionsForCollection = permissions.filter((permission) => permission.collection === collection); + + const rules = dedupeAccess(permissionsForCollection); + const cases: Filter[] = []; + const caseMap: Record = {}; + + // TODO this can be optimized if there is only one rule to skip the whole case/when system, + // since fields that are not allowed at all are already filtered out + + // TODO this can be optimized if all cases are the same for all requested keys, as those should be + // + + let index = 0; + + for (const { rule, fields } of rules) { + // If none of the fields in the current permissions rule overlap with the actually requested + // fields in the AST, we can ignore this case altogether + if ( + requestedKeys.length > 0 && 
+ fields.has('*') === false && + Array.from(fields).every((field) => requestedKeys.includes(field) === false) + ) { + continue; + } + + if (rule === null) continue; + + cases.push(rule); + + for (const field of fields) { + caseMap[field] = [...(caseMap[field] ?? []), index]; + } + + index++; + } + + // Field that are allowed no matter what conditions exist for the item. These come from + // permissions where the item read access is "everything" + const allowedFields = new Set( + permissionsForCollection + .filter((permission) => hasItemPermissions(permission) === false) + .map((permission) => permission.fields ?? []) + .flat(), + ); + + return { + cases, + caseMap, + allowedFields, + }; +} diff --git a/api/src/permissions/modules/process-ast/process-ast.test.ts b/api/src/permissions/modules/process-ast/process-ast.test.ts new file mode 100644 index 0000000000..9d18595b4d --- /dev/null +++ b/api/src/permissions/modules/process-ast/process-ast.test.ts @@ -0,0 +1,152 @@ +import { ForbiddenError } from '@directus/errors'; +import type { Accountability, SchemaOverview } from '@directus/types'; +import { beforeEach, expect, test, vi } from 'vitest'; +import type { AST } from '../../../types/ast.js'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; +import { processAst } from './process-ast.js'; + +vi.mock('../../lib/fetch-policies.js'); +vi.mock('../../lib/fetch-permissions.js'); + +vi.mock('../../../services/permissions.js', () => ({ + PermissionsService: vi.fn(), +})); + +vi.mock('../../../services/access.js', () => ({ + AccessService: vi.fn(), +})); + +beforeEach(() => { + vi.clearAllMocks(); + + vi.mocked(fetchPolicies).mockResolvedValue([]); + vi.mocked(fetchPermissions).mockResolvedValue([]); +}); + +test('Returns AST unmodified if accountability is null', async () => { + const ast = { type: 'root', name: 'test-collection', children: [] } 
as unknown as AST; + const schema = { collections: { 'test-collection': {} } as unknown as SchemaOverview } as unknown as SchemaOverview; + const accountability = null; + + const output = await processAst({ action: 'read', accountability, ast }, { schema } as Context); + + expect(output).toBe(ast); +}); + +test('Returns AST unmodified and unverified is current user is admin', async () => { + const ast = { type: 'root', name: 'test-collection', children: [] } as unknown as AST; + const schema = { collections: { 'test-collection': {} } as unknown as SchemaOverview } as unknown as SchemaOverview; + const accountability = { user: null, roles: [], admin: true } as unknown as Accountability; + + const output = await processAst({ accountability, action: 'read', ast }, { schema } as Context); + + expect(output).toBe(ast); +}); + +test('Validates all paths existence in AST if accountability is null', async () => { + const ast = { type: 'root', name: 'test-collection', children: [] } as unknown as AST; + const schema = { collections: {} } as unknown as SchemaOverview; + const accountability = null; + + await expect(async () => + processAst({ action: 'read', accountability, ast }, { schema } as Context), + ).rejects.toThrowError(ForbiddenError); +}); + +test('Validates all paths existence in AST if current user is admin', async () => { + const ast = { type: 'root', name: 'test-collection', children: [] } as unknown as AST; + const schema = { collections: {} } as unknown as SchemaOverview; + const accountability = { admin: true } as unknown as Accountability; + + await expect(async () => + processAst({ action: 'read', accountability, ast }, { schema } as Context), + ).rejects.toThrowError(ForbiddenError); +}); + +test('Validates all paths in AST and throws if no permissions match', async () => { + const ast = { type: 'root', name: 'test-collection', children: [] } as unknown as AST; + const schema = { collections: { 'test-collection': {} } as unknown as SchemaOverview } as 
unknown as SchemaOverview; + const accountability = { user: null, roles: [] } as unknown as Accountability; + + vi.mocked(fetchPolicies).mockResolvedValue(['test-policy-1']); + + await expect( + async () => await processAst({ action: 'read', ast, accountability }, { schema } as Context), + ).rejects.toThrowError(ForbiddenError); + + expect(fetchPermissions).toHaveBeenCalledWith( + { + accountability, + action: 'read', + policies: ['test-policy-1'], + collections: ['test-collection'], + }, + { + schema, + }, + ); +}); + +test('Injects permission cases for the provided AST', async () => { + const ast = { + type: 'root', + name: 'test-collection', + children: [ + { + type: 'field', + fieldKey: 'test-field-a', + name: 'test-field-a', + }, + ], + } as unknown as AST; + + const schema = { + collections: { + 'test-collection': { + fields: { + 'test-field-a': {}, + }, + }, + }, + } as unknown as SchemaOverview; + + const accountability = { user: null, roles: [] } as unknown as Accountability; + + vi.mocked(fetchPolicies).mockResolvedValue(['test-policy-1']); + + vi.mocked(fetchPermissions).mockResolvedValue([ + { + policy: 'test-policy-1', + collection: 'test-collection', + action: 'read', + fields: ['*'], + permissions: { status: { _eq: 'published' } }, + validation: null, + presets: null, + }, + ]); + + await processAst({ ast, action: 'read', accountability }, { schema } as Context); + + expect(ast).toEqual({ + type: 'root', + name: 'test-collection', + cases: [ + { + status: { + _eq: 'published', + }, + }, + ], + children: [ + { + type: 'field', + fieldKey: 'test-field-a', + name: 'test-field-a', + whenCase: [0], + }, + ], + }); +}); diff --git a/api/src/permissions/modules/process-ast/process-ast.ts b/api/src/permissions/modules/process-ast/process-ast.ts new file mode 100644 index 0000000000..a364656bce --- /dev/null +++ b/api/src/permissions/modules/process-ast/process-ast.ts @@ -0,0 +1,67 @@ +import type { Accountability, PermissionsAction } from '@directus/types'; 
+import type { AST } from '../../../types/ast.js'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; +import { fieldMapFromAst } from './lib/field-map-from-ast.js'; +import { injectCases } from './lib/inject-cases.js'; +import type { FieldMap } from './types.js'; +import { collectionsInFieldMap } from './utils/collections-in-field-map.js'; +import { validatePathPermissions } from './utils/validate-path/validate-path-permissions.js'; +import { validatePathExistence } from './utils/validate-path/validate-path-existence.js'; + +export interface ProcessAstOptions { + ast: AST; + action: PermissionsAction; + accountability: Accountability | null; +} + +export async function processAst(options: ProcessAstOptions, context: Context) { + // FieldMap is a Map of paths in the AST, with each path containing the collection and fields in + // that collection that the AST path tries to access + const fieldMap: FieldMap = fieldMapFromAst(options.ast, context.schema); + const collections = collectionsInFieldMap(fieldMap); + + if (!options.accountability || options.accountability.admin) { + // Validate the field existence, even if no permissions apply to the current accountability + for (const [path, { collection, fields }] of [...fieldMap.read.entries(), ...fieldMap.other.entries()]) { + validatePathExistence(path, collection, fields, context.schema); + } + + return options.ast; + } + + const policies = await fetchPolicies(options.accountability, context); + + const permissions = await fetchPermissions( + { action: options.action, policies, collections, accountability: options.accountability }, + context, + ); + + const readPermissions = + options.action === 'read' + ? 
permissions + : await fetchPermissions( + { action: 'read', policies, collections, accountability: options.accountability }, + context, + ); + + // Validate field existence first + for (const [path, { collection, fields }] of [...fieldMap.read.entries(), ...fieldMap.other.entries()]) { + validatePathExistence(path, collection, fields, context.schema); + } + + // Validate permissions for the fields + for (const [path, { collection, fields }] of fieldMap.other.entries()) { + validatePathPermissions(path, permissions, collection, fields); + } + + // Validate permission for read only fields + for (const [path, { collection, fields }] of fieldMap.read.entries()) { + validatePathPermissions(path, readPermissions, collection, fields); + } + + injectCases(options.ast, permissions); + + return options.ast; +} diff --git a/api/src/permissions/modules/process-ast/types.ts b/api/src/permissions/modules/process-ast/types.ts new file mode 100644 index 0000000000..2873a9156a --- /dev/null +++ b/api/src/permissions/modules/process-ast/types.ts @@ -0,0 +1,17 @@ +export type CollectionKey = string; +export type FieldKey = string; +export type QueryPath = string[]; + +/** + * Key is dot-notation QueryPath, f.e. `category.created_by`. 
+ * Value contains collection context for that path, and fields fetched within + */ +export type FieldMapEntries = Map }>; + +/** + * FieldMapEntries that require only read permissions and those that require action specific permissions + */ +export type FieldMap = { + read: FieldMapEntries; + other: FieldMapEntries; +}; diff --git a/api/src/permissions/modules/process-ast/utils/collections-in-field-map.test.ts b/api/src/permissions/modules/process-ast/utils/collections-in-field-map.test.ts new file mode 100644 index 0000000000..4f14349f62 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/collections-in-field-map.test.ts @@ -0,0 +1,12 @@ +import { expect, test } from 'vitest'; +import type { FieldMap } from '../types.js'; +import { collectionsInFieldMap } from './collections-in-field-map.js'; + +test('Returns set of collections in given map', () => { + const fieldMap: FieldMap = { + other: new Map([['relation', { collection: 'test-collection-1', fields: new Set() }]]), + read: new Map([['', { collection: 'test-collection-2', fields: new Set() }]]), + }; + + expect(collectionsInFieldMap(fieldMap)).toEqual(['test-collection-1', 'test-collection-2']); +}); diff --git a/api/src/permissions/modules/process-ast/utils/collections-in-field-map.ts b/api/src/permissions/modules/process-ast/utils/collections-in-field-map.ts new file mode 100644 index 0000000000..1e84e4d998 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/collections-in-field-map.ts @@ -0,0 +1,11 @@ +import type { CollectionKey, FieldMap } from '../types.js'; + +export function collectionsInFieldMap(fieldMap: FieldMap): CollectionKey[] { + const collections: Set = new Set(); + + for (const { collection } of [...fieldMap.other.values(), ...fieldMap.read.values()]) { + collections.add(collection); + } + + return Array.from(collections); +} diff --git a/api/src/permissions/modules/process-ast/utils/dedupe-access.test.ts 
b/api/src/permissions/modules/process-ast/utils/dedupe-access.test.ts new file mode 100644 index 0000000000..95788a9438 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/dedupe-access.test.ts @@ -0,0 +1,85 @@ +import type { DeepPartial, Permission } from '@directus/types'; +import { expect, test } from 'vitest'; +import { dedupeAccess } from './dedupe-access.js'; + +test('Merges field sets where access rules are identical', () => { + const input: DeepPartial[] = [ + { + permissions: {}, + fields: ['test-field-a', 'test-field-b'], + }, + { + permissions: {}, + fields: ['test-field-b', 'test-field-c'], + }, + { + permissions: { status: { _eq: 'published' } }, + fields: ['test-field-excluded'], + }, + ]; + + const output = dedupeAccess(input as Permission[]); + + expect(output).toEqual([ + { + rule: {}, + fields: new Set(['test-field-a', 'test-field-b', 'test-field-c']), + }, + { + rule: { status: { _eq: 'published' } }, + fields: new Set(['test-field-excluded']), + }, + ]); +}); + +test('Treats null and {} as {}', () => { + const input: DeepPartial[] = [ + { + permissions: null, + fields: ['test-field-a', 'test-field-b'], + }, + { + permissions: {}, + fields: ['test-field-b', 'test-field-c'], + }, + { + permissions: { status: { _eq: 'published' } }, + fields: ['test-field-excluded'], + }, + ]; + + const output = dedupeAccess(input as Permission[]); + + expect(output).toEqual([ + { + rule: {}, + fields: new Set(['test-field-a', 'test-field-b', 'test-field-c']), + }, + { + rule: { status: { _eq: 'published' } }, + fields: new Set(['test-field-excluded']), + }, + ]); +}); + +test('Merges rules where rule is identical but ordered differently', () => { + const input: DeepPartial[] = [ + { + permissions: { _and: [{ a: { _eq: 1 } }, { b: { _eq: 2 } }] }, + fields: ['test-field-a', 'test-field-b'], + }, + { + permissions: { _and: [{ b: { _eq: 2 } }, { a: { _eq: 1 } }] }, + fields: ['test-field-b', 'test-field-c'], + }, + ]; + + const output = 
dedupeAccess(input as Permission[]); + + expect(output).toEqual([ + { + rule: { _and: [{ a: { _eq: 1 } }, { b: { _eq: 2 } }] }, + fields: new Set(['test-field-a', 'test-field-b', 'test-field-c']), + }, + ]); +}); diff --git a/api/src/permissions/modules/process-ast/utils/dedupe-access.ts b/api/src/permissions/modules/process-ast/utils/dedupe-access.ts new file mode 100644 index 0000000000..adbd531d9e --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/dedupe-access.ts @@ -0,0 +1,38 @@ +import type { Filter, Permission } from '@directus/types'; +import hash from 'object-hash'; + +/** + * Deduplicate the permissions sets by merging the field sets based on the access control rules + * (`permissions` in Permission rows) + * + * This allows the cases injection to be more efficient by not having to generate duplicate + * case/when clauses for permission sets where the rule access is identical + */ +export function dedupeAccess(permissions: Permission[]): { rule: Filter; fields: Set }[] { + // Map of `ruleHash: fields[]` + const map: Map }> = new Map(); + + for (const permission of permissions) { + const rule = permission.permissions ?? {}; + + // Two JS objects can't be equality checked. Object-hash will resort any nested arrays + // deterministically meaning that this can be used to compare two rule sets where the array + // order does not matter + const ruleHash = hash(rule, { + algorithm: 'passthrough', + unorderedArrays: true, + }); + + if (map.has(ruleHash) === false) { + map.set(ruleHash, { rule, fields: new Set() }); + } + + const info = map.get(ruleHash)!; + + for (const field of permission.fields ?? 
[]) { + info.fields.add(field); + } + } + + return Array.from(map.values()); +} diff --git a/api/src/permissions/modules/process-ast/utils/extract-paths-from-query.test.ts b/api/src/permissions/modules/process-ast/utils/extract-paths-from-query.test.ts new file mode 100644 index 0000000000..774c6295d0 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/extract-paths-from-query.test.ts @@ -0,0 +1,94 @@ +import type { Query } from '@directus/types'; +import { expect, test } from 'vitest'; +import { extractPathsFromQuery } from './extract-paths-from-query.js'; + +test('Returns empty lists when query does not contain filter sort or aggregate', () => { + expect(extractPathsFromQuery({})).toEqual({ paths: [], readOnlyPaths: [] }); +}); + +test('Returns flattened filter paths if filter exists', () => { + const query: Query = { + filter: { + author: { + name: { + _eq: 'Rijk', + }, + }, + }, + }; + + expect(extractPathsFromQuery(query).readOnlyPaths).toEqual([['author', 'name']]); +}); + +test('Returns sort values split on `.`', () => { + const query: Query = { + sort: ['title', 'author.age'], + }; + + expect(extractPathsFromQuery(query).readOnlyPaths).toEqual([['title'], ['author', 'age']]); +}); + +test('Drops - from sort values', () => { + const query: Query = { + sort: ['-title'], + }; + + expect(extractPathsFromQuery(query).readOnlyPaths).toEqual([['title']]); +}); + +test('Returns fields used in aggregation', () => { + const query: Query = { + aggregate: { + avg: ['price'], + countDistinct: ['id', 'author.age'], + }, + }; + + expect(extractPathsFromQuery(query).paths).toEqual([['price'], ['id'], ['author', 'age']]); +}); + +test('Returns fields used in grouping', () => { + const query: Query = { + group: ['category', 'author.email'], + }; + + expect(extractPathsFromQuery(query).paths).toEqual([['category'], ['author', 'email']]); +}); + +test('Returns only unique field paths', () => { + const query: Query = { + aggregate: { + countDistinct: ['category', 
'author.email'], + }, + group: ['category', 'author.email'], + }; + + expect(extractPathsFromQuery(query).paths).toEqual([['category'], ['author', 'email']]); +}); + +test('Returns only unique filter paths', () => { + const query: Query = { + filter: { + _or: [ + { + author: { _eq: 'Rijk' }, + }, + { + author: { _eq: 'Ben' }, + }, + ], + }, + }; + + expect(extractPathsFromQuery(query).readOnlyPaths).toEqual([['author']]); +}); + +test('Does not include wildcard field from aggregate', () => { + const query: Query = { + aggregate: { + count: ['*'], + }, + }; + + expect(extractPathsFromQuery(query).paths).toEqual([]); +}); diff --git a/api/src/permissions/modules/process-ast/utils/extract-paths-from-query.ts b/api/src/permissions/modules/process-ast/utils/extract-paths-from-query.ts new file mode 100644 index 0000000000..29140d6ec9 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/extract-paths-from-query.ts @@ -0,0 +1,72 @@ +import type { Query } from '@directus/types'; +import { isEqual, uniqWith } from 'lodash-es'; +import type { FieldKey } from '../types.js'; +import { flattenFilter } from './flatten-filter.js'; + +/** + * Converts the passed Query object into a unique list of path arrays, for example: + * + * ``` + * [ + * ['author', 'age'], + * ['category'] + * ] + * ``` + */ +export function extractPathsFromQuery(query: Query) { + /** + * All nested paths used in the current query scope. + * This is generated by flattening the filters and adding in the used sort/aggregate fields. + */ + const paths: FieldKey[][] = []; + const readOnlyPaths: FieldKey[][] = []; + + if (query.filter) { + flattenFilter(readOnlyPaths, query.filter); + } + + if (query.sort) { + for (const field of query.sort) { + // Sort can have dot notation fields for sorting on m2o values Sort fields can start with + // `-` to indicate descending order, which should be dropped for permissions checks + + const parts = field.split('.').map((field) => (field.startsWith('-') ? 
field.substring(1) : field)); + + if (query.aggregate && parts.length > 0 && parts[0]! in query.aggregate) { + // If query is an aggregate query and the first part is a requested aggregate operation, ignore the whole field. + // The correct field is extracted into the field map when processing the `query.aggregate` fields. + continue; + } + + readOnlyPaths.push(parts); + } + } + + if (query.aggregate) { + for (const fields of Object.values(query.aggregate)) { + for (const field of fields) { + if (field === '*') { + // Don't add wildcard field to the paths + continue; + } + + // Aggregate doesn't currently support aggregating on nested fields, but it doesn't hurt + // to standardize it in the validation layer + paths.push(field.split('.')); + } + } + } + + if (query.group) { + for (const field of query.group) { + // Grouping doesn't currently support grouping on nested fields, but it doesn't hurt to + // standardize it in the validation layer + paths.push(field.split('.')); + } + } + + return { + paths: uniqWith(paths, isEqual), + readOnlyPaths: uniqWith(readOnlyPaths, isEqual), + }; +} diff --git a/api/src/permissions/modules/process-ast/utils/find-related-collection.test.ts b/api/src/permissions/modules/process-ast/utils/find-related-collection.test.ts new file mode 100644 index 0000000000..0aa2ec0928 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/find-related-collection.test.ts @@ -0,0 +1,46 @@ +import type { DeepPartial, SchemaOverview } from '@directus/types'; +import { expect, test } from 'vitest'; +import { findRelatedCollection } from './find-related-collection.js'; + +test('Returns null if schema overview does not contain relation for given field in given collection', () => { + const schema: DeepPartial = { + relations: [], + }; + + expect(findRelatedCollection('test-collection', 'test-field', schema as SchemaOverview)).toBe(null); +}); + +test('Returns `related_collection` from relationship if current field is m2o fk', () => { + const 
schema: DeepPartial = { + relations: [ + { + collection: 'test-collection', + field: 'test-field', + related_collection: 'test-related-collection', + }, + ], + }; + + expect(findRelatedCollection('test-collection', 'test-field', schema as SchemaOverview)).toBe( + 'test-related-collection', + ); +}); + +test('Returns `collection` from relationship if current field is o2m alias', () => { + const schema: DeepPartial = { + relations: [ + { + collection: 'test-related-collection', + field: 'test-related-field', + related_collection: 'test-collection', + meta: { + one_field: 'test-field', + }, + }, + ], + }; + + expect(findRelatedCollection('test-collection', 'test-field', schema as SchemaOverview)).toBe( + 'test-related-collection', + ); +}); diff --git a/api/src/permissions/modules/process-ast/utils/find-related-collection.ts b/api/src/permissions/modules/process-ast/utils/find-related-collection.ts new file mode 100644 index 0000000000..0a95ff1da2 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/find-related-collection.ts @@ -0,0 +1,19 @@ +import type { SchemaOverview } from '@directus/types'; +import { getRelationInfo } from '../../../../utils/get-relation-info.js'; +import type { CollectionKey, FieldKey } from '../types.js'; + +export function findRelatedCollection( + collection: CollectionKey, + field: FieldKey, + schema: SchemaOverview, +): CollectionKey | null { + const { relation } = getRelationInfo(schema.relations, collection, field); + + if (!relation) return null; + + const isO2m = relation.related_collection === collection; + + const relatedCollectionName = isO2m ? 
relation.collection : relation.related_collection!; + + return relatedCollectionName; +} diff --git a/api/src/permissions/modules/process-ast/utils/flatten-filter.test.ts b/api/src/permissions/modules/process-ast/utils/flatten-filter.test.ts new file mode 100644 index 0000000000..aee2a6afba --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/flatten-filter.test.ts @@ -0,0 +1,198 @@ +import type { Filter, Query } from '@directus/types'; +import { expect, test } from 'vitest'; +import type { FieldKey } from '../types.js'; +import { flattenFilter } from './flatten-filter.js'; + +test('Returns early when no filter is passed', () => { + const paths: FieldKey[][] = []; + + flattenFilter(paths, undefined); + + expect(paths).toEqual([]); +}); + +test('Flattens single level', () => { + const paths: FieldKey[][] = []; + + const filter: Query['filter'] = { + author: { + _eq: 1, + }, + }; + + flattenFilter(paths, filter); + + expect(paths).toEqual([['author']]); +}); + +test('Flattens _eq shortcut', () => { + const paths: FieldKey[][] = []; + + const filter: Query['filter'] = { + author: 'Rijk', + } as Filter; + + flattenFilter(paths, filter); + + expect(paths).toEqual([['author']]); +}); + +test.todo('Flattens single level and handles underscore in field names', () => { + const paths: FieldKey[][] = []; + + const filter: Query['filter'] = { + _author: { + _eq: 1, + }, + }; + + flattenFilter(paths, filter); + + expect(paths).toEqual([['_author']]); +}); + +test('Flattens nested fields', () => { + const paths: FieldKey[][] = []; + + const filter: Query['filter'] = { + author: { + name: { + _eq: 'Rijk', + }, + }, + }; + + flattenFilter(paths, filter); + + expect(paths).toEqual([['author', 'name']]); +}); + +test('Flattens logical groups', () => { + const paths: FieldKey[][] = []; + + const filter: Query['filter'] = { + _and: [ + { + author: { + name: { + _eq: 'Rijk', + }, + }, + }, + { + author: { + age: { + _eq: 28, + }, + }, + }, + ], + }; + + 
flattenFilter(paths, filter); + + expect(paths).toEqual([ + ['author', 'age'], + ['author', 'name'], + ]); +}); + +test('Flattens nested logical groups', () => { + const paths: FieldKey[][] = []; + + const filter: Query['filter'] = { + _and: [ + { + _or: [ + { + author: { + name: { + _eq: 'Rijk', + }, + }, + }, + { + _and: [ + { + timestamp: { + _gte: '2024-04-12', + }, + }, + { + author: { + age: { + _gt: 21, + }, + }, + }, + ], + }, + ], + }, + ], + }; + + flattenFilter(paths, filter); + + expect(paths).toEqual([['author', 'age'], ['timestamp'], ['author', 'name']]); +}); + +test('Leaves function usage', () => { + const paths: FieldKey[][] = []; + + const filter: Query['filter'] = { + 'year(timestamp)': { + _eq: 2024, + }, + }; + + flattenFilter(paths, filter); + + expect(paths).toEqual([['year(timestamp)']]); +}); + +test.each(['_and', '_or'])('Checks inside of logical operator (%s)', (operator) => { + const paths: FieldKey[][] = []; + + const filter = { + [operator]: [ + { + author: { _eq: 'Rijk' }, + published: { year: { _eq: 2024 } }, + }, + ], + } as Query['filter']; + + flattenFilter(paths, filter); + + expect(paths).toEqual([['published', 'year'], ['author']]); +}); + +test.each(['_some', '_none'])('Checks inside of relational operator (%s)', (operator) => { + const paths: FieldKey[][] = []; + + const filter = { + [operator]: { + author: { _eq: 'Rijk' }, + published: { year: { _eq: 2024 } }, + }, + } as Query['filter']; + + flattenFilter(paths, filter); + + expect(paths).toEqual([['published', 'year'], ['author']]); +}); + +test('Does not look into operators that might contain objects', () => { + const paths: FieldKey[][] = []; + + const filter = { + _intersects: { + type: 'Point', + }, + } as Query['filter']; + + flattenFilter(paths, filter); + + expect(paths).toEqual([]); +}); diff --git a/api/src/permissions/modules/process-ast/utils/flatten-filter.ts b/api/src/permissions/modules/process-ast/utils/flatten-filter.ts new file mode 100644 index 
0000000000..553eef25ac --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/flatten-filter.ts @@ -0,0 +1,36 @@ +import type { Query } from '@directus/types'; +import type { FieldKey } from '../types.js'; + +export function flattenFilter(paths: FieldKey[][], filter: Query['filter']) { + if (!filter) return; + + const stack: { current: Record | string; path: string[] }[] = [{ current: filter, path: [] }]; + + while (stack.length > 0) { + const { current, path } = stack.pop()!; + + if (typeof current === 'object' && current !== null) { + // If the current nested value is an array, we ignore the array order and flatten all + // nested objects + const isArray = Array.isArray(current); + + for (const key in current) { + if (!key.startsWith('_') || key === '_and' || key === '_or' || key === '_some' || key === '_none') { + // Only deepen the path if the current value can contain more keys + stack.push({ + current: current[key] as Record | string, + path: isArray ? path : [...path, key], + }); + } else { + // Ignore all operators and logical grouping in the field paths + const parts = path.filter((part) => part.startsWith('_') === false); + if (parts.length > 0) paths.push(parts); + } + } + } else { + // Ignore all operators and logical grouping in the field paths + const parts = path.filter((part) => part.startsWith('_') === false); + if (parts.length > 0) paths.push(parts); + } + } +} diff --git a/api/src/permissions/modules/process-ast/utils/format-a2o-key.test.ts b/api/src/permissions/modules/process-ast/utils/format-a2o-key.test.ts new file mode 100644 index 0000000000..5164083a71 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/format-a2o-key.test.ts @@ -0,0 +1,6 @@ +import { expect, test } from 'vitest'; +import { formatA2oKey } from './format-a2o-key.js'; + +test('Joins strings with `:` character', () => { + expect(formatA2oKey('item', 'headings')).toBe('item:headings'); +}); diff --git 
a/api/src/permissions/modules/process-ast/utils/format-a2o-key.ts b/api/src/permissions/modules/process-ast/utils/format-a2o-key.ts new file mode 100644 index 0000000000..da66103f9b --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/format-a2o-key.ts @@ -0,0 +1,3 @@ +export function formatA2oKey(fieldKey: string, collection: string) { + return `${fieldKey}:${collection}`; +} diff --git a/api/src/permissions/modules/process-ast/utils/get-info-for-path.test.ts b/api/src/permissions/modules/process-ast/utils/get-info-for-path.test.ts new file mode 100644 index 0000000000..decf84fda0 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/get-info-for-path.test.ts @@ -0,0 +1,25 @@ +import { expect, test } from 'vitest'; +import type { FieldMap } from '../types.js'; +import { getInfoForPath } from './get-info-for-path.js'; + +test.each(['other', 'read'])('Returns existing info set if exists for group %s', (group) => { + const fieldMap: FieldMap = { + [group]: new Map([['', { collection: 'test-collection', fields: new Set() }]]), + } as FieldMap; + + expect(getInfoForPath(fieldMap, group as keyof FieldMap, [], 'test-collection')).toBe( + fieldMap[group as keyof FieldMap].get(''), + ); +}); + +test.each(['other', 'read'])( + 'Seeds the map location with an info object if it does not exist yet for group %s', + (group) => { + const fieldMap: FieldMap = { read: new Map(), other: new Map() }; + + const output = getInfoForPath(fieldMap, group as keyof FieldMap, [], 'test-collection'); + + expect(output).toEqual({ collection: 'test-collection', fields: new Set() }); + expect(fieldMap[group as keyof FieldMap].get('')).toBe(output); + }, +); diff --git a/api/src/permissions/modules/process-ast/utils/get-info-for-path.ts b/api/src/permissions/modules/process-ast/utils/get-info-for-path.ts new file mode 100644 index 0000000000..8e8e66fc61 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/get-info-for-path.ts @@ -0,0 +1,11 @@ +import type { 
CollectionKey, FieldMap, QueryPath } from '../types.js'; + +export function getInfoForPath(fieldMap: FieldMap, group: keyof FieldMap, path: QueryPath, collection: CollectionKey) { + const pathStr = path.join('.'); + + if (fieldMap[group].has(pathStr) === false) { + fieldMap[group].set(pathStr, { collection, fields: new Set() }); + } + + return fieldMap[group].get(pathStr)!; +} diff --git a/api/src/permissions/modules/process-ast/utils/has-item-permissions.test.ts b/api/src/permissions/modules/process-ast/utils/has-item-permissions.test.ts new file mode 100644 index 0000000000..f988a49c94 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/has-item-permissions.test.ts @@ -0,0 +1,15 @@ +import type { Permission } from '@directus/types'; +import { expect, test } from 'vitest'; +import { hasItemPermissions } from './has-item-permissions.js'; + +test('Returns false if permissions are null', () => { + expect(hasItemPermissions({ permissions: null } as unknown as Permission)).toBe(false); +}); + +test('Returns false if permissions are empty object', () => { + expect(hasItemPermissions({ permissions: {} } as unknown as Permission)).toBe(false); +}); + +test('Returns true if permissions are object with 1 or more keys', () => { + expect(hasItemPermissions({ permissions: { status: { _eq: 'published' } } } as unknown as Permission)).toBe(true); +}); diff --git a/api/src/permissions/modules/process-ast/utils/has-item-permissions.ts b/api/src/permissions/modules/process-ast/utils/has-item-permissions.ts new file mode 100644 index 0000000000..03fccbe391 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/has-item-permissions.ts @@ -0,0 +1,5 @@ +import type { Permission } from '@directus/types'; + +export function hasItemPermissions(permission: Permission) { + return permission.permissions !== null && Object.keys(permission.permissions).length > 0; +} diff --git a/api/src/permissions/modules/process-ast/utils/stringify-query-path.test.ts
b/api/src/permissions/modules/process-ast/utils/stringify-query-path.test.ts new file mode 100644 index 0000000000..7a4b080030 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/stringify-query-path.test.ts @@ -0,0 +1,6 @@ +import { expect, test } from 'vitest'; +import { stringifyQueryPath } from './stringify-query-path.js'; + +test('Joins given path with `.`', () => { + expect(stringifyQueryPath(['test', 'path'])).toBe('test.path'); +}); diff --git a/api/src/permissions/modules/process-ast/utils/stringify-query-path.ts b/api/src/permissions/modules/process-ast/utils/stringify-query-path.ts new file mode 100644 index 0000000000..1546f6eeb7 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/stringify-query-path.ts @@ -0,0 +1,5 @@ +import type { QueryPath } from '../types.js'; + +export function stringifyQueryPath(queryPath: QueryPath): string { + return queryPath.join('.'); +} diff --git a/api/src/permissions/modules/process-ast/utils/validate-path/create-error.ts b/api/src/permissions/modules/process-ast/utils/validate-path/create-error.ts new file mode 100644 index 0000000000..ff8f8e7ab3 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/validate-path/create-error.ts @@ -0,0 +1,22 @@ +import { type DirectusError, ForbiddenError } from '@directus/errors'; + +export function createCollectionForbiddenError(path: string, collection: string): DirectusError { + const pathSuffix = path === '' ? 'root' : `"${path}"`; + + return new ForbiddenError({ + reason: `You don't have permission to access collection "${collection}" or it does not exist. Queried in ${pathSuffix}.`, + }); +} + +export function createFieldsForbiddenError(path: string, collection: string, fields: string[]): DirectusError { + const pathSuffix = path === '' ? 'root' : `"${path}"`; + + const fieldStr = fields.map((field) => `"${field}"`).join(', '); + + return new ForbiddenError({ + reason: + fields.length === 1 + ? 
`You don't have permission to access field ${fieldStr} in collection "${collection}" or it does not exist. Queried in ${pathSuffix}.` + : `You don't have permission to access fields ${fieldStr} in collection "${collection}" or they do not exist. Queried in ${pathSuffix}.`, + }); +} diff --git a/api/src/permissions/modules/process-ast/utils/validate-path/validate-path-existence.test.ts b/api/src/permissions/modules/process-ast/utils/validate-path/validate-path-existence.test.ts new file mode 100644 index 0000000000..ac1a726e31 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/validate-path/validate-path-existence.test.ts @@ -0,0 +1,52 @@ +import { ForbiddenError } from '@directus/errors'; +import type { SchemaOverview } from '@directus/types'; +import { expect, test } from 'vitest'; +import { validatePathExistence } from './validate-path-existence.js'; + +test('Throws if collection does not exist in the schema', () => { + const schema = { collections: {} } as unknown as SchemaOverview; + + expect(() => validatePathExistence('test.path', 'test-collection', new Set(), schema)).toThrowError(ForbiddenError); +}); + +test('Throws if field is not present in the schema', () => { + const schema = { + collections: { + 'test-collection': { + fields: {}, + }, + }, + } as unknown as SchemaOverview; + + expect(() => validatePathExistence('test.path', 'test-collection', new Set(['test-field-a']), schema)).toThrowError( + ForbiddenError, + ); +}); + +test('Throws if fields are not present in the schema', () => { + const schema = { + collections: { + 'test-collection': { + fields: {}, + }, + }, + } as unknown as SchemaOverview; + + expect(() => + validatePathExistence('test.path', 'test-collection', new Set(['test-field-a', 'test-field-b']), schema), + ).toThrowError(ForbiddenError); +}); + +test('Returns without throwing an error if the field is present in the schema', () => { + const schema = { + collections: { + 'test-collection': { + fields: { + 'test-field-a': 
{}, + }, + }, + }, + } as unknown as SchemaOverview; + + expect(() => validatePathExistence('test.path', 'test-collection', new Set(['test-field-a']), schema)).not.toThrow(); +}); diff --git a/api/src/permissions/modules/process-ast/utils/validate-path/validate-path-existence.ts b/api/src/permissions/modules/process-ast/utils/validate-path/validate-path-existence.ts new file mode 100644 index 0000000000..a6925e080e --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/validate-path/validate-path-existence.ts @@ -0,0 +1,18 @@ +import type { SchemaOverview } from '@directus/types'; +import { createCollectionForbiddenError, createFieldsForbiddenError } from './create-error.js'; + +export function validatePathExistence(path: string, collection: string, fields: Set, schema: SchemaOverview) { + const collectionInfo = schema.collections[collection]; + + if (collectionInfo === undefined) { + throw createCollectionForbiddenError(path, collection); + } + + const requestedFields = Array.from(fields); + + const nonExistentFields = requestedFields.filter((field) => collectionInfo.fields[field] === undefined); + + if (nonExistentFields.length > 0) { + throw createFieldsForbiddenError(path, collection, nonExistentFields); + } +} diff --git a/api/src/permissions/modules/process-ast/utils/validate-path/validate-path-permissions.test.ts b/api/src/permissions/modules/process-ast/utils/validate-path/validate-path-permissions.test.ts new file mode 100644 index 0000000000..3a5196cbcb --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/validate-path/validate-path-permissions.test.ts @@ -0,0 +1,41 @@ +import type { Permission } from '@directus/types'; +import { expect, test } from 'vitest'; +import { validatePathPermissions } from './validate-path-permissions.js'; +import { ForbiddenError } from '@directus/errors'; + +test('Throws if no permissions given for given collection', () => { + expect(() => validatePathPermissions('test.path', [], 'test-collection', new 
Set())).toThrowError(ForbiddenError); +}); + +test('Returns without throwing if permission fields contain `*`', () => { + expect(() => + validatePathPermissions( + 'test.path', + [{ collection: 'test-collection', fields: ['*'] } as Permission], + 'test-collection', + new Set(['test-field-b']), + ), + ).not.toThrow(); +}); + +test('Throws if field is requested but not allowed in permissions', () => { + expect(() => + validatePathPermissions( + 'test.path', + [{ collection: 'test-collection', fields: ['test-field-a'] } as Permission], + 'test-collection', + new Set(['test-field-b']), + ), + ).toThrowError(ForbiddenError); +}); + +test('Throws if fields are requested but not allowed in permissions', () => { + expect(() => + validatePathPermissions( + 'test.path', + [{ collection: 'test-collection', fields: ['test-field-a'] } as Permission], + 'test-collection', + new Set(['test-field-b', 'test-field-c']), + ), + ).toThrowError(ForbiddenError); +}); diff --git a/api/src/permissions/modules/process-ast/utils/validate-path/validate-path-permissions.ts b/api/src/permissions/modules/process-ast/utils/validate-path/validate-path-permissions.ts new file mode 100644 index 0000000000..08bc1f4fd0 --- /dev/null +++ b/api/src/permissions/modules/process-ast/utils/validate-path/validate-path-permissions.ts @@ -0,0 +1,43 @@ +import type { Permission } from '@directus/types'; +import { createCollectionForbiddenError, createFieldsForbiddenError } from './create-error.js'; + +export function validatePathPermissions( + path: string, + permissions: Permission[], + collection: string, + fields: Set, +) { + const permissionsForCollection = permissions.filter((permission) => permission.collection === collection); + + if (permissionsForCollection.length === 0) { + throw createCollectionForbiddenError(path, collection); + } + + // Set of all fields that are allowed to be queried combined + const allowedFields: Set = new Set(); + + for (const { fields } of permissionsForCollection) { + if
(!fields) { + continue; + } + + for (const field of fields) { + if (field === '*') { + // Early exit in case all fields are allowed + return; + } + + allowedFields.add(field); + } + } + + const requestedFields = Array.from(fields); + + const forbiddenFields = allowedFields.has('*') + ? [] + : requestedFields.filter((field) => allowedFields.has(field) === false); + + if (forbiddenFields.length > 0) { + throw createFieldsForbiddenError(path, collection, forbiddenFields); + } +} diff --git a/api/src/permissions/modules/process-payload/lib/is-field-nullable.test.ts b/api/src/permissions/modules/process-payload/lib/is-field-nullable.test.ts new file mode 100644 index 0000000000..162863952e --- /dev/null +++ b/api/src/permissions/modules/process-payload/lib/is-field-nullable.test.ts @@ -0,0 +1,27 @@ +import type { FieldOverview } from '@directus/types'; +import { expect, test } from 'vitest'; +import { isFieldNullable } from './is-field-nullable.js'; + +test('Returns true if "nullable" is set on the field', () => { + const field = { nullable: true } as FieldOverview; + + expect(isFieldNullable(field)).toBe(true); +}); + +test('Returns true if "generated" is set on the field', () => { + const field = { nullable: false, generated: true } as FieldOverview; + + expect(isFieldNullable(field)).toBe(true); +}); + +test('Returns true if field has a special flag that generates a value', () => { + const field = { nullable: false, generated: false, special: ['uuid'] } as FieldOverview; + + expect(isFieldNullable(field)).toBe(true); +}); + +test('Returns false if the field does not meet any of the conditions ', () => { + const field = { nullable: false, generated: false, special: [] as string[] } as FieldOverview; + + expect(isFieldNullable(field)).toBe(false); +}); diff --git a/api/src/permissions/modules/process-payload/lib/is-field-nullable.ts b/api/src/permissions/modules/process-payload/lib/is-field-nullable.ts new file mode 100644 index 0000000000..e0e83825bb --- /dev/null +++ 
b/api/src/permissions/modules/process-payload/lib/is-field-nullable.ts @@ -0,0 +1,14 @@ +import type { FieldOverview } from '@directus/types'; +import { GENERATE_SPECIAL } from '../../../../constants.js'; + +/** + * Checks if a given field is allowed to be set to `null`. + */ +export function isFieldNullable(field: FieldOverview) { + if (field.nullable) return true; + if (field.generated) return true; + + const hasGenerateSpecial = GENERATE_SPECIAL.some((name) => field.special.includes(name)); + + return hasGenerateSpecial; +} diff --git a/api/src/permissions/modules/process-payload/process-payload.test.ts b/api/src/permissions/modules/process-payload/process-payload.test.ts new file mode 100644 index 0000000000..92c6f1cc45 --- /dev/null +++ b/api/src/permissions/modules/process-payload/process-payload.test.ts @@ -0,0 +1,290 @@ +import { ForbiddenError } from '@directus/errors'; +import type { Accountability, Permission, PermissionsAction, SchemaOverview } from '@directus/types'; +import { FailedValidationError } from '@directus/validation'; +import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; +import { isFieldNullable } from './lib/is-field-nullable.js'; +import { processPayload } from './process-payload.js'; + +vi.mock('../../lib/fetch-permissions.js'); +vi.mock('../../lib/fetch-policies.js'); +vi.mock('./lib/is-field-nullable.js'); + +vi.mock('../../../services/permissions.js', () => ({ + PermissionsService: vi.fn(), +})); + +vi.mock('../../../services/access.js', () => ({ + AccessService: vi.fn(), +})); + +beforeEach(() => { + vi.mocked(isFieldNullable).mockReturnValue(true); +}); + +afterEach(() => { + vi.resetAllMocks(); +}); + +test('Skips permission checks when admin', async () => { + const payload = {}; + + const schema = { + collections: { + 'collection-a': 
{}, + }, + } as unknown as SchemaOverview; + + const acc = { admin: true } as unknown as Accountability; + + await expect( + processPayload( + { + collection: 'collection-a', + action: 'read', + accountability: acc, + payload, + }, + { schema } as Context, + ), + ).resolves.toEqual(payload); + + expect(fetchPolicies).toHaveBeenCalledTimes(0); + expect(fetchPermissions).toHaveBeenCalledTimes(0); +}); + +test('Throws forbidden error when permissions length is 0', async () => { + const schema = {} as unknown as SchemaOverview; + const acc = { admin: false } as unknown as Accountability; + + vi.mocked(fetchPermissions).mockResolvedValue([]); + + await expect( + processPayload({ accountability: acc, action: 'read', collection: 'collection-a', payload: {} }, { + schema, + } as Context), + ).rejects.toBeInstanceOf(ForbiddenError); +}); + +test('Throws forbidden error if used fields contain field that has no permission', async () => { + const schema = {} as unknown as SchemaOverview; + const acc = { admin: false } as unknown as Accountability; + + vi.mocked(fetchPermissions).mockResolvedValue([{ fields: ['field-a'] } as Permission]); + + await expect( + processPayload( + { + accountability: acc, + action: 'read', + collection: 'collection-a', + payload: { + 'field-b': 'x', + }, + }, + { schema } as Context, + ), + ).rejects.toBeInstanceOf(ForbiddenError); +}); + +describe('Validates against field validation rules', () => { + const schema = { + collections: { + 'collection-a': { + fields: { + 'field-a': { + validation: { + 'field-a': { + _eq: 1, + }, + }, + }, + }, + }, + }, + } as unknown as SchemaOverview; + + const users = [ + { user: 'admin', admin: true }, + { user: 'non-admin', admin: false }, + ]; + + test.each(users)('$user user', async ({ admin }) => { + const acc = { admin } as unknown as Accountability; + + vi.mocked(fetchPermissions).mockResolvedValue([{ fields: ['field-a'], validation: null } as Permission]); + + expect.assertions(2); + + try { + await 
processPayload( + { + accountability: acc, + action: 'read', + collection: 'collection-a', + payload: { + 'field-a': 2, + }, + }, + { schema } as Context, + ); + } catch (errors: any) { + expect(errors.length).toBe(1); + expect(errors[0]).toBeInstanceOf(FailedValidationError); + } + }); +}); + +describe('Injects and validates rules for non-nullable fields', () => { + const schema = { + collections: { + 'collection-a': { + fields: { + 'field-a': { + field: 'field-a', + defaultValue: null, + validation: null, + }, + }, + }, + }, + } as unknown as SchemaOverview; + + const users = [ + { user: 'admin', admin: true }, + { user: 'non-admin', admin: false }, + ]; + + const actions: { action: PermissionsAction }[] = [{ action: 'read' }, { action: 'create' }]; + + describe.each(users)('$user user', async ({ admin }) => { + const acc = { admin } as unknown as Accountability; + + test.each(actions)('$action action', async ({ action }) => { + vi.mocked(isFieldNullable).mockReturnValue(false); + vi.mocked(fetchPermissions).mockResolvedValue([{ fields: ['field-a'], validation: null } as Permission]); + + expect.assertions(2); + + try { + await processPayload( + { + accountability: acc, + action, + collection: 'collection-a', + payload: action === 'create' ? 
{} : { 'field-a': null }, + }, + { schema } as Context, + ); + } catch (errors: any) { + expect(errors.length).toBe(1); + expect(errors[0]).toBeInstanceOf(FailedValidationError); + } + }); + }); +}); + +test('Validates against permission validation rules', async () => { + const schema = { collections: { 'collection-a': { fields: {} } } } as unknown as SchemaOverview; + + const acc = { admin: false } as unknown as Accountability; + + vi.mocked(fetchPermissions).mockResolvedValue([ + { fields: ['field-a'], validation: { 'field-a': { _eq: 1 } } } as unknown as Permission, + ]); + + try { + await processPayload( + { + accountability: acc, + action: 'read', + collection: 'collection-a', + payload: { + 'field-a': 2, + }, + }, + { schema } as Context, + ); + + expect(true).toBe(false); + } catch (errors: any) { + expect(errors.length).toBe(1); + expect(errors[0]).toBeInstanceOf(FailedValidationError); + } +}); + +test('Validates against permission and field validation rules', async () => { + const schema = { + collections: { + 'collection-a': { + fields: { + 'field-a': { + validation: { + 'field-a': { + _eq: 1, + }, + }, + }, + }, + }, + }, + } as unknown as SchemaOverview; + + const acc = { admin: false } as unknown as Accountability; + + vi.mocked(fetchPermissions).mockResolvedValue([ + { fields: ['field-a'], validation: { 'field-a': { _eq: 2 } } } as unknown as Permission, + ]); + + try { + await processPayload( + { + accountability: acc, + action: 'read', + collection: 'collection-a', + payload: { + 'field-a': 3, + }, + }, + { schema } as Context, + ); + + expect(true).toBe(false); + } catch (errors: any) { + expect(errors.length).toBe(2); + expect(errors[0]).toBeInstanceOf(FailedValidationError); + expect(errors[1]).toBeInstanceOf(FailedValidationError); + } +}); + +test('Merges and applies defaults from presets', async () => { + const schema = { collections: { 'collection-a': { fields: {} } } } as unknown as SchemaOverview; + + const acc = { admin: false } as 
unknown as Accountability; + + vi.mocked(fetchPermissions).mockResolvedValue([ + { fields: ['field-a'], validation: null, presets: { 'field-b': 1 } } as unknown as Permission, + { fields: ['field-a', 'field-b'], validation: null, presets: { 'field-c': 2 } } as unknown as Permission, + { fields: ['*'], validation: null, presets: { 'field-b': 3 } } as unknown as Permission, + ]); + + const payloadWithPresets = await processPayload( + { + accountability: acc, + action: 'read', + collection: 'collection-a', + payload: { + 'field-a': 2, + }, + }, + { schema } as Context, + ); + + expect(payloadWithPresets).toEqual({ + 'field-a': 2, + 'field-b': 3, + 'field-c': 2, + }); +}); diff --git a/api/src/permissions/modules/process-payload/process-payload.ts b/api/src/permissions/modules/process-payload/process-payload.ts new file mode 100644 index 0000000000..ecceec11b3 --- /dev/null +++ b/api/src/permissions/modules/process-payload/process-payload.ts @@ -0,0 +1,112 @@ +import { ForbiddenError } from '@directus/errors'; +import type { Accountability, Filter, Item, PermissionsAction } from '@directus/types'; +import { validatePayload } from '@directus/utils'; +import { FailedValidationError, joiValidationErrorItemToErrorExtensions } from '@directus/validation'; +import { assign, difference, uniq } from 'lodash-es'; +import { fetchPermissions } from '../../lib/fetch-permissions.js'; +import { fetchPolicies } from '../../lib/fetch-policies.js'; +import type { Context } from '../../types.js'; +import { isFieldNullable } from './lib/is-field-nullable.js'; + +export interface ProcessPayloadOptions { + accountability: Accountability; + action: PermissionsAction; + collection: string; + payload: Item; +} + +/** + * @note this only validates the top-level fields. 
The expectation is that this function is called + * for each level of nested insert separately + */ +export async function processPayload(options: ProcessPayloadOptions, context: Context) { + let permissions; + let permissionValidationRules: (Filter | null)[] = []; + + if (!options.accountability.admin) { + const policies = await fetchPolicies(options.accountability, context); + + permissions = await fetchPermissions( + { action: options.action, policies, collections: [options.collection], accountability: options.accountability }, + context, + ); + + if (permissions.length === 0) { + throw new ForbiddenError({ + reason: `You don't have permission to "${options.action}" from collection "${options.collection}" or it does not exist.`, + }); + } + + const fieldsAllowed = uniq(permissions.map(({ fields }) => fields ?? []).flat()); + + if (fieldsAllowed.includes('*') === false) { + const fieldsUsed = Object.keys(options.payload); + const notAllowed = difference(fieldsUsed, fieldsAllowed); + + if (notAllowed.length > 0) { + const fieldStr = notAllowed.map((field) => `"${field}"`).join(', '); + + throw new ForbiddenError({ + reason: + notAllowed.length === 1 + ? `You don't have permission to access field ${fieldStr} in collection "${options.collection}" or it does not exist.` + : `You don't have permission to access fields ${fieldStr} in collection "${options.collection}" or they do not exist.`, + }); + } + } + + permissionValidationRules = permissions.map(({ validation }) => validation); + } + + const fields = Object.values(context.schema.collections[options.collection]?.fields ?? 
{}); + + const fieldValidationRules: (Filter | null)[] = []; + + for (const field of fields) { + if (!isFieldNullable(field)) { + const isSubmissionRequired = options.action === 'create' && field.defaultValue === null; + + if (isSubmissionRequired) { + fieldValidationRules.push({ + [field.field]: { + _submitted: true, + }, + }); + } + + fieldValidationRules.push({ + [field.field]: { + _nnull: true, + }, + }); + } + + fieldValidationRules.push(field.validation); + } + + const validationRules = [...fieldValidationRules, ...permissionValidationRules].filter((rule): rule is Filter => { + if (rule === null) return false; + if (Object.keys(rule).length === 0) return false; + return true; + }); + + if (validationRules.length > 0) { + const validationErrors: InstanceType[] = []; + + validationErrors.push( + ...validatePayload({ _and: validationRules }, options.payload) + .map((error) => + error.details.map((details) => new FailedValidationError(joiValidationErrorItemToErrorExtensions(details))), + ) + .flat(), + ); + + if (validationErrors.length > 0) throw validationErrors; + } + + if (!permissions) return options.payload; + + const presets = permissions.map((permission) => permission.presets); + + return assign({}, ...presets, options.payload); +} diff --git a/api/src/permissions/modules/validate-access/lib/validate-collection-access.test.ts b/api/src/permissions/modules/validate-access/lib/validate-collection-access.test.ts new file mode 100644 index 0000000000..d9199e93a0 --- /dev/null +++ b/api/src/permissions/modules/validate-access/lib/validate-collection-access.test.ts @@ -0,0 +1,65 @@ +import type { Accountability, Permission } from '@directus/types'; +import { beforeEach, expect, test, vi } from 'vitest'; +import { fetchPermissions } from '../../../lib/fetch-permissions.js'; +import { fetchPolicies } from '../../../lib/fetch-policies.js'; +import type { Context } from '../../../types.js'; +import { validateCollectionAccess } from 
'./validate-collection-access.js'; + +vi.mock('../../../lib/fetch-permissions.js'); +vi.mock('../../../lib/fetch-policies.js'); + +beforeEach(() => { + vi.clearAllMocks(); + + vi.mocked(fetchPolicies).mockResolvedValue([]); +}); + +test('Returns false if permissions is an empty array', async () => { + vi.mocked(fetchPermissions).mockResolvedValue([]); + + const accountability = {} as unknown as Accountability; + + const res = await validateCollectionAccess( + { accountability, action: 'read', collection: 'collection-a' }, + {} as unknown as Context, + ); + + expect(res).toBe(false); + + expect(fetchPolicies).toHaveBeenCalledWith(accountability, {}); + + expect(fetchPermissions).toHaveBeenCalledWith( + { + accountability, + action: 'read', + policies: [], + collections: ['collection-a'], + }, + {}, + ); +}); + +test('Returns true if permissions exist', async () => { + vi.mocked(fetchPermissions).mockResolvedValue([{} as unknown as Permission]); + + const accountability = {} as unknown as Accountability; + + const res = await validateCollectionAccess( + { accountability, action: 'read', collection: 'collection-a' }, + {} as unknown as Context, + ); + + expect(res).toBe(true); + + expect(fetchPolicies).toHaveBeenCalledWith(accountability, {}); + + expect(fetchPermissions).toHaveBeenCalledWith( + { + accountability, + action: 'read', + policies: [], + collections: ['collection-a'], + }, + {}, + ); +}); diff --git a/api/src/permissions/modules/validate-access/lib/validate-collection-access.ts b/api/src/permissions/modules/validate-access/lib/validate-collection-access.ts new file mode 100644 index 0000000000..0d1706b19b --- /dev/null +++ b/api/src/permissions/modules/validate-access/lib/validate-collection-access.ts @@ -0,0 +1,25 @@ +import type { Accountability, PermissionsAction } from '@directus/types'; +import { fetchPermissions } from '../../../lib/fetch-permissions.js'; +import { fetchPolicies } from '../../../lib/fetch-policies.js'; +import type { Context } from 
'../../../types.js'; + +export interface ValidateCollectionAccessOptions { + accountability: Accountability; + action: PermissionsAction; + collection: string; +} + +/** + * Check if you have (limited) access to a given collection by making sure there's at least 1 + * permission rule available for the collection and action combo + */ +export async function validateCollectionAccess(options: ValidateCollectionAccessOptions, context: Context) { + const policies = await fetchPolicies(options.accountability, context); + + const permissions = await fetchPermissions( + { action: options.action, policies, collections: [options.collection], accountability: options.accountability }, + context, + ); + + return permissions.length > 0; +} diff --git a/api/src/permissions/modules/validate-access/lib/validate-item-access.test.ts b/api/src/permissions/modules/validate-access/lib/validate-item-access.test.ts new file mode 100644 index 0000000000..e4b7fecdab --- /dev/null +++ b/api/src/permissions/modules/validate-access/lib/validate-item-access.test.ts @@ -0,0 +1,96 @@ +import type { Accountability, SchemaOverview } from '@directus/types'; +import { beforeEach, expect, test, vi } from 'vitest'; +import { getAstFromQuery } from '../../../../database/get-ast-from-query/get-ast-from-query.js'; +import { runAst } from '../../../../database/run-ast/run-ast.js'; +import type { AST } from '../../../../types/ast.js'; +import type { Context } from '../../../types.js'; +import { processAst } from '../../process-ast/process-ast.js'; +import { validateItemAccess } from './validate-item-access.js'; + +vi.mock('../../../../database/get-ast-from-query/get-ast-from-query.js'); +vi.mock('../../../../database/run-ast/run-ast.js'); +vi.mock('../../process-ast/process-ast.js'); + +beforeEach(() => { + vi.clearAllMocks(); +}); + +test('Throws error when primary key does not exist in given collection', async () => { + const schema = { collections: {} } as unknown as SchemaOverview; + const acc = {} as 
unknown as Accountability; + + await expect( + validateItemAccess({ accountability: acc, action: 'read', collection: 'collection-a', primaryKeys: [1] }, { + schema, + } as Context), + ).rejects.toBeInstanceOf(Error); +}); + +test('Queries the database', async () => { + const schema = { collections: { 'collection-a': { primary: 'field-a' } } } as unknown as SchemaOverview; + const acc = {} as unknown as Accountability; + const ast = {} as unknown as AST; + + vi.mocked(getAstFromQuery).mockResolvedValue(ast); + vi.mocked(runAst).mockResolvedValue([]); + + await expect( + validateItemAccess({ accountability: acc, action: 'read', collection: 'collection-a', primaryKeys: [1] }, { + schema, + } as Context), + ).resolves.toBe(false); + + expect(getAstFromQuery).toHaveBeenCalledWith( + { + collection: 'collection-a', + query: { + fields: [], + limit: 1, + filter: { + 'field-a': { + _in: [1], + }, + }, + }, + accountability: acc, + }, + { schema } as Context, + ); + + expect(processAst).toHaveBeenCalledWith( + { + accountability: acc, + action: 'read', + collection: 'collection-a', + primaryKeys: [1], + ast, + }, + { schema }, + ); +}); + +test('Returns false if no items are returned', async () => { + const schema = { collections: { 'collection-a': { primary: 'field-a' } } } as unknown as SchemaOverview; + const acc = {} as unknown as Accountability; + + vi.mocked(runAst).mockResolvedValue([]); + + await expect( + validateItemAccess({ accountability: acc, action: 'read', collection: 'collection-a', primaryKeys: [1] }, { + schema, + } as Context), + ).resolves.toBe(false); +}); + +test('Returns true the number of returned items matches the number of requested primary keys', async () => { + const schema = { collections: { 'collection-a': { primary: 'field-a' } } } as unknown as SchemaOverview; + const acc = {} as unknown as Accountability; + + vi.mocked(runAst).mockResolvedValue([{}, {}]); + + await expect( + validateItemAccess({ accountability: acc, action: 'read', 
collection: 'collection-a', primaryKeys: [1, 2] }, { + schema, + } as Context), + ).resolves.toBe(true); +}); diff --git a/api/src/permissions/modules/validate-access/lib/validate-item-access.ts b/api/src/permissions/modules/validate-access/lib/validate-item-access.ts new file mode 100644 index 0000000000..6d0769f104 --- /dev/null +++ b/api/src/permissions/modules/validate-access/lib/validate-item-access.ts @@ -0,0 +1,54 @@ +import type { Accountability, PermissionsAction, PrimaryKey, Query } from '@directus/types'; +import { getAstFromQuery } from '../../../../database/get-ast-from-query/get-ast-from-query.js'; +import { runAst } from '../../../../database/run-ast/run-ast.js'; +import type { Context } from '../../../types.js'; +import { processAst } from '../../process-ast/process-ast.js'; + +export interface ValidateItemAccessOptions { + accountability: Accountability; + action: PermissionsAction; + collection: string; + primaryKeys: PrimaryKey[]; +} + +export async function validateItemAccess(options: ValidateItemAccessOptions, context: Context) { + const primaryKeyField = context.schema.collections[options.collection]?.primary; + + if (!primaryKeyField) { + throw new Error(`Cannot find primary key for collection "${options.collection}"`); + } + + // When we're looking up access to specific items, we have to read them from the database to + // make sure you are allowed to access them. 
+ + const query: Query = { + // We don't actually need any of the field data, just want to know if we can read the item as + // whole or not + fields: [], + limit: options.primaryKeys.length, + filter: { + [primaryKeyField]: { + _in: options.primaryKeys, + }, + }, + }; + + const ast = await getAstFromQuery( + { + accountability: options.accountability, + query, + collection: options.collection, + }, + context, + ); + + await processAst({ ast, ...options }, context); + + const items = await runAst(ast, context.schema, { knex: context.knex }); + + if (items && items.length === options.primaryKeys.length) { + return true; + } + + return false; +} diff --git a/api/src/permissions/modules/validate-access/validate-access.test.ts b/api/src/permissions/modules/validate-access/validate-access.test.ts new file mode 100644 index 0000000000..c773ffe813 --- /dev/null +++ b/api/src/permissions/modules/validate-access/validate-access.test.ts @@ -0,0 +1,59 @@ +import { ForbiddenError } from '@directus/errors'; +import type { Accountability } from '@directus/types'; +import { beforeEach, expect, test, vi } from 'vitest'; +import { AccessService } from '../../../services/access.js'; +import { PermissionsService } from '../../../services/index.js'; +import type { Context } from '../../types.js'; +import { validateCollectionAccess } from './lib/validate-collection-access.js'; +import { validateItemAccess } from './lib/validate-item-access.js'; +import { validateAccess } from './validate-access.js'; + +vi.mock('./lib/validate-item-access.js'); +vi.mock('./lib/validate-collection-access.js'); + +vi.mock('../../../services/permissions.js', () => ({ + PermissionsService: vi.fn(), +})); + +vi.mock('../../../services/access.js', () => ({ + AccessService: vi.fn(), +})); + +beforeEach(() => { + vi.clearAllMocks(); + + AccessService.prototype.readByQuery = vi.fn().mockResolvedValue([]); + PermissionsService.prototype.readByQuery = vi.fn().mockResolvedValue([]); +}); + +test('Returns when admin 
is true', async () => { + const accountability = { admin: true } as unknown as Accountability; + const action = 'read'; + const collection = 'collection-a'; + + await expect(validateAccess({ accountability, action, collection }, {} as Context)).resolves.toBeUndefined(); +}); + +test('Throws if you do not have item access when primary keys are passed', async () => { + const accountability = { admin: false } as unknown as Accountability; + const action = 'read'; + const collection = 'collection-a'; + + vi.mocked(validateCollectionAccess).mockResolvedValue(false); + + await expect(validateAccess({ accountability, action, collection }, {} as Context)).rejects.toBeInstanceOf( + ForbiddenError, + ); +}); + +test('Throws if you do not have collection access when primary keys are not passed', async () => { + const accountability = { admin: false } as unknown as Accountability; + const action = 'read'; + const collection = 'collection-a'; + + vi.mocked(validateItemAccess).mockResolvedValue(false); + + await expect(validateAccess({ accountability, action, collection }, {} as Context)).rejects.toBeInstanceOf( + ForbiddenError, + ); +}); diff --git a/api/src/permissions/modules/validate-access/validate-access.ts b/api/src/permissions/modules/validate-access/validate-access.ts new file mode 100644 index 0000000000..5346bea73b --- /dev/null +++ b/api/src/permissions/modules/validate-access/validate-access.ts @@ -0,0 +1,40 @@ +import { ForbiddenError } from '@directus/errors'; +import type { Accountability, PermissionsAction, PrimaryKey } from '@directus/types'; +import type { Context } from '../../types.js'; +import { validateCollectionAccess } from './lib/validate-collection-access.js'; +import { validateItemAccess } from './lib/validate-item-access.js'; + +export interface ValidateAccessOptions { + accountability: Accountability; + action: PermissionsAction; + collection: string; + primaryKeys?: PrimaryKey[]; +} + +/** + * Validate if the current user has access to perform 
action against the given collection and + * optional primary keys. This is done by reading the item from the database using the access + * control rules and checking if we got the expected result back + */ +export async function validateAccess(options: ValidateAccessOptions, context: Context) { + if (options.accountability.admin === true) { + return; + } + + let access; + + // If primary keys are passed, we have to confirm the access by actually trying to read the items + // from the database. If no keys are passed, we can simply check if the collection+action combo + // exists within permissions + if (options.primaryKeys) { + access = await validateItemAccess(options as Required, context); + } else { + access = await validateCollectionAccess(options, context); + } + + if (!access) { + throw new ForbiddenError({ + reason: `You don't have permission to "${options.action}" from collection "${options.collection}" or it does not exist.`, + }); + } +} diff --git a/api/src/permissions/modules/validate-remaining-admin/validate-remaining-admin-count.ts b/api/src/permissions/modules/validate-remaining-admin/validate-remaining-admin-count.ts new file mode 100644 index 0000000000..cd7538748d --- /dev/null +++ b/api/src/permissions/modules/validate-remaining-admin/validate-remaining-admin-count.ts @@ -0,0 +1,9 @@ +import { UnprocessableContentError } from '@directus/errors'; + +export function validateRemainingAdminCount(count: number) { + if (count <= 0) { + throw new UnprocessableContentError({ + reason: `Cannot remove the last admin user from the system`, + }); + } +} diff --git a/api/src/permissions/modules/validate-remaining-admin/validate-remaining-admin-users.ts b/api/src/permissions/modules/validate-remaining-admin/validate-remaining-admin-users.ts new file mode 100644 index 0000000000..befaff1b74 --- /dev/null +++ b/api/src/permissions/modules/validate-remaining-admin/validate-remaining-admin-users.ts @@ -0,0 +1,16 @@ +import { fetchUserCount, type 
FetchUserCountOptions } from '../../../utils/fetch-user-count/fetch-user-count.js'; +import type { Context } from '../../types.js'; +import { validateRemainingAdminCount } from './validate-remaining-admin-count.js'; + +export interface ValidateRemainingAdminUsersOptions + extends Pick {} + +export async function validateRemainingAdminUsers(options: ValidateRemainingAdminUsersOptions, context: Context) { + const { admin } = await fetchUserCount({ + ...options, + adminOnly: true, + knex: context.knex, + }); + + validateRemainingAdminCount(admin); +} diff --git a/api/src/permissions/types.ts b/api/src/permissions/types.ts new file mode 100644 index 0000000000..7db00e3c46 --- /dev/null +++ b/api/src/permissions/types.ts @@ -0,0 +1,7 @@ +import type { SchemaOverview } from '@directus/types'; +import type { Knex } from 'knex'; + +export interface Context { + schema: SchemaOverview; + knex: Knex; +} diff --git a/api/src/permissions/utils/create-default-accountability.ts b/api/src/permissions/utils/create-default-accountability.ts new file mode 100644 index 0000000000..1f01e17876 --- /dev/null +++ b/api/src/permissions/utils/create-default-accountability.ts @@ -0,0 +1,13 @@ +import type { Accountability } from '@directus/types'; + +export function createDefaultAccountability(overrides?: Partial): Accountability { + return { + role: null, + user: null, + roles: [], + admin: false, + app: false, + ip: null, + ...overrides, + }; +} diff --git a/api/src/permissions/utils/extract-required-dynamic-variable-context.test.ts b/api/src/permissions/utils/extract-required-dynamic-variable-context.test.ts new file mode 100644 index 0000000000..2858b33230 --- /dev/null +++ b/api/src/permissions/utils/extract-required-dynamic-variable-context.test.ts @@ -0,0 +1,37 @@ +import type { Permission } from '@directus/types'; +import { test, expect } from 'vitest'; +import { extractRequiredDynamicVariableContext } from './extract-required-dynamic-variable-context.js'; + +test('Extracts dynamic 
variables context from permissions', () => { + const permissions = [ + { + permissions: { + _or: [{ id: { _eq: '$CURRENT_USER.id' } }, { id: { _in: '$CURRENT_ROLES.id' } }], + }, + }, + { + permissions: { + id: { _eq: '$CURRENT_POLICIES.foo' }, + }, + }, + { + validation: { + id: { _eq: '$CURRENT_ROLE.name' }, + }, + }, + { + presets: { + id: '$CURRENT_ROLES.description', + }, + }, + ] as unknown as Permission[]; + + const res = extractRequiredDynamicVariableContext(permissions); + + expect(res).toEqual({ + $CURRENT_USER: new Set(['id']), + $CURRENT_ROLE: new Set(['name']), + $CURRENT_ROLES: new Set(['description', 'id']), + $CURRENT_POLICIES: new Set(['foo']), + }); +}); diff --git a/api/src/permissions/utils/extract-required-dynamic-variable-context.ts b/api/src/permissions/utils/extract-required-dynamic-variable-context.ts new file mode 100644 index 0000000000..84c889e6c5 --- /dev/null +++ b/api/src/permissions/utils/extract-required-dynamic-variable-context.ts @@ -0,0 +1,39 @@ +import type { Permission } from '@directus/types'; +import { deepMap } from '@directus/utils'; + +export interface RequiredPermissionContext { + $CURRENT_USER: Set; + $CURRENT_ROLE: Set; + $CURRENT_ROLES: Set; + $CURRENT_POLICIES: Set; +} + +export function extractRequiredDynamicVariableContext(permissions: Permission[]) { + const permissionContext: RequiredPermissionContext = { + $CURRENT_USER: new Set(), + $CURRENT_ROLE: new Set(), + $CURRENT_ROLES: new Set(), + $CURRENT_POLICIES: new Set(), + }; + + for (const permission of permissions) { + deepMap(permission.permissions, extractPermissionData); + deepMap(permission.validation, extractPermissionData); + deepMap(permission.presets, extractPermissionData); + } + + return permissionContext; + + function extractPermissionData(val: any) { + for (const placeholder of [ + '$CURRENT_USER', + '$CURRENT_ROLE', + '$CURRENT_ROLES', + '$CURRENT_POLICIES', + ] as (keyof typeof permissionContext)[]) { + if (typeof val === 'string' && 
val.startsWith(`${placeholder}.`)) { + permissionContext[placeholder].add(val.replace(`${placeholder}.`, '')); + } + } + } +} diff --git a/api/src/permissions/utils/fetch-dynamic-variable-context.test.ts b/api/src/permissions/utils/fetch-dynamic-variable-context.test.ts new file mode 100644 index 0000000000..a3e6191f81 --- /dev/null +++ b/api/src/permissions/utils/fetch-dynamic-variable-context.test.ts @@ -0,0 +1,104 @@ +import type { Accountability, Permission } from '@directus/types'; +import { beforeEach, test, vi, expect } from 'vitest'; +import { PoliciesService } from '../../services/policies.js'; +import { UsersService } from '../../services/users.js'; +import { RolesService } from '../../services/roles.js'; +import type { Context } from '../types.js'; +import { _fetchDynamicVariableContext as fetchDynamicVariableContext } from './fetch-dynamic-variable-context.js'; + +vi.mock('../../services/users.js', () => ({ + UsersService: vi.fn(), +})); + +vi.mock('../../services/roles.js', () => ({ + RolesService: vi.fn(), +})); + +vi.mock('../../services/policies.js', () => ({ + PoliciesService: vi.fn(), +})); + +beforeEach(() => { + UsersService.prototype.readOne = vi.fn(); + RolesService.prototype.readOne = vi.fn(); + RolesService.prototype.readMany = vi.fn(); + PoliciesService.prototype.readMany = vi.fn(); +}); + +test('Returns filter context for current user', async () => { + const user = {}; + + const permissions = [ + { + permissions: { + key: { _eq: '$CURRENT_USER.email' }, + }, + }, + ] as unknown as Permission[]; + + vi.mocked(UsersService.prototype.readOne).mockResolvedValue(user); + + const res = await fetchDynamicVariableContext( + { + permissions, + accountability: { user: 'user', roles: [] as string[] } as Accountability, + policies: [], + }, + {} as Context, + ); + + expect(res['$CURRENT_USER']).toBe(user); + expect(UsersService.prototype.readOne).toHaveBeenCalledWith('user', { fields: ['email'] }); +}); + +test('Returns filter context for current 
role', async () => { + const role = {}; + + const permissions = [ + { + permissions: { + key: { _eq: '$CURRENT_ROLE.name' }, + }, + }, + ] as unknown as Permission[]; + + vi.mocked(RolesService.prototype.readOne).mockResolvedValue(role); + + const res = await fetchDynamicVariableContext( + { + permissions, + accountability: { role: 'role', roles: [] as string[] } as Accountability, + policies: [], + }, + {} as Context, + ); + + expect(res['$CURRENT_ROLE']).toBe(role); + expect(RolesService.prototype.readOne).toHaveBeenCalledWith('role', { fields: ['name'] }); +}); + +test('Returns filter context for current policies', async () => { + const policies: any[] = []; + + const permissions = [ + { + permissions: { + key: { _in: '$CURRENT_POLICIES.name' }, + }, + }, + ] as unknown as Permission[]; + + vi.mocked(PoliciesService.prototype.readMany).mockResolvedValue(policies); + + const res = await fetchDynamicVariableContext( + { + permissions, + accountability: { roles: [] as string[] } as Accountability, + policies: ['policy-1'], + }, + {} as Context, + ); + + expect(res['$CURRENT_POLICIES']).toBe(policies); + expect(PoliciesService.prototype.readMany).toHaveBeenCalledWith(['policy-1'], { fields: ['name'] }); +}); diff --git a/api/src/permissions/utils/fetch-dynamic-variable-context.ts b/api/src/permissions/utils/fetch-dynamic-variable-context.ts new file mode 100644 index 0000000000..f59e8333b9 --- /dev/null +++ b/api/src/permissions/utils/fetch-dynamic-variable-context.ts @@ -0,0 +1,68 @@ +import type { Accountability, Permission } from '@directus/types'; +import type { Context } from '../types.js'; +import { extractRequiredDynamicVariableContext } from './extract-required-dynamic-variable-context.js'; +import { withCache } from './with-cache.js'; + +export const fetchDynamicVariableContext = withCache( + 'permission-dynamic-variables', + _fetchDynamicVariableContext, + ({ policies, permissions, accountability: { user, role, roles } }) => ({ + policies, + permissions, + 
accountability: { + user, + role, + roles, + }, + }), +); + +export interface FetchDynamicVariableContext { + accountability: Pick; + policies: string[]; + permissions: Permission[]; +} + +export async function _fetchDynamicVariableContext(options: FetchDynamicVariableContext, context: Context) { + const { UsersService } = await import('../../services/users.js'); + const { RolesService } = await import('../../services/roles.js'); + const { PoliciesService } = await import('../../services/policies.js'); + + const contextData: Record = {}; + + const permissionContext = extractRequiredDynamicVariableContext(options.permissions); + + if (options.accountability.user && (permissionContext.$CURRENT_USER?.size ?? 0) > 0) { + const usersService = new UsersService(context); + + contextData['$CURRENT_USER'] = await usersService.readOne(options.accountability.user, { + fields: Array.from(permissionContext.$CURRENT_USER!), + }); + } + + if (options.accountability.role && (permissionContext.$CURRENT_ROLE?.size ?? 0) > 0) { + const rolesService = new RolesService(context); + + contextData['$CURRENT_ROLE'] = await rolesService.readOne(options.accountability.role, { + fields: Array.from(permissionContext.$CURRENT_ROLE!), + }); + } + + if (options.accountability.roles.length > 0 && (permissionContext.$CURRENT_ROLES?.size ?? 0) > 0) { + const rolesService = new RolesService(context); + + contextData['$CURRENT_ROLES'] = await rolesService.readMany(options.accountability.roles, { + fields: Array.from(permissionContext.$CURRENT_ROLES!), + }); + } + + if (options.policies.length > 0 && (permissionContext.$CURRENT_POLICIES?.size ?? 
0) > 0) { + const policiesService = new PoliciesService(context); + + contextData['$CURRENT_POLICIES'] = await policiesService.readMany(options.policies, { + fields: Array.from(permissionContext.$CURRENT_POLICIES!), + }); + } + + return contextData; +} diff --git a/api/src/permissions/utils/filter-policies-by-ip.test.ts b/api/src/permissions/utils/filter-policies-by-ip.test.ts new file mode 100644 index 0000000000..25c0aecaac --- /dev/null +++ b/api/src/permissions/utils/filter-policies-by-ip.test.ts @@ -0,0 +1,59 @@ +import { expect, test } from 'vitest'; +import type { AccessRow } from '../modules/process-ast/types.js'; +import { filterPoliciesByIp } from './filter-policies-by-ip.js'; + +test('Keeps policies that do not have a ip access rule set configured when IP is null', () => { + const policies: AccessRow[] = [ + { + policy: { + id: 'test-policy-1', + ip_access: null, + }, + }, + { + policy: { + id: 'test-policy-1', + ip_access: ['127.0.0.1'], + }, + }, + ]; + + const output = filterPoliciesByIp(policies, null); + + expect(output).toEqual([ + { + policy: { + id: 'test-policy-1', + ip_access: null, + }, + }, + ]); +}); + +test('Keeps policies that match the IP cidr block', () => { + const policies: AccessRow[] = [ + { + policy: { + id: 'test-policy-1', + ip_access: ['192.168.1.0/22'], + }, + }, + { + policy: { + id: 'test-policy-1', + ip_access: ['127.0.0.1'], + }, + }, + ]; + + const output = filterPoliciesByIp(policies, '192.168.1.25'); + + expect(output).toEqual([ + { + policy: { + id: 'test-policy-1', + ip_access: ['192.168.1.0/22'], + }, + }, + ]); +}); diff --git a/api/src/permissions/utils/filter-policies-by-ip.ts b/api/src/permissions/utils/filter-policies-by-ip.ts new file mode 100644 index 0000000000..1c34910456 --- /dev/null +++ b/api/src/permissions/utils/filter-policies-by-ip.ts @@ -0,0 +1,19 @@ +import { ipInNetworks } from '../../utils/ip-in-networks.js'; +import type { AccessRow } from '../lib/fetch-policies.js'; + +export function 
filterPoliciesByIp(policies: AccessRow[], ip: string | null | undefined) { + return policies.filter(({ policy }) => { + // Keep policies that don't have an ip address allow list configured + if (!policy.ip_access || policy.ip_access.length === 0) { + return true; + } + + // If the client's IP address is unknown, we can't validate it against the allow list and will + // have to default to the more secure option of preventing access + if (!ip) { + return false; + } + + return ipInNetworks(ip, policy.ip_access); + }); +} diff --git a/api/src/permissions/utils/get-unaliased-field-key.ts b/api/src/permissions/utils/get-unaliased-field-key.ts new file mode 100644 index 0000000000..067a409828 --- /dev/null +++ b/api/src/permissions/utils/get-unaliased-field-key.ts @@ -0,0 +1,19 @@ +import type { FieldNode, FunctionFieldNode, NestedCollectionNode } from '../../types/index.js'; +import { parseFilterKey } from '../../utils/parse-filter-key.js'; + +/** + * Derive the unaliased field key from the given AST node. 
+ */ +export function getUnaliasedFieldKey(node: NestedCollectionNode | FieldNode | FunctionFieldNode) { + switch (node.type) { + case 'o2m': + return node.relation.meta!.one_field!; + case 'a2o': + case 'm2o': + return node.relation.field; + case 'field': + case 'functionField': + // The field name might still include a function, so process that here as well + return parseFilterKey(node.name).fieldName; + } +} diff --git a/api/src/permissions/utils/process-permissions.ts b/api/src/permissions/utils/process-permissions.ts new file mode 100644 index 0000000000..cc73eab819 --- /dev/null +++ b/api/src/permissions/utils/process-permissions.ts @@ -0,0 +1,18 @@ +import type { Accountability, Permission } from '@directus/types'; +import { parseFilter, parsePreset } from '@directus/utils'; + +export interface ProcessPermissionsOptions { + permissions: Permission[]; + accountability: Pick; + permissionsContext: Record; +} + +export function processPermissions({ permissions, accountability, permissionsContext }: ProcessPermissionsOptions) { + return permissions.map((permission) => { + permission.permissions = parseFilter(permission.permissions, accountability, permissionsContext); + permission.validation = parseFilter(permission.validation, accountability, permissionsContext); + permission.presets = parsePreset(permission.presets, accountability, permissionsContext); + + return permission; + }); +} diff --git a/api/src/permissions/utils/with-cache.ts b/api/src/permissions/utils/with-cache.ts new file mode 100644 index 0000000000..eb2552e420 --- /dev/null +++ b/api/src/permissions/utils/with-cache.ts @@ -0,0 +1,35 @@ +import { getSimpleHash } from '@directus/utils'; +import { useCache } from '../cache.js'; + +/** + * The `pick` parameter can be used to stabilize cache keys, by only using a subset of the available parameters and + * ensuring key order. 
+ * + * If the `pick` function is provided, we pass the picked result to the handler, in order for TypeScript to ensure that + * the function only relies on the parameters that are used for generating the cache key. + * + * @NOTE only uses the first parameter for memoization + */ +export function withCache R, R, Arg0 = Parameters[0]>( + namespace: string, + handler: F, + prepareArg?: (arg0: Arg0) => Arg0, +) { + const cache = useCache(); + + return (async (arg0: Arg0, ...args: any[]) => { + arg0 = prepareArg ? prepareArg(arg0) : arg0; + const key = namespace + '-' + getSimpleHash(JSON.stringify(arg0)); + const cached = await cache.get(key); + + if (cached !== undefined) { + return cached as R; + } + + const res = await handler(arg0, ...args); + + cache.set(key, res); + + return res; + }) as F; +} diff --git a/api/src/services/access.ts b/api/src/services/access.ts new file mode 100644 index 0000000000..d4e0266961 --- /dev/null +++ b/api/src/services/access.ts @@ -0,0 +1,65 @@ +import type { Item, PrimaryKey } from '@directus/types'; +import { clearSystemCache } from '../cache.js'; +import type { AbstractServiceOptions, MutationOptions } from '../types/index.js'; +import { UserIntegrityCheckFlag } from '../utils/validate-user-count-integrity.js'; +import { ItemsService } from './items.js'; + +export class AccessService extends ItemsService { + constructor(options: AbstractServiceOptions) { + super('directus_access', options); + } + + private async clearCaches(opts?: MutationOptions) { + await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache }); + + if (this.cache && opts?.autoPurgeCache !== false) { + await this.cache.clear(); + } + } + + override async createOne(data: Partial, opts: MutationOptions = {}): Promise { + // Creating a new policy attachments affects the number of admin/app/api users. + // But it can only add app or admin users, so no need to check the remaining admin users. + opts.userIntegrityCheckFlags = + (opts.userIntegrityCheckFlags ?? 
UserIntegrityCheckFlag.None) | UserIntegrityCheckFlag.UserLimits; + + opts.onRequireUserIntegrityCheck?.(opts.userIntegrityCheckFlags); + + const result = await super.createOne(data, opts); + + // A new policy has been attached to a user or a role, clear the caches + await this.clearCaches(); + + return result; + } + + override async updateMany( + keys: PrimaryKey[], + data: Partial, + opts: MutationOptions = {}, + ): Promise { + // Updating policy attachments might affect the number of admin/app/api users + opts.userIntegrityCheckFlags = UserIntegrityCheckFlag.All; + opts.onRequireUserIntegrityCheck?.(opts.userIntegrityCheckFlags); + + const result = await super.updateMany(keys, data, { ...opts, userIntegrityCheckFlags: UserIntegrityCheckFlag.All }); + + // Some policy attachments have been updated, clear the caches + await this.clearCaches(); + + return result; + } + + override async deleteMany(keys: PrimaryKey[], opts: MutationOptions = {}): Promise { + // Changes here can affect the number of admin/app/api users + opts.userIntegrityCheckFlags = UserIntegrityCheckFlag.All; + opts.onRequireUserIntegrityCheck?.(opts.userIntegrityCheckFlags); + + const result = await super.deleteMany(keys, opts); + + // Some policy attachments have been deleted, clear the caches + await this.clearCaches(); + + return result; + } +} diff --git a/api/src/services/activity.ts b/api/src/services/activity.ts index 6509a39577..daffad1417 100644 --- a/api/src/services/activity.ts +++ b/api/src/services/activity.ts @@ -4,12 +4,14 @@ import { ErrorCode, isDirectusError } from '@directus/errors'; import type { Accountability, Item, PrimaryKey } from '@directus/types'; import { uniq } from 'lodash-es'; import { useLogger } from '../logger/index.js'; +import { fetchRolesTree } from '../permissions/lib/fetch-roles-tree.js'; +import { fetchGlobalAccess } from '../permissions/modules/fetch-global-access/fetch-global-access.js'; +import { validateAccess } from 
'../permissions/modules/validate-access/validate-access.js'; +import { createDefaultAccountability } from '../permissions/utils/create-default-accountability.js'; import type { AbstractServiceOptions, MutationOptions } from '../types/index.js'; -import { getPermissions } from '../utils/get-permissions.js'; import { isValidUuid } from '../utils/is-valid-uuid.js'; import { Url } from '../utils/url.js'; import { userName } from '../utils/user-name.js'; -import { AuthorizationService } from './authorization.js'; import { ItemsService } from './items.js'; import { NotificationsService } from './notifications.js'; import { UsersService } from './users.js'; @@ -41,23 +43,36 @@ export class ActivityService extends ItemsService { const userID = mention.substring(1); const user = await this.usersService.readOne(userID, { - fields: ['id', 'first_name', 'last_name', 'email', 'role.id', 'role.admin_access', 'role.app_access'], + fields: ['id', 'first_name', 'last_name', 'email', 'role'], }); - const accountability: Accountability = { + const roles = await fetchRolesTree(user['role'], this.knex); + const globalAccess = await fetchGlobalAccess({ user: user['id'], roles, ip: null }, this.knex); + + const accountability: Accountability = createDefaultAccountability({ user: userID, role: user['role']?.id ?? null, - admin: user['role']?.admin_access ?? null, - app: user['role']?.app_access ?? 
null, - }; + roles, + ...globalAccess, + }); - accountability.permissions = await getPermissions(accountability, this.schema); - - const authorizationService = new AuthorizationService({ schema: this.schema, accountability }); const usersService = new UsersService({ schema: this.schema, accountability }); try { - await authorizationService.checkAccess('read', data['collection'], data['item']); + if (this.accountability) { + await validateAccess( + { + accountability: this.accountability, + action: 'read', + collection: data['collection'], + primaryKeys: [data['item']], + }, + { + knex: this.knex, + schema: this.schema, + }, + ); + } const templateData = await usersService.readByQuery({ fields: ['id', 'first_name', 'last_name', 'email'], diff --git a/api/src/services/assets.ts b/api/src/services/assets.ts index 4d2a9747fa..c094ca78f7 100644 --- a/api/src/services/assets.ts +++ b/api/src/services/assets.ts @@ -6,7 +6,7 @@ import { ServiceUnavailableError, } from '@directus/errors'; import type { Range, Stat } from '@directus/storage'; -import type { Accountability, File } from '@directus/types'; +import type { Accountability, File, SchemaOverview } from '@directus/types'; import type { Knex } from 'knex'; import { clamp } from 'lodash-es'; import { contentType } from 'mime-types'; @@ -17,12 +17,12 @@ import sharp from 'sharp'; import { SUPPORTED_IMAGE_TRANSFORM_FORMATS } from '../constants.js'; import getDatabase from '../database/index.js'; import { useLogger } from '../logger/index.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; import { getStorage } from '../storage/index.js'; import type { AbstractServiceOptions, Transformation, TransformationSet } from '../types/index.js'; import { getMilliseconds } from '../utils/get-milliseconds.js'; import { isValidUuid } from '../utils/is-valid-uuid.js'; import * as TransformationUtils from '../utils/transformations.js'; -import { AuthorizationService } from 
'./authorization.js'; import { FilesService } from './files.js'; import { getSharpInstance } from './files/lib/get-sharp-instance.js'; @@ -32,14 +32,14 @@ const logger = useLogger(); export class AssetsService { knex: Knex; accountability: Accountability | null; - authorizationService: AuthorizationService; + schema: SchemaOverview; filesService: FilesService; constructor(options: AbstractServiceOptions) { this.knex = options.knex || getDatabase(); this.accountability = options.accountability || null; + this.schema = options.schema; this.filesService = new FilesService({ ...options, accountability: null }); - this.authorizationService = new AuthorizationService(options); } async getAsset( @@ -63,8 +63,16 @@ export class AssetsService { */ if (!isValidUuid(id)) throw new ForbiddenError(); - if (systemPublicKeys.includes(id) === false && this.accountability?.admin !== true) { - await this.authorizationService.checkAccess('read', 'directus_files', id); + if (systemPublicKeys.includes(id) === false && this.accountability) { + await validateAccess( + { + accountability: this.accountability, + action: 'read', + collection: 'directus_files', + primaryKeys: [id], + }, + { knex: this.knex, schema: this.schema }, + ); } const file = (await this.filesService.readOne(id, { limit: 1 })) as File; diff --git a/api/src/services/authentication.ts b/api/src/services/authentication.ts index ef81519149..a82112c55f 100644 --- a/api/src/services/authentication.ts +++ b/api/src/services/authentication.ts @@ -1,3 +1,5 @@ +import { fetchRolesTree } from '../permissions/lib/fetch-roles-tree.js'; +import { fetchGlobalAccess } from '../permissions/modules/fetch-global-access/fetch-global-access.js'; import { Action } from '@directus/constants'; import { useEnv } from '@directus/env'; import { @@ -73,23 +75,20 @@ export class AuthenticationService { const user = await this.knex .select( - 'u.id', - 'u.first_name', - 'u.last_name', - 'u.email', - 'u.password', - 'u.status', - 'u.role', - 
'r.admin_access', - 'r.app_access', - 'u.tfa_secret', - 'u.provider', - 'u.external_identifier', - 'u.auth_data', + 'id', + 'first_name', + 'last_name', + 'email', + 'password', + 'status', + 'role', + 'tfa_secret', + 'provider', + 'external_identifier', + 'auth_data', ) - .from('directus_users as u') - .leftJoin('directus_roles as r', 'u.role', 'r.id') - .where('u.id', userId) + .from('directus_users') + .where('id', userId) .first(); const updatedPayload = await emitter.emitFilter( @@ -185,11 +184,18 @@ export class AuthenticationService { } } + const roles = await fetchRolesTree(user.role, this.knex); + + const globalAccess = await fetchGlobalAccess( + { roles, user: user.id, ip: this.accountability?.ip ?? null }, + this.knex, + ); + const tokenPayload: DirectusTokenPayload = { id: user.id, role: user.role, - app_access: user.app_access, - admin_access: user.admin_access, + app_access: globalAccess.app, + admin_access: globalAccess.admin, }; const refreshToken = nanoid(64); @@ -285,9 +291,7 @@ export class AuthenticationService { user_provider: 'u.provider', user_external_identifier: 'u.external_identifier', user_auth_data: 'u.auth_data', - role_id: 'r.id', - role_admin_access: 'r.admin_access', - role_app_access: 'r.app_access', + user_role: 'u.role', share_id: 'd.id', share_item: 'd.item', share_role: 'd.role', @@ -300,9 +304,6 @@ export class AuthenticationService { .from('directus_sessions AS s') .leftJoin('directus_users AS u', 's.user', 'u.id') .leftJoin('directus_shares AS d', 's.share', 'd.id') - .leftJoin('directus_roles AS r', (join) => { - join.onIn('r.id', [this.knex.ref('u.role'), this.knex.ref('d.role')]); - }) .where('s.token', refreshToken) .andWhere('s.expires', '>=', new Date()) .andWhere((subQuery) => { @@ -329,6 +330,13 @@ export class AuthenticationService { } } + const roles = await fetchRolesTree(record.user_role, this.knex); + + const globalAccess = await fetchGlobalAccess( + { user: record.user_id, roles, ip: this.accountability?.ip ?? 
null }, + this.knex, + ); + if (record.user_id) { const provider = getAuthProvider(record.user_provider); @@ -342,9 +350,9 @@ export class AuthenticationService { provider: record.user_provider, external_identifier: record.user_external_identifier, auth_data: record.user_auth_data, - role: record.role_id, - app_access: record.role_app_access, - admin_access: record.role_admin_access, + role: record.user_role, + app_access: globalAccess.app, + admin_access: globalAccess.admin, }); } @@ -354,9 +362,9 @@ export class AuthenticationService { const tokenPayload: DirectusTokenPayload = { id: record.user_id, - role: record.role_id, - app_access: record.role_app_access, - admin_access: record.role_admin_access, + role: record.user_role, + app_access: globalAccess.app, + admin_access: globalAccess.admin, }; if (options?.session) { diff --git a/api/src/services/authorization.ts b/api/src/services/authorization.ts deleted file mode 100644 index 224261d5a1..0000000000 --- a/api/src/services/authorization.ts +++ /dev/null @@ -1,627 +0,0 @@ -import { ForbiddenError } from '@directus/errors'; -import type { - Accountability, - Aggregate, - Filter, - Item, - Permission, - PermissionsAction, - PrimaryKey, - Query, - SchemaOverview, -} from '@directus/types'; -import { validatePayload } from '@directus/utils'; -import { FailedValidationError, joiValidationErrorItemToErrorExtensions } from '@directus/validation'; -import type { Knex } from 'knex'; -import { cloneDeep, flatten, isArray, isNil, merge, reduce, uniq, uniqWith } from 'lodash-es'; -import { GENERATE_SPECIAL } from '../constants.js'; -import getDatabase from '../database/index.js'; -import type { - AST, - AbstractServiceOptions, - FieldNode, - FunctionFieldNode, - NestedCollectionNode, -} from '../types/index.js'; -import { getRelationInfo } from '../utils/get-relation-info.js'; -import { parseFilterKey } from '../utils/parse-filter-key.js'; -import { ItemsService } from './items.js'; -import { PayloadService } from 
'./payload.js'; - -export class AuthorizationService { - knex: Knex; - accountability: Accountability | null; - payloadService: PayloadService; - schema: SchemaOverview; - - constructor(options: AbstractServiceOptions) { - this.knex = options.knex || getDatabase(); - this.accountability = options.accountability || null; - this.schema = options.schema; - - this.payloadService = new PayloadService('directus_permissions', { - knex: this.knex, - schema: this.schema, - }); - } - - async processAST(ast: AST, action: PermissionsAction = 'read'): Promise { - const collectionsRequested = getCollectionsFromAST(ast); - - const permissionsForCollections = - uniqWith( - this.accountability?.permissions?.filter((permission) => { - return ( - permission.action === action && - collectionsRequested.map(({ collection }) => collection).includes(permission.collection) - ); - }), - (curr, prev) => curr.collection === prev.collection && curr.action === prev.action && curr.role === prev.role, - ) ?? []; - - // If the permissions don't match the collections, you don't have permission to read all of them - const uniqueCollectionsRequestedCount = uniq(collectionsRequested.map(({ collection }) => collection)).length; - - if (uniqueCollectionsRequestedCount !== permissionsForCollections.length) { - throw new ForbiddenError(); - } - - validateFields(ast); - validateFilterPermissions(ast, this.schema, action, this.accountability); - applyFilters(ast, this.accountability); - - return ast; - - /** - * Traverses the AST and returns an array of all collections that are being fetched - */ - function getCollectionsFromAST(ast: AST | NestedCollectionNode): { collection: string; field: string }[] { - const collections = []; - - if (ast.type === 'a2o') { - collections.push(...ast.names.map((name) => ({ collection: name, field: ast.fieldKey }))); - - for (const children of Object.values(ast.children)) { - for (const nestedNode of children) { - if (nestedNode.type !== 'field' && nestedNode.type !== 
'functionField') { - collections.push(...getCollectionsFromAST(nestedNode)); - } - } - } - } else { - collections.push({ - collection: ast.name, - field: ast.type === 'root' ? null : ast.fieldKey, - }); - - for (const nestedNode of ast.children) { - if (nestedNode.type === 'functionField') { - collections.push({ - collection: nestedNode.relatedCollection, - field: null, - }); - } else if (nestedNode.type !== 'field') { - collections.push(...getCollectionsFromAST(nestedNode)); - } - } - } - - return collections as { collection: string; field: string }[]; - } - - function validateFields(ast: AST | NestedCollectionNode | FieldNode | FunctionFieldNode) { - if (ast.type !== 'field' && ast.type !== 'functionField') { - if (ast.type === 'a2o') { - for (const [collection, children] of Object.entries(ast.children)) { - checkFields(collection, children, ast.query?.[collection]?.aggregate); - } - } else { - checkFields(ast.name, ast.children, ast.query?.aggregate); - } - } - - function checkFields( - collection: string, - children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[], - aggregate?: Aggregate | null, - ) { - // We check the availability of the permissions in the step before this is run - const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!; - const allowedFields = permissions.fields || []; - - if (aggregate && allowedFields.includes('*') === false) { - for (const aliasMap of Object.values(aggregate)) { - if (!aliasMap) continue; - - for (const column of Object.values(aliasMap)) { - if (column === '*') continue; - if (allowedFields.includes(column) === false) throw new ForbiddenError(); - } - } - } - - for (const childNode of children) { - if (childNode.type !== 'field') { - validateFields(childNode); - continue; - } - - if (allowedFields.includes('*')) continue; - - const { fieldName } = parseFilterKey(childNode.name); - - if (allowedFields.includes(fieldName) === false) { - throw new ForbiddenError(); - 
} - } - } - } - - function validateFilterPermissions( - ast: AST | NestedCollectionNode | FieldNode | FunctionFieldNode, - schema: SchemaOverview, - action: PermissionsAction, - accountability: Accountability | null, - ) { - let requiredFieldPermissions: Record> = {}; - - if (ast.type !== 'field' && ast.type !== 'functionField') { - if (ast.type === 'a2o') { - for (const collection of Object.keys(ast.children)) { - requiredFieldPermissions = mergeRequiredFieldPermissions( - requiredFieldPermissions, - extractRequiredFieldPermissions(collection, ast.query?.[collection]?.filter ?? {}), - ); - - for (const child of ast.children[collection]!) { - const childPermissions = validateFilterPermissions(child, schema, action, accountability); - - if (Object.keys(childPermissions).length > 0) { - //Only add relational field if deep child has a filter - if (child.type !== 'field') { - (requiredFieldPermissions[collection] || (requiredFieldPermissions[collection] = new Set())).add( - child.fieldKey, - ); - } - - requiredFieldPermissions = mergeRequiredFieldPermissions(requiredFieldPermissions, childPermissions); - } - } - } - } else { - requiredFieldPermissions = mergeRequiredFieldPermissions( - requiredFieldPermissions, - extractRequiredFieldPermissions(ast.name, ast.query?.filter ?? 
{}), - ); - - for (const child of ast.children) { - const childPermissions = validateFilterPermissions(child, schema, action, accountability); - - if (Object.keys(childPermissions).length > 0) { - // Only add relational field if deep child has a filter - if (child.type !== 'field') { - (requiredFieldPermissions[ast.name] || (requiredFieldPermissions[ast.name] = new Set())).add( - child.fieldKey, - ); - } - - requiredFieldPermissions = mergeRequiredFieldPermissions(requiredFieldPermissions, childPermissions); - } - } - } - } - - if (ast.type === 'root') { - // Validate all required permissions once at the root level - checkFieldPermissions(ast.name, schema, action, requiredFieldPermissions, ast.query.alias); - } - - return requiredFieldPermissions; - - function extractRequiredFieldPermissions( - collection: string, - filter: Filter, - parentCollection?: string, - parentField?: string, - ) { - return reduce( - filter, - function (result: Record>, filterValue, filterKey) { - if (filterKey.startsWith('_')) { - if (filterKey === '_and' || filterKey === '_or') { - if (isArray(filterValue)) { - for (const filter of filterValue as Filter[]) { - const requiredPermissions = extractRequiredFieldPermissions( - collection, - filter, - parentCollection, - parentField, - ); - - result = mergeRequiredFieldPermissions(result, requiredPermissions); - } - } - - return result; - } - - // Filter value is not a filter, so we should skip it - return result; - } - // virtual o2m/o2a filter in the form of `$FOLLOW(...)` - else if (collection && filterKey.startsWith('$FOLLOW')) { - (result[collection] || (result[collection] = new Set())).add(filterKey); - // add virtual relation to the required permissions - const { relation } = getRelationInfo([], collection, filterKey); - - if (relation?.collection && relation?.field) { - (result[relation.collection] || (result[relation.collection] = new Set())).add(relation.field); - } - } - // a2o filter in the form of `item:collection` - else if 
(filterKey.includes(':')) { - const [field, collectionScope] = filterKey.split(':'); - - if (collection) { - // Add the `item` field to the required permissions - (result[collection] || (result[collection] = new Set())).add(field!); - - // Add the `collection` field to the required permissions - result[collection]!.add('collection'); - } else { - const relation = schema.relations.find((relation) => { - return ( - (relation.collection === parentCollection && relation.field === parentField) || - (relation.related_collection === parentCollection && relation.meta?.one_field === parentField) - ); - }); - - // Filter key not found in parent collection - if (!relation) throw new ForbiddenError(); - - const relatedCollectionName = - relation.related_collection === parentCollection ? relation.collection : relation.related_collection!; - - // Add the `item` field to the required permissions - (result[relatedCollectionName] || (result[relatedCollectionName] = new Set())).add(field!); - - // Add the `collection` field to the required permissions - result[relatedCollectionName]!.add('collection'); - } - - // Continue to parse the filter for nested `collection` afresh - const requiredPermissions = extractRequiredFieldPermissions(collectionScope!, filterValue); - result = mergeRequiredFieldPermissions(result, requiredPermissions); - } else { - if (collection) { - (result[collection] || (result[collection] = new Set())).add(filterKey); - } else { - const relation = schema.relations.find((relation) => { - return ( - (relation.collection === parentCollection && relation.field === parentField) || - (relation.related_collection === parentCollection && relation.meta?.one_field === parentField) - ); - }); - - // Filter key not found in parent collection - if (!relation) throw new ForbiddenError(); - - parentCollection = - relation.related_collection === parentCollection ? 
relation.collection : relation.related_collection!; - - (result[parentCollection] || (result[parentCollection] = new Set())).add(filterKey); - } - - if (typeof filterValue === 'object') { - // Parent collection is undefined when we process the top level filter - if (!parentCollection) parentCollection = collection; - - for (const [childFilterKey, childFilterValue] of Object.entries(filterValue)) { - if (childFilterKey.startsWith('_')) { - if (childFilterKey === '_and' || childFilterKey === '_or') { - if (isArray(childFilterValue)) { - for (const filter of childFilterValue as Filter[]) { - const requiredPermissions = extractRequiredFieldPermissions( - '', - filter, - parentCollection, - filterKey, - ); - - result = mergeRequiredFieldPermissions(result, requiredPermissions); - } - } - } - } else { - const requiredPermissions = extractRequiredFieldPermissions( - '', - filterValue, - parentCollection, - filterKey, - ); - - result = mergeRequiredFieldPermissions(result, requiredPermissions); - } - } - } - } - - return result; - }, - {}, - ); - } - - function mergeRequiredFieldPermissions(current: Record>, child: Record>) { - for (const collection of Object.keys(child)) { - if (!current[collection]) { - current[collection] = child[collection]!; - } else { - current[collection] = new Set([...current[collection]!, ...child[collection]!]); - } - } - - return current; - } - - function checkFieldPermissions( - rootCollection: string, - schema: SchemaOverview, - action: PermissionsAction, - requiredPermissions: Record>, - aliasMap?: Record | null, - ) { - if (accountability?.admin === true) return; - - for (const collection of Object.keys(requiredPermissions)) { - const permission = accountability?.permissions?.find( - (permission) => permission.collection === collection && permission.action === 'read', - ); - - let allowedFields: string[]; - - // Allow the filtering of top level ID for actions such as update and delete - if (action !== 'read' && collection === rootCollection) 
{ - const actionPermission = accountability?.permissions?.find( - (permission) => permission.collection === collection && permission.action === action, - ); - - if (!actionPermission || !actionPermission.fields) { - throw new ForbiddenError(); - } - - allowedFields = permission?.fields - ? [...permission.fields, schema.collections[collection]!.primary] - : [schema.collections[collection]!.primary]; - } else if (!permission || !permission.fields) { - throw new ForbiddenError(); - } else { - allowedFields = permission.fields; - } - - if (allowedFields.includes('*')) continue; - // Allow legacy permissions with an empty fields array, where id can be accessed - if (allowedFields.length === 0) allowedFields.push(schema.collections[collection]!.primary); - - for (const field of requiredPermissions[collection]!) { - if (field.startsWith('$FOLLOW')) continue; - const { fieldName } = parseFilterKey(field); - let originalFieldName = fieldName; - - if (collection === rootCollection && aliasMap?.[fieldName]) { - originalFieldName = aliasMap[fieldName]!; - } - - if (!allowedFields.includes(originalFieldName)) { - throw new ForbiddenError(); - } - } - } - } - } - - function applyFilters( - ast: AST | NestedCollectionNode | FieldNode | FunctionFieldNode, - accountability: Accountability | null, - ): AST | NestedCollectionNode | FieldNode | FunctionFieldNode { - if (ast.type === 'functionField') { - const collection = ast.relatedCollection; - - updateFilterQuery(collection, ast.query); - } else if (ast.type !== 'field') { - if (ast.type === 'a2o') { - const collections = Object.keys(ast.children); - - for (const collection of collections) { - updateFilterQuery(collection, ast.query[collection]!); - } - - for (const [collection, children] of Object.entries(ast.children)) { - ast.children[collection] = children.map((child) => applyFilters(child, accountability)) as ( - | NestedCollectionNode - | FieldNode - )[]; - } - } else { - const collection = ast.name; - - 
updateFilterQuery(collection, ast.query); - - ast.children = ast.children.map((child) => applyFilters(child, accountability)) as ( - | NestedCollectionNode - | FieldNode - )[]; - } - } - - return ast; - - function updateFilterQuery(collection: string, query: Query) { - // We check the availability of the permissions in the step before this is run - const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!; - - if (!query.filter || Object.keys(query.filter).length === 0) { - query.filter = { _and: [] }; - } else { - query.filter = { _and: [query.filter] }; - } - - if (permissions.permissions && Object.keys(permissions.permissions).length > 0) { - query.filter._and.push(permissions.permissions); - } - - if (query.filter._and.length === 0) delete query.filter; - } - } - } - - /** - * Checks if the provided payload matches the configured permissions, and adds the presets to the payload. - */ - validatePayload(action: PermissionsAction, collection: string, data: Partial): Partial { - const payload = cloneDeep(data); - - let permission: Permission | undefined; - - if (this.accountability?.admin === true) { - permission = { - id: 0, - role: this.accountability?.role, - collection, - action, - permissions: {}, - validation: {}, - fields: ['*'], - presets: {}, - }; - } else { - permission = this.accountability?.permissions?.find((permission) => { - return permission.collection === collection && permission.action === action; - }); - - if (!permission) throw new ForbiddenError(); - - // Check if you have permission to access the fields you're trying to access - - const allowedFields = permission.fields || []; - - if (allowedFields.includes('*') === false) { - const keysInData = Object.keys(payload); - const invalidKeys = keysInData.filter((fieldKey) => allowedFields.includes(fieldKey) === false); - - if (invalidKeys.length > 0) { - throw new ForbiddenError(); - } - } - } - - const preset = permission.presets ?? 
{}; - - const payloadWithPresets = merge({}, preset, payload); - - const fieldValidationRules = Object.values(this.schema.collections[collection]!.fields) - .map((field) => field.validation) - .filter((v) => v) as Filter[]; - - const hasValidationRules = - isNil(permission.validation) === false && Object.keys(permission.validation ?? {}).length > 0; - - const hasFieldValidationRules = fieldValidationRules && fieldValidationRules.length > 0; - - const requiredColumns: SchemaOverview['collections'][string]['fields'][string][] = []; - - for (const field of Object.values(this.schema.collections[collection]!.fields)) { - const specials = field?.special ?? []; - - const hasGenerateSpecial = GENERATE_SPECIAL.some((name) => specials.includes(name)); - - const nullable = field.nullable || hasGenerateSpecial || field.generated; - - if (!nullable) { - requiredColumns.push(field); - } - } - - if (hasValidationRules === false && hasFieldValidationRules === false && requiredColumns.length === 0) { - return payloadWithPresets; - } - - if (requiredColumns.length > 0) { - permission.validation = hasValidationRules ? { _and: [permission.validation!] 
} : { _and: [] }; - - for (const field of requiredColumns) { - if (action === 'create' && field.defaultValue === null) { - permission.validation._and.push({ - [field.field]: { - _submitted: true, - }, - }); - } - - permission.validation._and.push({ - [field.field]: { - _nnull: true, - }, - }); - } - } - - if (hasFieldValidationRules) { - if (permission.validation && Object.keys(permission.validation).length > 0) { - permission.validation = { _and: [permission.validation, ...fieldValidationRules] }; - } else { - permission.validation = { _and: fieldValidationRules }; - } - } - - const validationErrors: InstanceType[] = []; - - validationErrors.push( - ...flatten( - validatePayload(permission.validation!, payloadWithPresets).map((error) => - error.details.map((details) => new FailedValidationError(joiValidationErrorItemToErrorExtensions(details))), - ), - ), - ); - - if (validationErrors.length > 0) throw validationErrors; - - return payloadWithPresets; - } - - async checkAccess(action: PermissionsAction, collection: string, pk?: PrimaryKey | PrimaryKey[]): Promise { - if (this.accountability?.admin === true) return; - - const itemsService = new ItemsService(collection, { - accountability: this.accountability, - knex: this.knex, - schema: this.schema, - }); - - const query: Query = { - fields: ['*'], - }; - - if (Array.isArray(pk)) { - const result = await itemsService.readMany(pk, { ...query, limit: pk.length }, { permissionsAction: action }); - - // for the unexpected case that the result is not an array (for example due to filter hook) - if (!isArray(result)) throw new ForbiddenError(); - - if (result.length !== pk.length) throw new ForbiddenError(); - } else if (pk) { - const result = await itemsService.readOne(pk, query, { permissionsAction: action }); - if (!result) throw new ForbiddenError(); - } else { - query.limit = 1; - const result = await itemsService.readByQuery(query, { permissionsAction: action }); - - // for the unexpected case that the result is not 
an array (for example due to filter hook) - if (!isArray(result)) throw new ForbiddenError(); - - // for create action, an empty array is expected - for other actions, the first item is expected to be available - const access = action === 'create' ? result.length === 0 : !!result[0]; - if (!access) throw new ForbiddenError(); - } - } -} diff --git a/api/src/services/collections.ts b/api/src/services/collections.ts index 3af7d1ac26..dc2ad3fd4c 100644 --- a/api/src/services/collections.ts +++ b/api/src/services/collections.ts @@ -14,6 +14,8 @@ import type { Helpers } from '../database/helpers/index.js'; import { getHelpers } from '../database/helpers/index.js'; import getDatabase, { getSchemaInspector } from '../database/index.js'; import emitter from '../emitter.js'; +import { fetchAllowedCollections } from '../permissions/modules/fetch-allowed-collections/fetch-allowed-collections.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; import type { AbstractServiceOptions, ActionEventParams, Collection, MutationOptions } from '../types/index.js'; import { getSchema } from '../utils/get-schema.js'; import { shouldClearCache } from '../utils/should-clear-cache.js'; @@ -292,11 +294,16 @@ export class CollectionsService { {}, ); - let collectionsYouHavePermissionToRead: string[] = this.accountability - .permissions!.filter((permission) => { - return permission.action === 'read'; - }) - .map(({ collection }) => collection); + let collectionsYouHavePermissionToRead = await fetchAllowedCollections( + { + accountability: this.accountability, + action: 'read', + }, + { + knex: this.knex, + schema: this.schema, + }, + ); for (const collection of collectionsYouHavePermissionToRead) { const group = collectionsGroups[collection]; @@ -363,20 +370,22 @@ export class CollectionsService { * Read many collections by name */ async readMany(collectionKeys: string[]): Promise { - if (this.accountability && this.accountability.admin !== true) { 
- const permissions = this.accountability.permissions!.filter((permission) => { - return permission.action === 'read' && collectionKeys.includes(permission.collection); - }); - - if (collectionKeys.length !== permissions.length) { - const collectionsYouHavePermissionToRead = permissions.map(({ collection }) => collection); - - for (const collectionKey of collectionKeys) { - if (collectionsYouHavePermissionToRead.includes(collectionKey) === false) { - throw new ForbiddenError(); - } - } - } + if (this.accountability) { + await Promise.all( + collectionKeys.map((collection) => + validateAccess( + { + accountability: this.accountability!, + action: 'read', + collection, + }, + { + schema: this.schema, + knex: this.knex, + }, + ), + ), + ); } const collections = await this.readByQuery(); diff --git a/api/src/services/fields.ts b/api/src/services/fields.ts index bbbc5048c8..b38223be9b 100644 --- a/api/src/services/fields.ts +++ b/api/src/services/fields.ts @@ -1,9 +1,10 @@ import { - KNEX_TYPES, - REGEX_BETWEEN_PARENS, DEFAULT_NUMERIC_PRECISION, DEFAULT_NUMERIC_SCALE, + KNEX_TYPES, + REGEX_BETWEEN_PARENS, } from '@directus/constants'; +import { useEnv } from '@directus/env'; import { ForbiddenError, InvalidPayloadError } from '@directus/errors'; import type { Column, SchemaInspector } from '@directus/schema'; import { createInspector } from '@directus/schema'; @@ -19,6 +20,9 @@ import type { Helpers } from '../database/helpers/index.js'; import { getHelpers } from '../database/helpers/index.js'; import getDatabase, { getSchemaInspector } from '../database/index.js'; import emitter from '../emitter.js'; +import { fetchPermissions } from '../permissions/lib/fetch-permissions.js'; +import { fetchPolicies } from '../permissions/lib/fetch-policies.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; import type { AbstractServiceOptions, ActionEventParams, MutationOptions } from '../types/index.js'; import getDefaultValue from 
'../utils/get-default-value.js'; import { getSystemFieldRowsWithAuthProviders } from '../utils/get-field-system-rows.js'; @@ -30,7 +34,6 @@ import { transaction } from '../utils/transaction.js'; import { ItemsService } from './items.js'; import { PayloadService } from './payload.js'; import { RelationsService } from './relations.js'; -import { useEnv } from '@directus/env'; const systemFieldRows = getSystemFieldRowsWithAuthProviders(); const env = useEnv(); @@ -63,12 +66,6 @@ export class FieldsService { this.schemaCache = localSchemaCache; } - private get hasReadAccess() { - return !!this.accountability?.permissions?.find((permission) => { - return permission.collection === 'directus_fields' && permission.action === 'read'; - }); - } - async columnInfo(collection?: string): Promise; async columnInfo(collection: string, field: string): Promise; async columnInfo(collection?: string, field?: string): Promise { @@ -102,8 +99,18 @@ export class FieldsService { async readAll(collection?: string): Promise { let fields: FieldMeta[]; - if (this.accountability && this.accountability.admin !== true && this.hasReadAccess === false) { - throw new ForbiddenError(); + if (this.accountability) { + await validateAccess( + { + accountability: this.accountability, + action: 'read', + collection: 'directus_fields', + }, + { + schema: this.schema, + knex: this.knex, + }, + ); } const nonAuthorizedItemsService = new ItemsService('directus_fields', { @@ -195,14 +202,34 @@ export class FieldsService { // Filter the result so we only return the fields you have read access to if (this.accountability && this.accountability.admin !== true) { - const permissions = this.accountability.permissions!.filter((permission) => { - return permission.action === 'read'; - }); + const policies = await fetchPolicies(this.accountability, { knex: this.knex, schema: this.schema }); - const allowedFieldsInCollection: Record = {}; + const permissions = await fetchPermissions( + collection + ? 
{ + action: 'read', + policies, + collections: [collection], + accountability: this.accountability, + } + : { + action: 'read', + policies, + accountability: this.accountability, + }, + { knex: this.knex, schema: this.schema }, + ); + + const allowedFieldsInCollection: Record> = {}; permissions.forEach((permission) => { - allowedFieldsInCollection[permission.collection] = permission.fields ?? []; + if (!allowedFieldsInCollection[permission.collection]) { + allowedFieldsInCollection[permission.collection] = new Set(); + } + + for (const field of permission.fields ?? []) { + allowedFieldsInCollection[permission.collection]!.add(field); + } }); if (collection && collection in allowedFieldsInCollection === false) { @@ -212,8 +239,8 @@ export class FieldsService { return result.filter((field) => { if (field.collection in allowedFieldsInCollection === false) return false; const allowedFields = allowedFieldsInCollection[field.collection]!; - if (allowedFields[0] === '*') return true; - return allowedFields.includes(field.field); + if (allowedFields.has('*')) return true; + return allowedFields.has(field.field); }); } @@ -233,19 +260,38 @@ export class FieldsService { async readOne(collection: string, field: string): Promise> { if (this.accountability && this.accountability.admin !== true) { - if (this.hasReadAccess === false) { - throw new ForbiddenError(); + await validateAccess( + { + accountability: this.accountability, + action: 'read', + collection, + }, + { + schema: this.schema, + knex: this.knex, + }, + ); + + const policies = await fetchPolicies(this.accountability, { knex: this.knex, schema: this.schema }); + + const permissions = await fetchPermissions( + { action: 'read', policies, collections: [collection], accountability: this.accountability }, + { knex: this.knex, schema: this.schema }, + ); + + let hasAccess = false; + + for (const permission of permissions) { + if (permission.fields) { + if (permission.fields.includes('*') || 
permission.fields.includes(field)) { + hasAccess = true; + break; + } + } } - const permissions = this.accountability.permissions!.find((permission) => { - return permission.action === 'read' && permission.collection === collection; - }); - - if (!permissions || !permissions.fields) throw new ForbiddenError(); - - if (permissions.fields.includes('*') === false) { - const allowedFields = permissions.fields; - if (allowedFields.includes(field) === false) throw new ForbiddenError(); + if (!hasAccess) { + throw new ForbiddenError(); } } diff --git a/api/src/services/files.ts b/api/src/services/files.ts index a220c857be..9381f16e9b 100644 --- a/api/src/services/files.ts +++ b/api/src/services/files.ts @@ -15,6 +15,7 @@ import url from 'url'; import { RESUMABLE_UPLOADS } from '../constants.js'; import emitter from '../emitter.js'; import { useLogger } from '../logger/index.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; import { getAxios } from '../request/index.js'; import { getStorage } from '../storage/index.js'; import type { AbstractServiceOptions, MutationOptions } from '../types/index.js'; @@ -200,12 +201,18 @@ export class FilesService extends ItemsService { * Import a single file from an external URL */ async importOne(importURL: string, body: Partial): Promise { - const fileCreatePermissions = this.accountability?.permissions?.find( - (permission) => permission.collection === 'directus_files' && permission.action === 'create', - ); - - if (this.accountability && this.accountability?.admin !== true && !fileCreatePermissions) { - throw new ForbiddenError(); + if (this.accountability) { + await validateAccess( + { + accountability: this.accountability, + action: 'create', + collection: 'directus_files', + }, + { + knex: this.knex, + schema: this.schema, + }, + ); } let fileResponse; diff --git a/api/src/services/graphql/index.ts b/api/src/services/graphql/index.ts index 7116e91f54..0b05242688 100644 --- 
a/api/src/services/graphql/index.ts +++ b/api/src/services/graphql/index.ts @@ -2,7 +2,16 @@ import { Action, FUNCTIONS } from '@directus/constants'; import { useEnv } from '@directus/env'; import { ErrorCode, ForbiddenError, InvalidPayloadError, isDirectusError, type DirectusError } from '@directus/errors'; import { isSystemCollection } from '@directus/system-data'; -import type { Accountability, Aggregate, Filter, Item, PrimaryKey, Query, SchemaOverview } from '@directus/types'; +import type { + Accountability, + Aggregate, + CollectionAccess, + Filter, + Item, + PrimaryKey, + Query, + SchemaOverview, +} from '@directus/types'; import { parseFilterFunctionPath, toBoolean } from '@directus/utils'; import argon2 from 'argon2'; import type { @@ -55,6 +64,9 @@ import { } from '../../constants.js'; import getDatabase from '../../database/index.js'; import { rateLimiter } from '../../middleware/rate-limiter-registration.js'; +import { fetchAllowedFieldMap } from '../../permissions/modules/fetch-allowed-field-map/fetch-allowed-field-map.js'; +import { fetchInconsistentFieldMap } from '../../permissions/modules/fetch-inconsistent-field-map/fetch-inconsistent-field-map.js'; +import { createDefaultAccountability } from '../../permissions/utils/create-default-accountability.js'; import type { AbstractServiceOptions, AuthenticationMode, GraphQLParams } from '../../types/index.js'; import { generateHash } from '../../utils/generate-hash.js'; import { getGraphQLType } from '../../utils/get-graphql-type.js'; @@ -93,6 +105,9 @@ import { GraphQLVoid } from './types/void.js'; import { addPathToValidationError } from './utils/add-path-to-validation-error.js'; import processError from './utils/process-error.js'; import { sanitizeGraphqlSchema } from './utils/sanitize-gql-schema.js'; +import { fetchAccountabilityCollectionAccess } from '../../permissions/modules/fetch-accountability-collection-access/fetch-accountability-collection-access.js'; +import { 
fetchAccountabilityPolicyGlobals } from '../../permissions/modules/fetch-accountability-policy-globals/fetch-accountability-policy-globals.js'; +import { RolesService } from '../roles.js'; const env = useEnv(); @@ -138,7 +153,7 @@ export class GraphQLService { operationName, contextValue, }: GraphQLParams): Promise { - const schema = this.getSchema(); + const schema = await this.getSchema(); const validationErrors = validate(schema, document, validationRules).map((validationError) => addPathToValidationError(validationError), @@ -178,10 +193,10 @@ export class GraphQLService { /** * Generate the GraphQL schema. Pulls from the schema information generated by the get-schema util. */ - getSchema(): GraphQLSchema; - getSchema(type: 'schema'): GraphQLSchema; - getSchema(type: 'sdl'): GraphQLSchema | string; - getSchema(type: 'schema' | 'sdl' = 'schema'): GraphQLSchema | string { + async getSchema(): Promise; + async getSchema(type: 'schema'): Promise; + async getSchema(type: 'sdl'): Promise; + async getSchema(type: 'schema' | 'sdl' = 'schema'): Promise { const key = `${this.scope}_${type}_${this.accountability?.role}_${this.accountability?.user}`; const cachedSchema = cache.get(key); @@ -193,25 +208,91 @@ export class GraphQLService { const schemaComposer = new SchemaComposer(); + let schema: { read: SchemaOverview; create: SchemaOverview; update: SchemaOverview; delete: SchemaOverview }; + const sanitizedSchema = sanitizeGraphqlSchema(this.schema); - const schema = { - read: - this.accountability?.admin === true - ? sanitizedSchema - : reduceSchema(sanitizedSchema, this.accountability?.permissions || null, ['read']), - create: - this.accountability?.admin === true - ? sanitizedSchema - : reduceSchema(sanitizedSchema, this.accountability?.permissions || null, ['create']), - update: - this.accountability?.admin === true - ? 
sanitizedSchema - : reduceSchema(sanitizedSchema, this.accountability?.permissions || null, ['update']), - delete: - this.accountability?.admin === true - ? sanitizedSchema - : reduceSchema(sanitizedSchema, this.accountability?.permissions || null, ['delete']), + if (!this.accountability || this.accountability.admin) { + schema = { + read: sanitizedSchema, + create: sanitizedSchema, + update: sanitizedSchema, + delete: sanitizedSchema, + }; + } else { + schema = { + read: reduceSchema( + sanitizedSchema, + await fetchAllowedFieldMap( + { + accountability: this.accountability, + action: 'read', + }, + { schema: this.schema, knex: this.knex }, + ), + ), + create: reduceSchema( + sanitizedSchema, + await fetchAllowedFieldMap( + { + accountability: this.accountability, + action: 'create', + }, + { schema: this.schema, knex: this.knex }, + ), + ), + update: reduceSchema( + sanitizedSchema, + await fetchAllowedFieldMap( + { + accountability: this.accountability, + action: 'update', + }, + { schema: this.schema, knex: this.knex }, + ), + ), + delete: reduceSchema( + sanitizedSchema, + await fetchAllowedFieldMap( + { + accountability: this.accountability, + action: 'delete', + }, + { schema: this.schema, knex: this.knex }, + ), + ), + }; + } + + const inconsistentFields = { + read: await fetchInconsistentFieldMap( + { + accountability: this.accountability, + action: 'read', + }, + { schema: this.schema, knex: this.knex }, + ), + create: await fetchInconsistentFieldMap( + { + accountability: this.accountability, + action: 'create', + }, + { schema: this.schema, knex: this.knex }, + ), + update: await fetchInconsistentFieldMap( + { + accountability: this.accountability, + action: 'update', + }, + { schema: this.schema, knex: this.knex }, + ), + delete: await fetchInconsistentFieldMap( + { + accountability: this.accountability, + action: 'delete', + }, + { schema: this.schema, knex: this.knex }, + ), }; const subscriptionEventType = schemaComposer.createEnumTC({ @@ -463,6 
+544,8 @@ export class GraphQLService { field.special, ); + const fieldIsInconsistent = inconsistentFields[action][collection.collection]?.includes(field.field); + // GraphQL doesn't differentiate between not-null and has-to-be-submitted. We // can't non-null in update, as that would require every not-nullable field to be // submitted on updates @@ -470,12 +553,13 @@ export class GraphQLService { field.nullable === false && !field.defaultValue && !GENERATE_SPECIAL.some((flag) => field.special.includes(flag)) && + fieldIsInconsistent === false && action !== 'update' ) { type = new GraphQLNonNull(type); } - if (collection.primary === field.field) { + if (collection.primary === field.field && fieldIsInconsistent === false) { // permissions IDs need to be nullable https://github.com/directus/directus/issues/20509 if (collection.collection === 'directus_permissions') { type = GraphQLID; @@ -2181,7 +2265,7 @@ export class GraphQLService { scope: args['scope'] ?? 'items', }); - return service.getSchema('sdl'); + return await service.getSchema('sdl'); }, }, server_ping: { @@ -2241,7 +2325,7 @@ export class GraphQLService { otp: GraphQLString, }, resolve: async (_, args, { req, res }) => { - const accountability: Accountability = { role: null }; + const accountability: Accountability = createDefaultAccountability(); if (req?.ip) accountability.ip = req.ip; @@ -2293,7 +2377,7 @@ export class GraphQLService { mode: AuthMode, }, resolve: async (_, args, { req, res }) => { - const accountability: Accountability = { role: null }; + const accountability: Accountability = createDefaultAccountability(); if (req?.ip) accountability.ip = req.ip; @@ -2360,7 +2444,7 @@ export class GraphQLService { mode: AuthMode, }, resolve: async (_, args, { req, res }) => { - const accountability: Accountability = { role: null }; + const accountability: Accountability = createDefaultAccountability(); if (req?.ip) accountability.ip = req.ip; @@ -2417,7 +2501,7 @@ export class GraphQLService { 
reset_url: GraphQLString, }, resolve: async (_, args, { req }) => { - const accountability: Accountability = { role: null }; + const accountability: Accountability = createDefaultAccountability(); if (req?.ip) accountability.ip = req.ip; @@ -2446,7 +2530,7 @@ export class GraphQLService { password: new GraphQLNonNull(GraphQLString), }, resolve: async (_, args, { req }) => { - const accountability: Accountability = { role: null }; + const accountability: Accountability = createDefaultAccountability(); if (req?.ip) accountability.ip = req.ip; @@ -3160,6 +3244,81 @@ export class GraphQLService { }); } + if ('directus_permissions' in schema.read.collections) { + schemaComposer.Query.addFields({ + permissions_me: { + type: schemaComposer.createScalarTC({ + name: 'permissions_me_type', + parseValue: (value: unknown) => value as CollectionAccess, + serialize: (value) => value, + }), + resolve: async (_, _args, __, _info) => { + if (!this.accountability?.user && !this.accountability?.role) return null; + + const result = await fetchAccountabilityCollectionAccess(this.accountability, { + schema: this.schema, + knex: getDatabase(), + }); + + return result; + }, + }, + }); + } + + if ('directus_roles' in schema.read.collections) { + schemaComposer.Query.addFields({ + roles_me: { + type: ReadCollectionTypes['directus_roles']!.List, + resolve: async (_, args, __, info) => { + if (!this.accountability?.user && !this.accountability?.role) return null; + + const service = new RolesService({ + accountability: this.accountability, + schema: this.schema, + }); + + const selections = this.replaceFragmentsInSelections( + info.fieldNodes[0]?.selectionSet?.selections, + info.fragments, + ); + + const query = this.getQuery(args, selections || [], info.variableValues); + query.limit = -1; + + const roles = await service.readMany(this.accountability.roles, query); + + return roles; + }, + }, + }); + } + + if ('directus_policies' in schema.read.collections) { + 
schemaComposer.Query.addFields({ + policies_me_globals: { + type: schemaComposer.createObjectTC({ + name: 'policy_me_globals_type', + fields: { + enforce_tfa: 'Boolean', + app_access: 'Boolean', + admin_access: 'Boolean', + }, + }), + resolve: async (_, _args, __, _info) => { + if (!this.accountability?.user && !this.accountability?.role) return null; + + const result = await fetchAccountabilityPolicyGlobals(this.accountability, { + schema: this.schema, + knex: getDatabase(), + }); + + return result; + }, + }, + }); + } + if ('directus_users' in schema.update.collections && this.accountability?.user) { schemaComposer.Mutation.addFields({ update_users_me: { diff --git a/api/src/services/graphql/subscription.ts b/api/src/services/graphql/subscription.ts index 643bc65823..70f78586ee 100644 --- a/api/src/services/graphql/subscription.ts +++ b/api/src/services/graphql/subscription.ts @@ -3,7 +3,6 @@ import { useBus } from '../../bus/index.js'; import type { GraphQLService } from './index.js'; import { getSchema } from '../../utils/get-schema.js'; import type { GraphQLResolveInfo, SelectionNode } from 'graphql'; -import { refreshAccountability } from '../../websocket/authenticate.js'; import { getPayload } from '../../websocket/utils/items.js'; import type { Subscription } from '../../websocket/types.js'; import type { WebSocketEvent } from '../../websocket/messages.js'; @@ -30,7 +29,6 @@ export function createSubscriptionGenerator(self: GraphQLService, event: string) continue; // skip filtered events } - const accountability = await refreshAccountability(self.accountability); const schema = await getSchema(); const subscription: Omit = { @@ -49,7 +47,7 @@ export function createSubscriptionGenerator(self: GraphQLService, event: string) if (eventData['action'] === 'create') { try { subscription.item = eventData['key']; - const result = await getPayload(subscription, accountability, schema, eventData); + const result = await getPayload(subscription, self.accountability, 
schema, eventData); yield { [event]: { @@ -67,7 +65,7 @@ export function createSubscriptionGenerator(self: GraphQLService, event: string) for (const key of eventData['keys']) { try { subscription.item = key; - const result = await getPayload(subscription, accountability, schema, eventData); + const result = await getPayload(subscription, self.accountability, schema, eventData); yield { [event]: { diff --git a/api/src/services/import-export.test.ts b/api/src/services/import-export.test.ts new file mode 100644 index 0000000000..d1915629b1 --- /dev/null +++ b/api/src/services/import-export.test.ts @@ -0,0 +1,282 @@ +import { expect, test } from 'vitest'; +import { getHeadingsForCsvExport } from './import-export.js'; +import type { FieldNode, FunctionFieldNode, NestedCollectionNode } from '../types/ast.js'; + +test('Get the headings for CSV export from the field node tree', () => { + /** + * this is an example result from parseFields + * It includes the following: + * - a field node + * - a m2o node with a nested m2o node + * - a o2m node + * - a o2m node which is the parsing result of a m2a relationship + */ + + const parsedFields: (NestedCollectionNode | FieldNode | FunctionFieldNode)[] = [ + { + type: 'field', + name: 'id', + fieldKey: 'id', + whenCase: [], + }, + { + type: 'field', + name: 'title', + fieldKey: 'title', + whenCase: [], + }, + { + type: 'm2o', + name: 'authors', + fieldKey: 'author', + parentKey: 'id', + relatedKey: 'id', + relation: { + collection: 'articles', + field: 'author', + related_collection: 'authors', + schema: { + constraint_name: 'articles_author_foreign', + table: 'articles', + column: 'author', + foreign_key_schema: 'public', + foreign_key_table: 'authors', + foreign_key_column: 'id', + on_update: 'NO ACTION', + on_delete: 'SET NULL', + }, + meta: { + id: 1, + many_collection: 'articles', + many_field: 'author', + one_collection: 'authors', + one_field: null, + one_collection_field: null, + one_allowed_collections: null, + 
junction_field: null, + sort_field: null, + one_deselect_action: 'nullify', + }, + }, + query: {}, + children: [ + { + type: 'field', + name: 'id', + fieldKey: 'id', + whenCase: [], + }, + { + type: 'field', + name: 'first_name', + fieldKey: 'first_name', + whenCase: [], + }, + { + type: 'field', + name: 'last_name', + fieldKey: 'last_name', + whenCase: [], + }, + { + type: 'm2o', + name: 'addresses', + fieldKey: 'address', + parentKey: 'id', + relatedKey: 'id', + relation: { + collection: 'addresses', + field: 'address', + related_collection: 'authors', + schema: { + constraint_name: 'articles_author_foreign', + table: 'articles', + column: 'author', + foreign_key_schema: 'public', + foreign_key_table: 'authors', + foreign_key_column: 'id', + on_update: 'NO ACTION', + on_delete: 'SET NULL', + }, + meta: { + id: 1, + many_collection: 'articles', + many_field: 'author', + one_collection: 'authors', + one_field: null, + one_collection_field: null, + one_allowed_collections: null, + junction_field: null, + sort_field: null, + one_deselect_action: 'nullify', + }, + }, + query: {}, + children: [ + { + type: 'field', + name: 'id', + fieldKey: 'id', + whenCase: [], + }, + { + type: 'field', + name: 'street', + fieldKey: 'street', + whenCase: [], + }, + { + type: 'field', + name: 'city', + fieldKey: 'city', + whenCase: [], + }, + ], + cases: [], + whenCase: [], + }, + ], + cases: [], + whenCase: [], + }, + { + type: 'o2m', + name: 'headlines', + fieldKey: 'headings', + parentKey: 'id', + relatedKey: 'id', + relation: { + collection: 'headlines', + field: 'article', + related_collection: 'articles', + schema: { + constraint_name: 'headlines_article_foreign', + table: 'headlines', + column: 'article', + foreign_key_schema: 'public', + foreign_key_table: 'articles', + foreign_key_column: 'id', + on_update: 'NO ACTION', + on_delete: 'SET NULL', + }, + meta: { + id: 3, + many_collection: 'headlines', + many_field: 'article', + one_collection: 'articles', + one_field: 
'headings', + one_collection_field: null, + one_allowed_collections: null, + junction_field: null, + sort_field: null, + one_deselect_action: 'nullify', + }, + }, + query: { + sort: ['id'], + }, + children: [ + { + type: 'field', + name: 'id', + fieldKey: 'id', + whenCase: [], + }, + { + type: 'field', + name: 'title', + fieldKey: 'title', + whenCase: [], + }, + ], + cases: [], + whenCase: [], + }, + { + type: 'o2m', + name: 'articles_m2a', + fieldKey: 'some-m2a', + parentKey: 'id', + relatedKey: 'id', + relation: { + collection: 'articles_m2a', + field: 'articles_id', + related_collection: 'articles', + schema: { + constraint_name: 'articles_m2a_articles_id_foreign', + table: 'articles_m2a', + column: 'articles_id', + foreign_key_schema: 'public', + foreign_key_table: 'articles', + foreign_key_column: 'id', + on_update: 'NO ACTION', + on_delete: 'SET NULL', + }, + meta: { + id: 5, + many_collection: 'articles_m2a', + many_field: 'articles_id', + one_collection: 'articles', + one_field: 'some-m2a', + one_collection_field: null, + one_allowed_collections: null, + junction_field: 'item', + sort_field: null, + one_deselect_action: 'nullify', + }, + }, + query: { + sort: ['id'], + }, + children: [ + { + type: 'field', + name: 'id', + fieldKey: 'id', + whenCase: [], + }, + { + type: 'field', + name: 'articles_id', + fieldKey: 'articles_id', + whenCase: [], + }, + { + type: 'field', + name: 'item', + fieldKey: 'item', + whenCase: [], + }, + { + type: 'field', + name: 'collection', + fieldKey: 'collection', + whenCase: [], + }, + ], + cases: [], + whenCase: [], + }, + ]; + + const res = getHeadingsForCsvExport(parsedFields); + + const expectedHeadlinesForCsvExport = [ + 'id', + 'title', + + // headings for m2o node with another nested m2o node + 'author.id', + 'author.first_name', + 'author.last_name', + 'author.address.id', + 'author.address.street', + 'author.address.city', + + // headings for the o2m nodes + 'headings', + 'some-m2a', + ]; + + 
expect(res).toEqual(expectedHeadlinesForCsvExport); +}); diff --git a/api/src/services/import-export.ts b/api/src/services/import-export.ts index 2d46c4c9e5..6258d5087f 100644 --- a/api/src/services/import-export.ts +++ b/api/src/services/import-export.ts @@ -23,7 +23,14 @@ import StreamArray from 'stream-json/streamers/StreamArray.js'; import getDatabase from '../database/index.js'; import emitter from '../emitter.js'; import { useLogger } from '../logger/index.js'; -import type { AbstractServiceOptions, ActionEventParams } from '../types/index.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; +import type { + AbstractServiceOptions, + ActionEventParams, + FunctionFieldNode, + FieldNode, + NestedCollectionNode, +} from '../types/index.js'; import { getDateFormatted } from '../utils/get-date-formatted.js'; import { getService } from '../utils/get-service.js'; import { transaction } from '../utils/transaction.js'; @@ -32,6 +39,7 @@ import { userName } from '../utils/user-name.js'; import { FilesService } from './files.js'; import { NotificationsService } from './notifications.js'; import { UsersService } from './users.js'; +import { parseFields } from '../database/get-ast-from-query/lib/parse-fields.js'; const env = useEnv(); const logger = useLogger(); @@ -52,16 +60,30 @@ export class ImportService { async import(collection: string, mimetype: string, stream: Readable): Promise { if (this.accountability?.admin !== true && isSystemCollection(collection)) throw new ForbiddenError(); - const createPermissions = this.accountability?.permissions?.find( - (permission) => permission.collection === collection && permission.action === 'create', - ); + if (this.accountability) { + await validateAccess( + { + accountability: this.accountability, + action: 'create', + collection, + }, + { + schema: this.schema, + knex: this.knex, + }, + ); - const updatePermissions = this.accountability?.permissions?.find( - (permission) => 
permission.collection === collection && permission.action === 'update', - ); - - if (this.accountability?.admin !== true && (!createPermissions || !updatePermissions)) { - throw new ForbiddenError(); + await validateAccess( + { + accountability: this.accountability, + action: 'update', + collection, + }, + { + schema: this.schema, + knex: this.knex, + }, + ); } switch (mimetype) { @@ -323,11 +345,35 @@ export class ExportService { readCount += result.length; if (result.length) { + let csvHeadings = null; + + if (format === 'csv') { + if (!query.fields) query.fields = ['*']; + + // to ensure the all headings are included in the CSV file, all possible fields need to be determined. + + const parsedFields = await parseFields( + { + parentCollection: collection, + fields: query.fields, + query: query, + accountability: this.accountability, + }, + { + schema: this.schema, + knex: database, + }, + ); + + csvHeadings = getHeadingsForCsvExport(parsedFields); + } + await appendFile( tmpFile.path, this.transform(result, format, { includeHeader: batch === 0, includeFooter: batch + 1 === batchesRequired, + fields: csvHeadings, }), ); } @@ -415,6 +461,7 @@ Your export of ${collection} is ready. Click here to view. options?: { includeHeader?: boolean; includeFooter?: boolean; + fields?: string[] | null; }, ): string { if (format === 'json') { @@ -448,12 +495,14 @@ Your export of ${collection} is ready. Click here to view. if (format === 'csv') { if (input.length === 0) return ''; - const parser = new CSVParser({ - transforms: [CSVTransforms.flatten({ separator: '.' })], - header: options?.includeHeader !== false, - }); + const transforms = [CSVTransforms.flatten({ separator: '.' })]; + const header = options?.includeHeader !== false; - let string = parser.parse(input); + const transformOptions = options?.fields + ? 
{ transforms, header, fields: options?.fields } + : { transforms, header }; + + let string = new CSVParser(transformOptions).parse(input); if (options?.includeHeader === false) { string = '\n' + string; @@ -469,3 +518,35 @@ Your export of ${collection} is ready. Click here to view. throw new ServiceUnavailableError({ service: 'export', reason: `Illegal export type used: "${format}"` }); } } +/* + * Recursive function to traverse the field nodes, to determine the headings for the CSV export file. + * + * Relational nodes which target a single item get expanded, which means that their nested fields get their own column in the csv file. + * For relational nodes which target a multiple items, the nested field names are not going to be expanded. + * Instead they will be stored as a single value/cell of the CSV file. + */ +export function getHeadingsForCsvExport( + nodes: (NestedCollectionNode | FieldNode | FunctionFieldNode)[] | undefined, + prefix: string = '', +) { + let fieldNames: string[] = []; + + if (!nodes) return fieldNames; + + nodes.forEach((node) => { + switch (node.type) { + case 'field': + case 'functionField': + case 'o2m': + case 'a2o': + fieldNames.push(prefix ? `${prefix}.${node.fieldKey}` : node.fieldKey); + break; + case 'm2o': + fieldNames = fieldNames.concat( + getHeadingsForCsvExport(node.children, prefix ? 
`${prefix}.${node.fieldKey}` : node.fieldKey), + ); + } + }); + + return fieldNames; +} diff --git a/api/src/services/index.ts b/api/src/services/index.ts index a6fcf28649..371f01f8d8 100644 --- a/api/src/services/index.ts +++ b/api/src/services/index.ts @@ -1,7 +1,7 @@ +export * from './access.js'; export * from './activity.js'; export * from './assets.js'; export * from './authentication.js'; -export * from './authorization.js'; export * from './collections.js'; export * from './dashboards.js'; export * from './extensions.js'; @@ -18,7 +18,8 @@ export * from './notifications.js'; export * from './operations.js'; export * from './panels.js'; export * from './payload.js'; -export * from './permissions/index.js'; +export * from './permissions.js'; +export * from './policies.js'; export * from './presets.js'; export * from './relations.js'; export * from './revisions.js'; diff --git a/api/src/services/items.test.ts b/api/src/services/items.test.ts new file mode 100644 index 0000000000..35e072daa5 --- /dev/null +++ b/api/src/services/items.test.ts @@ -0,0 +1,116 @@ +import type { SchemaOverview } from '@directus/types'; +import knex, { type Knex } from 'knex'; +import { MockClient, Tracker, createTracker } from 'knex-mock-client'; +import { afterEach, beforeAll, beforeEach, describe, expect, it, vi, type MockedFunction } from 'vitest'; +import { UserIntegrityCheckFlag, validateUserCountIntegrity } from '../utils/validate-user-count-integrity.js'; +import { ItemsService } from './index.js'; + +vi.mock('../../src/database/index', () => ({ + default: vi.fn(), + getDatabaseClient: vi.fn().mockReturnValue('postgres'), +})); + +vi.mock('../utils/validate-user-count-integrity.js'); + +const testSchema = { + collections: { + test: { + collection: 'test', + primary: 'id', + singleton: false, + sortField: null, + note: null, + accountability: null, + fields: { + id: { + field: 'id', + defaultValue: null, + nullable: false, + generated: true, + type: 'integer', + dbType: 
'integer', + precision: null, + scale: null, + special: [], + note: null, + validation: null, + alias: false, + }, + }, + }, + }, + relations: [], +} as SchemaOverview; + +describe('Integration Tests', () => { + let db: MockedFunction; + let tracker: Tracker; + + beforeAll(async () => { + db = vi.mocked(knex.default({ client: MockClient })); + tracker = createTracker(db); + }); + + beforeEach(() => { + tracker.on.any('test').response({}); + }); + + afterEach(() => { + tracker.reset(); + }); + + describe('Services / Items', () => { + let service: ItemsService; + + beforeEach(() => { + service = new ItemsService('test', { + knex: db, + schema: testSchema, + }); + }); + + afterEach(() => { + vi.clearAllMocks(); + }); + + describe('createOne', () => { + it('should validate user count if requested', async () => { + await service.createOne({}, { userIntegrityCheckFlags: UserIntegrityCheckFlag.All }); + + expect(validateUserCountIntegrity).toHaveBeenCalled(); + }); + }); + + describe('createMany', () => { + it('should validate user count if requested', async () => { + await service.createMany([{}], { userIntegrityCheckFlags: UserIntegrityCheckFlag.All }); + + expect(validateUserCountIntegrity).toHaveBeenCalled(); + }); + }); + + describe('updateBatch', () => { + it('should validate user count if requested', async () => { + await service.updateBatch([{ id: 1 }], { userIntegrityCheckFlags: UserIntegrityCheckFlag.All }); + + expect(validateUserCountIntegrity).toHaveBeenCalled(); + }); + }); + + describe('updateMany', () => { + it('should validate user count if requested', async () => { + await service.updateMany([1], {}, { userIntegrityCheckFlags: UserIntegrityCheckFlag.All }); + + expect(validateUserCountIntegrity).toHaveBeenCalled(); + }); + }); + + describe('deleteMany', () => { + it('should validate user count if requested', async () => { + await service.deleteMany([1], { userIntegrityCheckFlags: UserIntegrityCheckFlag.All }); + + 
expect(validateUserCountIntegrity).toHaveBeenCalled(); + }); + }); + }); +}); diff --git a/api/src/services/items.ts b/api/src/services/items.ts index 31d2da46ad..7b9ae3830c 100644 --- a/api/src/services/items.ts +++ b/api/src/services/items.ts @@ -15,16 +15,19 @@ import type { Knex } from 'knex'; import { assign, clone, cloneDeep, omit, pick, without } from 'lodash-es'; import { getCache } from '../cache.js'; import { translateDatabaseError } from '../database/errors/translate.js'; +import { getAstFromQuery } from '../database/get-ast-from-query/get-ast-from-query.js'; import { getHelpers } from '../database/helpers/index.js'; import getDatabase from '../database/index.js'; -import runAST from '../database/run-ast.js'; +import { runAst } from '../database/run-ast/run-ast.js'; import emitter from '../emitter.js'; +import { processAst } from '../permissions/modules/process-ast/process-ast.js'; +import { processPayload } from '../permissions/modules/process-payload/process-payload.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; import type { AbstractService, AbstractServiceOptions, ActionEventParams, MutationOptions } from '../types/index.js'; -import getASTFromQuery from '../utils/get-ast-from-query.js'; import { shouldClearCache } from '../utils/should-clear-cache.js'; import { transaction } from '../utils/transaction.js'; import { validateKeys } from '../utils/validate-keys.js'; -import { AuthorizationService } from './authorization.js'; +import { UserIntegrityCheckFlag, validateUserCountIntegrity } from '../utils/validate-user-count-integrity.js'; import { PayloadService } from './payload.js'; const env = useEnv(); @@ -142,18 +145,14 @@ export class ItemsService { - // We're creating new services instances so they can use the transaction as their Knex interface - const payloadService = new PayloadService(this.collection, { + const serviceOptions: AbstractServiceOptions = { accountability: this.accountability, 
knex: trx, schema: this.schema, - }); + }; - const authorizationService = new AuthorizationService({ - accountability: this.accountability, - knex: trx, - schema: this.schema, - }); + // We're creating new services instances so they can use the transaction as their Knex interface + const payloadService = new PayloadService(this.collection, serviceOptions); // Run all hooks that are attached to this event so the end user has the chance to augment the // item that is about to be saved @@ -176,7 +175,18 @@ export class ItemsService { const service = this.fork({ knex }); + let userIntegrityCheckFlags = opts.userIntegrityCheckFlags ?? UserIntegrityCheckFlag.None; + const primaryKeys: PrimaryKey[] = []; const nestedActionEvents: ActionEventParams[] = []; @@ -398,6 +426,7 @@ export class ItemsService (userIntegrityCheckFlags |= flags), bypassEmitAction: (params) => nestedActionEvents.push(params), mutationTracker: opts.mutationTracker, bypassAutoIncrementSequenceReset, @@ -406,6 +435,14 @@ export class ItemsService { const service = this.fork({ knex }); + let userIntegrityCheckFlags = opts.userIntegrityCheckFlags ?? 
UserIntegrityCheckFlag.None; + for (const item of data) { const primaryKey = item[primaryKeyField]; if (!primaryKey) throw new InvalidPayloadError({ reason: `Item in update misses primary key` }); - const combinedOpts = Object.assign({ autoPurgeCache: false }, opts); + const combinedOpts: MutationOptions = { + autoPurgeCache: false, + ...opts, + onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags), + }; + keys.push(await service.updateOne(primaryKey, omit(item, primaryKeyField), combinedOpts)); } + + if (userIntegrityCheckFlags) { + if (opts.onRequireUserIntegrityCheck) { + opts.onRequireUserIntegrityCheck(userIntegrityCheckFlags); + } else { + await validateUserCountIntegrity({ flags: userIntegrityCheckFlags, knex }); + } + } }); } finally { if (shouldClearCache(this.cache, opts, this.collection)) { @@ -645,12 +696,6 @@ export class ItemsService = cloneDeep(data); const nestedActionEvents: ActionEventParams[] = []; - const authorizationService = new AuthorizationService({ - accountability: this.accountability, - knex: this.knex, - schema: this.schema, - }); - // Run all hooks that are attached to this event so the end user has the chance to augment the // item that is about to be saved const payloadAfterHooks = @@ -676,11 +721,33 @@ export class ItemsService { await trx(this.collection).whereIn(primaryKeyField, keys).delete(); + if (opts.userIntegrityCheckFlags) { + if (opts.onRequireUserIntegrityCheck) { + opts.onRequireUserIntegrityCheck(opts.userIntegrityCheckFlags); + } else { + await validateUserCountIntegrity({ flags: opts.userIntegrityCheckFlags, knex: trx }); + } + } + if (this.accountability && this.schema.collections[this.collection]!.accountability !== null) { const activityService = new ActivityService({ knex: trx, diff --git a/api/src/services/meta.ts b/api/src/services/meta.ts index 6c1a955e58..55936d1e55 100644 --- a/api/src/services/meta.ts +++ b/api/src/services/meta.ts @@ -1,7 +1,10 @@ -import type { Accountability, Query, 
SchemaOverview } from '@directus/types'; +import type { Accountability, Filter, Query, SchemaOverview } from '@directus/types'; import type { Knex } from 'knex'; import getDatabase from '../database/index.js'; -import { ForbiddenError } from '@directus/errors'; +import { fetchPermissions } from '../permissions/lib/fetch-permissions.js'; +import { fetchPolicies } from '../permissions/lib/fetch-policies.js'; +import { dedupeAccess } from '../permissions/modules/process-ast/utils/dedupe-access.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; import type { AbstractServiceOptions } from '../types/index.js'; import { applyFilter, applySearch } from '../utils/apply-query.js'; @@ -36,21 +39,54 @@ export class MetaService { } async totalCount(collection: string): Promise { - const dbQuery = this.knex(collection).count('*', { as: 'count' }).first(); + const dbQuery = this.knex(collection); - if (this.accountability?.admin !== true) { - const permissionsRecord = this.accountability?.permissions?.find((permission) => { - return permission.action === 'read' && permission.collection === collection; - }); + let hasJoins = false; - if (!permissionsRecord) throw new ForbiddenError(); + if (this.accountability && this.accountability.admin === false) { + const context = { knex: this.knex, schema: this.schema }; - const permissions = permissionsRecord.permissions ?? {}; + await validateAccess( + { + accountability: this.accountability, + action: 'read', + collection, + }, + context, + ); - applyFilter(this.knex, this.schema, dbQuery, permissions, collection, {}); + const policies = await fetchPolicies(this.accountability, context); + + const permissions = await fetchPermissions( + { + action: 'read', + policies, + accountability: this.accountability, + ...(collection ? 
{ collections: [collection] } : {}), + }, + context, + ); + + const rules = dedupeAccess(permissions); + const cases = rules.map(({ rule }) => rule); + + const filter = { + _or: cases, + }; + + const result = applyFilter(this.knex, this.schema, dbQuery, filter, collection, {}, cases); + hasJoins = result.hasJoins; } - const result = await dbQuery; + if (hasJoins) { + const primaryKeyName = this.schema.collections[collection]!.primary; + + dbQuery.countDistinct({ count: [`${collection}.${primaryKeyName}`] }); + } else { + dbQuery.count('*', { as: 'count' }); + } + + const result = await dbQuery.first(); return Number(result?.count ?? 0); } @@ -60,25 +96,48 @@ export class MetaService { let filter = query.filter || {}; let hasJoins = false; + let cases: Filter[] = []; - if (this.accountability?.admin !== true) { - const permissionsRecord = this.accountability?.permissions?.find((permission) => { - return permission.action === 'read' && permission.collection === collection; - }); + if (this.accountability && this.accountability.admin === false) { + const context = { knex: this.knex, schema: this.schema }; - if (!permissionsRecord) throw new ForbiddenError(); + await validateAccess( + { + accountability: this.accountability, + action: 'read', + collection, + }, + context, + ); - const permissions = permissionsRecord.permissions ?? {}; + const policies = await fetchPolicies(this.accountability, context); + + const permissions = await fetchPermissions( + { + action: 'read', + policies, + accountability: this.accountability, + ...(collection ? 
{ collections: [collection] } : {}), + }, + context, + ); + + const rules = dedupeAccess(permissions); + cases = rules.map(({ rule }) => rule); + + const permissionsFilter = { + _or: cases, + }; if (Object.keys(filter).length > 0) { - filter = { _and: [permissions, filter] }; + filter = { _and: [permissionsFilter, filter] }; } else { - filter = permissions; + filter = permissionsFilter; } } if (Object.keys(filter).length > 0) { - ({ hasJoins } = applyFilter(this.knex, this.schema, dbQuery, filter, collection, {})); + ({ hasJoins } = applyFilter(this.knex, this.schema, dbQuery, filter, collection, {}, cases)); } if (query.search) { @@ -93,8 +152,8 @@ export class MetaService { dbQuery.count('*', { as: 'count' }); } - const records = await dbQuery; + const result = await dbQuery.first(); - return Number(records[0]!['count']); + return Number(result?.count ?? 0); } } diff --git a/api/src/services/notifications.ts b/api/src/services/notifications.ts index c11aa028de..b7b0532b8b 100644 --- a/api/src/services/notifications.ts +++ b/api/src/services/notifications.ts @@ -1,6 +1,8 @@ import { useEnv } from '@directus/env'; import type { Notification, PrimaryKey } from '@directus/types'; import { useLogger } from '../logger/index.js'; +import { fetchRolesTree } from '../permissions/lib/fetch-roles-tree.js'; +import { fetchGlobalAccess } from '../permissions/modules/fetch-global-access/fetch-global-access.js'; import type { AbstractServiceOptions, MutationOptions } from '../types/index.js'; import { md } from '../utils/md.js'; import { Url } from '../utils/url.js'; @@ -32,21 +34,31 @@ export class NotificationsService extends ItemsService { async sendEmail(data: Partial) { if (data.recipient) { const user = await this.usersService.readOne(data.recipient, { - fields: ['id', 'email', 'email_notifications', 'role.app_access'], + fields: ['id', 'email', 'email_notifications', 'role'], }); - const manageUserAccountUrl = new Url(env['PUBLIC_URL'] as string) - .addPath('admin', 
'users', user['id']) - .toString(); - - const html = data.message ? md(data.message) : ''; - if (user['email'] && user['email_notifications'] === true) { + const manageUserAccountUrl = new Url(env['PUBLIC_URL'] as string) + .addPath('admin', 'users', user['id']) + .toString(); + + const html = data.message ? md(data.message) : ''; + const roles = await fetchRolesTree(user['role'], this.knex); + + const { app: app_access } = await fetchGlobalAccess( + { + user: user['id'], + roles, + ip: null, + }, + this.knex, + ); + this.mailService .send({ template: { name: 'base', - data: user['role']?.app_access ? { url: manageUserAccountUrl, html } : { html }, + data: app_access ? { url: manageUserAccountUrl, html } : { html }, }, to: user['email'], subject: data.subject, diff --git a/api/src/services/payload.ts b/api/src/services/payload.ts index 3ae0ad6ec6..331ec7eadf 100644 --- a/api/src/services/payload.ts +++ b/api/src/services/payload.ts @@ -21,6 +21,7 @@ import { getHelpers } from '../database/helpers/index.js'; import getDatabase from '../database/index.js'; import type { AbstractServiceOptions, ActionEventParams, MutationOptions } from '../types/index.js'; import { generateHash } from '../utils/generate-hash.js'; +import { UserIntegrityCheckFlag } from '../utils/validate-user-count-integrity.js'; type Action = 'create' | 'read' | 'update'; @@ -35,6 +36,12 @@ type Transformers = { }) => Promise; }; +type PayloadServiceProcessRelationResult = { + revisions: PrimaryKey[]; + nestedActionEvents: ActionEventParams[]; + userIntegrityCheckFlags: UserIntegrityCheckFlag; +}; + /** * Process a given payload for a collection to ensure the special fields (hash, uuid, date etc) are * handled correctly. 
@@ -399,12 +406,17 @@ export class PayloadService { async processA2O( data: Partial, opts?: MutationOptions, - ): Promise<{ payload: Partial; revisions: PrimaryKey[]; nestedActionEvents: ActionEventParams[] }> { + ): Promise< + PayloadServiceProcessRelationResult & { + payload: Partial; + } + > { const relations = this.schema.relations.filter((relation) => { return relation.collection === this.collection; }); const revisions: PrimaryKey[] = []; + let userIntegrityCheckFlags = UserIntegrityCheckFlag.None; const nestedActionEvents: ActionEventParams[] = []; @@ -466,6 +478,7 @@ export class PayloadService { if (Object.keys(fieldsToUpdate).length > 0) { await service.updateOne(relatedPrimaryKey, relatedRecord, { onRevisionCreate: (pk) => revisions.push(pk), + onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags), bypassEmitAction: (params) => opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params), emitEvents: opts?.emitEvents, @@ -475,6 +488,7 @@ export class PayloadService { } else { relatedPrimaryKey = await service.createOne(relatedRecord, { onRevisionCreate: (pk) => revisions.push(pk), + onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags), bypassEmitAction: (params) => opts?.bypassEmitAction ? 
opts.bypassEmitAction(params) : nestedActionEvents.push(params), emitEvents: opts?.emitEvents, @@ -486,7 +500,7 @@ export class PayloadService { payload[relation.field] = relatedPrimaryKey; } - return { payload, revisions, nestedActionEvents }; + return { payload, revisions, nestedActionEvents, userIntegrityCheckFlags }; } /** @@ -495,11 +509,16 @@ export class PayloadService { async processM2O( data: Partial, opts?: MutationOptions, - ): Promise<{ payload: Partial; revisions: PrimaryKey[]; nestedActionEvents: ActionEventParams[] }> { + ): Promise< + PayloadServiceProcessRelationResult & { + payload: Partial; + } + > { const payload = cloneDeep(data); // All the revisions saved on this level const revisions: PrimaryKey[] = []; + let userIntegrityCheckFlags = UserIntegrityCheckFlag.None; const nestedActionEvents: ActionEventParams[] = []; @@ -548,6 +567,7 @@ export class PayloadService { if (Object.keys(fieldsToUpdate).length > 0) { await service.updateOne(relatedPrimaryKey, relatedRecord, { onRevisionCreate: (pk) => revisions.push(pk), + onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags), bypassEmitAction: (params) => opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params), emitEvents: opts?.emitEvents, @@ -557,6 +577,7 @@ export class PayloadService { } else { relatedPrimaryKey = await service.createOne(relatedRecord, { onRevisionCreate: (pk) => revisions.push(pk), + onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags), bypassEmitAction: (params) => opts?.bypassEmitAction ? 
opts.bypassEmitAction(params) : nestedActionEvents.push(params), emitEvents: opts?.emitEvents, @@ -568,7 +589,7 @@ export class PayloadService { payload[relation.field] = relatedPrimaryKey; } - return { payload, revisions, nestedActionEvents }; + return { payload, revisions, nestedActionEvents, userIntegrityCheckFlags }; } /** @@ -578,8 +599,9 @@ export class PayloadService { data: Partial, parent: PrimaryKey, opts?: MutationOptions, - ): Promise<{ revisions: PrimaryKey[]; nestedActionEvents: ActionEventParams[] }> { + ): Promise { const revisions: PrimaryKey[] = []; + let userIntegrityCheckFlags = UserIntegrityCheckFlag.None; const nestedActionEvents: ActionEventParams[] = []; @@ -669,6 +691,7 @@ export class PayloadService { savedPrimaryKeys.push( ...(await service.upsertMany(recordsToUpsert, { onRevisionCreate: (pk) => revisions.push(pk), + onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags), bypassEmitAction: (params) => opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params), emitEvents: opts?.emitEvents, @@ -697,6 +720,7 @@ export class PayloadService { if (relation.meta.one_deselect_action === 'delete') { // There's no revision for a deletion await service.deleteByQuery(query, { + onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags), bypassEmitAction: (params) => opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params), emitEvents: opts?.emitEvents, @@ -708,6 +732,7 @@ export class PayloadService { { [relation.field]: null }, { onRevisionCreate: (pk) => revisions.push(pk), + onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags), bypassEmitAction: (params) => opts?.bypassEmitAction ? 
opts.bypassEmitAction(params) : nestedActionEvents.push(params), emitEvents: opts?.emitEvents, @@ -757,6 +782,7 @@ export class PayloadService { await service.createMany(createPayload, { onRevisionCreate: (pk) => revisions.push(pk), + onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags), bypassEmitAction: (params) => opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params), emitEvents: opts?.emitEvents, @@ -776,6 +802,7 @@ export class PayloadService { }, { onRevisionCreate: (pk) => revisions.push(pk), + onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags), bypassEmitAction: (params) => opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params), emitEvents: opts?.emitEvents, @@ -805,6 +832,7 @@ export class PayloadService { if (relation.meta.one_deselect_action === 'delete') { await service.deleteByQuery(query, { + onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags), bypassEmitAction: (params) => opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params), emitEvents: opts?.emitEvents, @@ -816,6 +844,7 @@ export class PayloadService { { [relation.field]: null }, { onRevisionCreate: (pk) => revisions.push(pk), + onRequireUserIntegrityCheck: (flags) => (userIntegrityCheckFlags |= flags), bypassEmitAction: (params) => opts?.bypassEmitAction ? 
opts.bypassEmitAction(params) : nestedActionEvents.push(params), emitEvents: opts?.emitEvents, @@ -827,7 +856,7 @@ export class PayloadService { } } - return { revisions, nestedActionEvents }; + return { revisions, nestedActionEvents, userIntegrityCheckFlags }; } /** diff --git a/api/src/services/permissions/index.ts b/api/src/services/permissions.ts similarity index 59% rename from api/src/services/permissions/index.ts rename to api/src/services/permissions.ts index adaec96c1f..b7f0aa2f0b 100644 --- a/api/src/services/permissions/index.ts +++ b/api/src/services/permissions.ts @@ -1,47 +1,24 @@ import { ForbiddenError } from '@directus/errors'; -import type { Item, ItemPermissions, Permission, PermissionsAction, PrimaryKey, Query } from '@directus/types'; -import type Keyv from 'keyv'; -import { clearSystemCache, getCache } from '../../cache.js'; -import type { AbstractServiceOptions, MutationOptions } from '../../types/index.js'; -import { AuthorizationService } from '../authorization.js'; -import type { QueryOptions } from '../items.js'; -import { ItemsService } from '../items.js'; -import { withAppMinimalPermissions } from './lib/with-app-minimal-permissions.js'; +import type { Item, ItemPermissions, Permission, PrimaryKey, Query } from '@directus/types'; +import { clearSystemCache } from '../cache.js'; +import { withAppMinimalPermissions } from '../permissions/lib/with-app-minimal-permissions.js'; +import type { ValidateAccessOptions } from '../permissions/modules/validate-access/validate-access.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; +import type { AbstractServiceOptions, MutationOptions } from '../types/index.js'; +import type { QueryOptions } from './items.js'; +import { ItemsService } from './items.js'; export class PermissionsService extends ItemsService { - systemCache: Keyv; - constructor(options: AbstractServiceOptions) { super('directus_permissions', options); - - const { systemCache } = 
getCache(); - - this.systemCache = systemCache; } - getAllowedFields(action: PermissionsAction, collection?: string): Record { - const results = - this.accountability?.permissions?.filter((permission) => { - let matchesCollection = true; + private async clearCaches(opts?: MutationOptions) { + await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache }); - if (collection) { - matchesCollection = permission.collection === collection; - } - - const matchesAction = permission.action === action; - - return collection ? matchesCollection && matchesAction : matchesAction; - }) ?? []; - - const fieldsPerCollection: Record = {}; - - for (const result of results) { - const { collection, fields } = result; - if (!fieldsPerCollection[collection]) fieldsPerCollection[collection] = []; - fieldsPerCollection[collection]!.push(...(fields ?? [])); + if (this.cache && opts?.autoPurgeCache !== false) { + await this.cache.clear(); } - - return fieldsPerCollection; } override async readByQuery(query: Query, opts?: QueryOptions): Promise[]> { @@ -52,66 +29,48 @@ export class PermissionsService extends ItemsService { override async createOne(data: Partial, opts?: MutationOptions) { const res = await super.createOne(data, opts); - await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache }); - if (this.cache && opts?.autoPurgeCache !== false) { - await this.cache.clear(); - } + await this.clearCaches(opts); return res; } override async createMany(data: Partial[], opts?: MutationOptions) { const res = await super.createMany(data, opts); - await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache }); - if (this.cache && opts?.autoPurgeCache !== false) { - await this.cache.clear(); - } + await this.clearCaches(opts); return res; } override async updateBatch(data: Partial[], opts?: MutationOptions) { const res = await super.updateBatch(data, opts); - await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache }); - if (this.cache && opts?.autoPurgeCache !== false) { - await 
this.cache.clear(); - } + await this.clearCaches(opts); return res; } override async updateMany(keys: PrimaryKey[], data: Partial, opts?: MutationOptions) { const res = await super.updateMany(keys, data, opts); - await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache }); - if (this.cache && opts?.autoPurgeCache !== false) { - await this.cache.clear(); - } + await this.clearCaches(opts); return res; } override async upsertMany(payloads: Partial[], opts?: MutationOptions) { const res = await super.upsertMany(payloads, opts); - await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache }); - if (this.cache && opts?.autoPurgeCache !== false) { - await this.cache.clear(); - } + await this.clearCaches(opts); return res; } override async deleteMany(keys: PrimaryKey[], opts?: MutationOptions) { const res = await super.deleteMany(keys, opts); - await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache }); - if (this.cache && opts?.autoPurgeCache !== false) { - await this.cache.clear(); - } + await this.clearCaches(opts); return res; } @@ -156,19 +115,30 @@ export class PermissionsService extends ItemsService { } } - const authorizationService = new AuthorizationService({ - knex: this.knex, - accountability: this.accountability, - schema: this.schema, - }); - await Promise.all( Object.keys(itemPermissions).map((key) => { const action = key as keyof ItemPermissions; const checkAction = action === 'update' ? 
updateAction : action; - return authorizationService - .checkAccess(checkAction, collection, primaryKey) + if (!this.accountability) { + itemPermissions[action].access = true; + return Promise.resolve(); + } + + const opts: ValidateAccessOptions = { + accountability: this.accountability, + action: checkAction, + collection, + }; + + if (primaryKey) { + opts.primaryKeys = [primaryKey]; + } + + return validateAccess(opts, { + schema: this.schema, + knex: this.knex, + }) .then(() => (itemPermissions[action].access = true)) .catch(() => {}); }), diff --git a/api/src/services/permissions/index.test.ts b/api/src/services/permissions/index.test.ts deleted file mode 100644 index 89cdc8fc2c..0000000000 --- a/api/src/services/permissions/index.test.ts +++ /dev/null @@ -1,501 +0,0 @@ -import { randomIdentifier, randomInteger, randomUUID } from '@directus/random'; -import type { - Accountability, - CollectionsOverview, - DeepPartial, - Filter, - ItemPermissions, - Permission, - PermissionsAction, - Query, - SchemaOverview, -} from '@directus/types'; -import type { Knex } from 'knex'; -import knex from 'knex'; -import { MockClient, Tracker, createTracker } from 'knex-mock-client'; -import { cloneDeep } from 'lodash-es'; -import type { MockedFunction } from 'vitest'; -import { afterEach, beforeAll, beforeEach, describe, expect, test, vi } from 'vitest'; -import { PermissionsService } from './index.js'; -import { withAppMinimalPermissions } from './lib/with-app-minimal-permissions.js'; - -vi.mock('../../database/index.js', () => ({ - default: vi.fn(), - getDatabaseClient: vi.fn().mockReturnValue('postgres'), -})); - -vi.mock('./lib/with-app-minimal-permissions.js'); - -let db: MockedFunction; -let tracker: Tracker; - -beforeAll(async () => { - db = vi.mocked(knex.default({ client: MockClient })); - tracker = createTracker(db); -}); - -afterEach(() => { - tracker.reset(); - vi.clearAllMocks(); -}); - -const directusPermissionsSchema: DeepPartial = { - primary: 'id', - fields: { - 
id: { type: 'integer', special: [] }, - role: { type: 'string', special: [] }, - collection: { type: 'string', special: [] }, - action: { type: 'string', special: [] }, - permissions: { type: 'json', special: ['cast-json'] }, - validation: { type: 'json', special: ['cast-json'] }, - presets: { type: 'json', special: ['cast-json'] }, - fields: { type: 'csv', special: ['cast-csv'] }, - }, -}; - -describe('Services / PermissionsService', () => { - describe('with app minimal permissions', async () => { - let service: PermissionsService; - - let sample: { - permissionId: number; - permissions: Permission[]; - accountability: Accountability; - query: Query; - result: Permission[]; - }; - - beforeEach(() => { - const permissionId = randomInteger(1, 100); - const role = randomUUID(); - - const permissions: Permission[] = [ - { - id: permissionId, - role, - collection: 'directus_permissions', - action: 'read', - permissions: {}, - validation: {}, - presets: {}, - fields: ['*'], - }, - ]; - - const accountability: Accountability = { - user: randomUUID(), - role, - app: true, - permissions, - }; - - const schema: DeepPartial = { - collections: { - directus_permissions: directusPermissionsSchema, - }, - relations: [], - }; - - const query: Query = { filter: { collection: { _eq: permissionId } } }; - - const result: Permission[] = [{} as Permission]; - - sample = { - permissionId, - permissions, - accountability, - query, - result, - }; - - service = new PermissionsService({ - knex: db, - schema: schema as SchemaOverview, - accountability, - }); - - tracker.on.select('select "directus_permissions"').response(sample.permissions); - vi.mocked(withAppMinimalPermissions).mockImplementation(() => result); - }); - - test('readByQuery', async () => { - const result = await service.readByQuery(sample.query); - - expect(withAppMinimalPermissions).toBeCalledWith(sample.accountability, sample.permissions, sample.query.filter); - expect(result).toEqual(sample.result); - }); - - 
test('readMany', async () => { - const result = await service.readMany([sample.permissionId], sample.query); - - expect(withAppMinimalPermissions).toBeCalledWith(sample.accountability, sample.permissions, { - _and: [ - { - id: { - _in: [sample.permissionId], - }, - }, - sample.query.filter, - ], - }); - - expect(result).toEqual(sample.result); - }); - - test('readOne', async () => { - const result = await service.readOne(sample.permissionId, sample.query); - - expect(withAppMinimalPermissions).toBeCalledWith(sample.accountability, sample.permissions, { - ...sample.query.filter, - id: { _eq: sample.permissionId }, - }); - - expect(result).toEqual(sample.result[0]); - }); - }); - - describe('#getItemPermissions', () => { - const collection = randomIdentifier(); - const primaryKeyField = 'id'; - const primaryKey = randomInteger(1, 100); - const permissionCheckField = 'title'; - const permissionCheck = randomIdentifier(); - - const baseSchema: DeepPartial = { - collections: { - directus_permissions: directusPermissionsSchema, - [collection]: { - collection: collection, - primary: primaryKeyField, - fields: { - [primaryKeyField]: { field: primaryKeyField, type: 'integer', special: [] }, - [permissionCheckField]: { field: permissionCheckField, type: 'string', special: [] }, - }, - }, - }, - relations: [], - }; - - const permissionPreset: Omit = { - role: null, - collection, - permissions: {}, - validation: {}, - presets: {}, - fields: ['*'], - }; - - const noAccess: ItemPermissions = { - update: { access: false }, - delete: { access: false }, - share: { access: false }, - }; - - const fullAccess: ItemPermissions = { update: { access: true }, delete: { access: true }, share: { access: true } }; - - type Scenario = [ - scenario: string, - { accountability: Accountability; itemPermissions: ItemPermissions; selectCount: number }, - ]; - - const adminScenario: Scenario = [ - 'admin', - { - accountability: { user: randomUUID(), role: randomUUID(), admin: true }, - 
itemPermissions: fullAccess, - selectCount: 0, - }, - ]; - - const user = { user: randomUUID(), role: randomUUID() } as Accountability; - - const actions: PermissionsAction[] = ['update', 'delete', 'share']; - - const userScenarios: Scenario[] = [ - [`user without permissions`, { accountability: user, itemPermissions: noAccess, selectCount: 0 }], - ...(actions.map((action) => [ - `user with ${action} permission`, - { - accountability: { ...user, permissions: [{ ...permissionPreset, action }] }, - itemPermissions: actions.reduce((a, v) => ({ ...a, [v]: { access: v === action } }), {}), - selectCount: 1, - }, - ]) as Scenario[]), - [ - `user with full permissions`, - { - accountability: { - ...user, - permissions: actions.map((action) => ({ ...permissionPreset, action })), - }, - itemPermissions: fullAccess, - selectCount: 3, - }, - ], - ]; - - const userConditionalScenarios = actions.map((action) => [ - `user with conditional ${action} permission`, - { - accountability: { - ...user, - permissions: [ - { - ...permissionPreset, - action, - permissions: { - _and: [{ [permissionCheckField]: { _eq: permissionCheck } }], - } as Filter, - }, - ], - }, - itemPermissions: actions.reduce((a, v) => ({ ...a, [v]: { access: v === action } }), {}), - selectCount: 1, - }, - ]) as Scenario[]; - - test('requires authentication', async () => { - const service = new PermissionsService({ - knex: db, - schema: baseSchema as SchemaOverview, - accountability: { user: null, role: null }, - }); - - const promise = service.getItemPermissions(collection, String(primaryKey)); - - await expect(promise).rejects.toThrow(`You don't have permission to access this.`); - }); - - const collectionTypes = ['collection', 'singleton']; - - describe.each(collectionTypes)('%s', (collectionType) => { - const schema = cloneDeep(baseSchema) as SchemaOverview; - if (collectionType === 'singleton') schema.collections[collection]!.singleton = true; - - describe('non-existing', () => { - const scenarios = [ - // 
full access for admin - adminScenario, - // no access for all other users - ...([...userScenarios, ...userConditionalScenarios].map(([scenario, config]) => [ - scenario, - { ...config, itemPermissions: noAccess }, - ]) as Scenario[]), - ]; - - describe.each(scenarios)('%s', (_, { accountability, itemPermissions }) => { - test('collection', async () => { - const service = new PermissionsService({ - knex: db, - schema: { collections: {}, relations: [] }, - accountability, - }); - - const result = await service.getItemPermissions( - collection, - collectionType === 'collection' ? String(primaryKey) : undefined, - ); - - expect(result).toEqual(itemPermissions); - }); - - test('item', async () => { - const service = new PermissionsService({ - knex: db, - schema, - accountability, - }); - - tracker.on.select(collection).response([]); - - const result = await service.getItemPermissions( - collection, - collectionType === 'collection' ? String(primaryKey) : undefined, - ); - - expect(result).toEqual(itemPermissions); - }); - }); - }); - - describe('existing item', () => { - beforeEach(() => { - if (collectionType === 'singleton') { - const checkSingletonStatement = `select "${collection}"."${primaryKeyField}" from "${collection}"`; - - tracker.on.select(checkSingletonStatement).responseOnce([{ [primaryKeyField]: primaryKey }]); - } - }); - - test.each([adminScenario, ...userScenarios])( - '%s', - async (_, { accountability, itemPermissions, selectCount }) => { - const service = new PermissionsService({ - knex: db, - schema, - accountability, - }); - - const checkPermissionStatement = - collectionType === 'collection' - ? 
`select "${collection}"."${primaryKeyField}", "${collection}"."${permissionCheckField}" from "${collection}" where ("${collection}"."${primaryKeyField}" = ?)` - : `select "${collection}"."${primaryKeyField}", "${collection}"."${permissionCheckField}" from "${collection}"`; - - tracker.on.select(checkPermissionStatement).response([{ [primaryKeyField]: primaryKey }]); - - const result = await service.getItemPermissions( - collection, - collectionType === 'collection' ? String(primaryKey) : undefined, - ); - - expect(tracker.history.all).toHaveLength( - collectionType === 'singleton' && !accountability.admin ? selectCount + 1 : selectCount, - ); - - expect(result).toEqual(itemPermissions); - }, - ); - - describe.each(userConditionalScenarios)('%s', (_, { accountability, itemPermissions, selectCount }) => { - const checkPermissionStatement = - collectionType === 'collection' - ? `select "${collection}"."${primaryKeyField}", "${collection}"."${permissionCheckField}" from "${collection}" where ("${collection}"."${primaryKeyField}" = ? and ("${collection}"."${permissionCheckField}" = ?))` - : `select "${collection}"."${primaryKeyField}", "${collection}"."${permissionCheckField}" from "${collection}" where (("${collection}"."${permissionCheckField}" = ?))`; - - test('matching condition', async () => { - const service = new PermissionsService({ - knex: db, - schema, - accountability, - }); - - tracker.on - .select(checkPermissionStatement) - .response([{ [primaryKeyField]: primaryKey, [permissionCheckField]: permissionCheck }]); - - const result = await service.getItemPermissions( - collection, - collectionType === 'collection' ? String(primaryKey) : undefined, - ); - - expect(tracker.history.all).toHaveLength(collectionType === 'singleton' ? 
selectCount + 1 : selectCount); - expect(result).toEqual(itemPermissions); - }); - - test('non-matching condition', async () => { - const service = new PermissionsService({ - knex: db, - schema, - accountability, - }); - - tracker.on.select(new RegExp(checkPermissionStatement)).response([]); - - const result = await service.getItemPermissions( - collection, - collectionType === 'collection' ? String(primaryKey) : undefined, - ); - - expect(tracker.history.all).toContainEqual( - expect.objectContaining({ sql: expect.stringContaining(checkPermissionStatement) }), - ); - - expect(result).toEqual(noAccess); - }); - }); - }); - }); - - describe('singleton', () => { - const schema = cloneDeep(baseSchema) as SchemaOverview; - schema.collections[collection]!.singleton = true; - - const permissionReadAccess = { ...permissionPreset, collection: 'directus_permissions', action: 'read' }; - - test('use create permission if singleton does not exist', async () => { - const permissions = [{ ...permissionPreset, action: 'create' }, permissionReadAccess] as Permission[]; - - const service = new PermissionsService({ - knex: db, - schema, - accountability: { - ...user, - permissions, - }, - }); - - tracker.on.select(collection).response([]); - - tracker.on.select('directus_permissions').response(permissions); - - vi.mocked(withAppMinimalPermissions).mockImplementation(() => permissions); - - const result = await service.getItemPermissions(collection); - - expect(result.update).toEqual({ - access: true, - presets: permissionPreset.presets, - fields: permissionPreset.fields, - }); - }); - - test('use update permission if singleton exists', async () => { - const permissions = [{ ...permissionPreset, action: 'update' }, permissionReadAccess] as Permission[]; - - const service = new PermissionsService({ - knex: db, - schema, - accountability: { - ...user, - permissions, - }, - }); - - const checkSingletonStatement = `select "${collection}"."${primaryKeyField}" from "${collection}"`; - 
tracker.on.select(checkSingletonStatement).responseOnce([{ [primaryKeyField]: primaryKey }]); - - const checkPermissionStatement = `select "${collection}"."${primaryKeyField}", "${collection}"."${permissionCheckField}" from "${collection}"`; - - tracker.on.select(checkPermissionStatement).response([{ [primaryKeyField]: primaryKey }]); - - tracker.on.select('directus_permissions').response(permissions); - - vi.mocked(withAppMinimalPermissions).mockImplementation(() => permissions); - - const result = await service.getItemPermissions(collection); - - expect(result.update).toEqual({ - access: true, - presets: permissionPreset.presets, - fields: permissionPreset.fields, - }); - }); - - test('requires permissions on directus_permissions to return presets and fields', async () => { - const permissions = [{ ...permissionPreset, action: 'update' }] as Permission[]; - - const service = new PermissionsService({ - knex: db, - schema, - accountability: { - ...user, - permissions, - }, - }); - - const checkSingletonStatement = `select "${collection}"."${primaryKeyField}" from "${collection}"`; - tracker.on.select(checkSingletonStatement).responseOnce([{ [primaryKeyField]: primaryKey }]); - - const checkPermissionStatement = `select "${collection}"."${primaryKeyField}", "${collection}"."${permissionCheckField}" from "${collection}"`; - - tracker.on.select(checkPermissionStatement).response([{ [primaryKeyField]: primaryKey }]); - - tracker.on.select('directus_permissions').response(permissions); - - const result = await service.getItemPermissions(collection); - - expect(result.update).toEqual({ access: true }); - }); - }); - }); -}); diff --git a/api/src/services/permissions/lib/with-app-minimal-permissions.ts b/api/src/services/permissions/lib/with-app-minimal-permissions.ts deleted file mode 100644 index 2b196771e1..0000000000 --- a/api/src/services/permissions/lib/with-app-minimal-permissions.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { appAccessMinimalPermissions } from 
'@directus/system-data'; -import type { Accountability, Permission, Query } from '@directus/types'; -import { filterItems } from '../../../utils/filter-items.js'; -import { mergePermissions } from '../../../utils/merge-permissions.js'; - -export function withAppMinimalPermissions( - accountability: Accountability | null, - permissions: Permission[], - filter: Query['filter'], -): Permission[] { - if (accountability?.app === true) { - const filteredAppMinimalPermissions = filterItems( - appAccessMinimalPermissions.map((permission) => ({ - ...permission, - role: accountability.role, - })), - filter, - ); - - return mergePermissions('or', permissions, filteredAppMinimalPermissions); - } - - return permissions; -} diff --git a/api/src/services/policies.test.ts b/api/src/services/policies.test.ts new file mode 100644 index 0000000000..40a8462a61 --- /dev/null +++ b/api/src/services/policies.test.ts @@ -0,0 +1,1099 @@ +import { test } from 'vitest'; + +// TODO Old tests for RolesServices - adapt for PoliciesService +test.todo('unimplemented test'); + +// import { ForbiddenError, UnprocessableContentError } from '@directus/errors'; +// import { randomUUID } from '@directus/random'; +// import type { SchemaOverview } from '@directus/types'; +// import type { Knex } from 'knex'; +// import knex from 'knex'; +// import { MockClient, Tracker, createTracker, type RawQuery } from 'knex-mock-client'; +// import { +// afterEach, +// beforeAll, +// beforeEach, +// describe, +// expect, +// it, +// vi, +// type MockInstance, +// type MockedFunction, +// } from 'vitest'; +// import { ItemsService, PermissionsService, PresetsService, RolesService, UsersService } from './index.js'; + +// vi.mock('../../src/database/index', () => ({ +// default: vi.fn(), +// getDatabaseClient: vi.fn().mockReturnValue('postgres'), +// })); + +// const testSchema = { +// collections: { +// directus_roles: { +// collection: 'directus_roles', +// primary: 'id', +// singleton: false, +// sortField: null, 
+// note: null, +// accountability: null, +// fields: { +// id: { +// field: 'id', +// defaultValue: null, +// nullable: false, +// generated: true, +// type: 'uuid', +// dbType: 'uuid', +// precision: null, +// scale: null, +// special: [], +// note: null, +// validation: null, +// alias: false, +// }, +// }, +// }, +// }, +// relations: [], +// } as SchemaOverview; + +// describe('Integration Tests', () => { +// let db: MockedFunction; +// let tracker: Tracker; + +// beforeAll(async () => { +// db = vi.mocked(knex.default({ client: MockClient })); +// tracker = createTracker(db); +// }); + +// beforeEach(() => { +// tracker.on.any('directus_roles').response({}); + +// tracker.on +// .select(/"directus_roles"."id" from "directus_roles" order by "directus_roles"."id" asc limit .*/) +// .response([]); + +// tracker.on +// .select( +// /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, +// ) +// .response([{ count: 0, admin_access: true, app_access: true }]); +// }); + +// afterEach(() => { +// tracker.reset(); +// }); + +// describe('Services / RolesService', () => { +// describe('updateOne', () => { +// let service: RolesService; +// let superUpdateOne: MockInstance; +// const adminRoleId = 'cbfd1e77-b883-4090-93e4-5bcbfbd48aba'; +// const userId1 = '07a5fee0-c168-49e2-8e33-4bae280e0c48'; +// const userId2 = 'abedf9a4-6956-4a9c-8904-c1aa08a68173'; + +// beforeEach(() => { +// service = new RolesService({ +// knex: db, +// schema: testSchema, +// }); + +// superUpdateOne = vi.spyOn(ItemsService.prototype, 'updateOne'); +// }); + +// afterEach(() => { +// superUpdateOne.mockRestore(); +// }); + +// describe('checkForOtherAdminUsers', () => { +// describe('on an admin role', () => { +// const admin_access = true; + +// describe('with an array of user ids', () => { +// it('having an added user', async () => { +// const data: Record = { +// 
users: [userId1, userId2], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having a removed user', async () => { +// const data: Record = { +// users: [userId1], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); + +// tracker.on +// .select('select "id" from "directus_users" where "role" = ?') +// .responseOnce([{ id: userId1 }, { id: userId2 }]); + +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having a removed last user that is not the last admin of system', async () => { +// const data: Record = { +// users: [], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having a removed a last user that is the last admin of system', async () => { +// const service = new RolesService({ +// knex: db, +// schema: testSchema, +// accountability: { role: 'test', admin: false }, +// }); + +// const data: Record = { +// users: [], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ 
admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 0 }); + +// const promise = service.updateOne(adminRoleId, data); + +// expect.assertions(5); // to ensure both assertions in the catch block are reached + +// try { +// await promise; +// } catch (err: any) { +// expect(err.message).toBe(`You don't have permission to access this.`); +// expect(err).toBeInstanceOf(ForbiddenError); +// } + +// expect(superUpdateOne).toHaveBeenCalled(); + +// expect(superUpdateOne.mock.lastCall![2].preMutationError.message).toBe( +// `Can't process content. You can't remove the last admin user from the admin role.`, +// ); + +// expect(superUpdateOne.mock.lastCall![2].preMutationError).toBeInstanceOf(UnprocessableContentError); +// }); +// }); + +// describe('with an array of user objects', () => { +// it('having an added user', async () => { +// const data: Record = { +// users: [{ id: userId1 }, { id: userId2 }], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); + +// tracker.on +// .select('select count(*) as "count" from "directus_users" where "id" in') +// .responseOnce({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having a removed user', async () => { +// const data: Record = { +// users: [{ id: userId1 }], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); + +// tracker.on +// .select('select "id" from "directus_users" where "role" = ?') +// .responseOnce([{ id: userId1 }, { id: userId2 }]); + +// tracker.on.select('select count(*) as "count" from 
"directus_users"').responseOnce({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having a removed last user that is not the last admin of system', async () => { +// const data: Record = { +// users: [], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having a removed a last user that is the last admin of system', async () => { +// const service = new RolesService({ +// knex: db, +// schema: testSchema, +// accountability: { role: 'test', admin: false }, +// }); + +// const data: Record = { +// users: [], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 0 }); + +// const promise = service.updateOne(adminRoleId, data); + +// expect.assertions(5); // to ensure both assertions in the catch block are reached + +// try { +// await promise; +// } catch (err: any) { +// expect(err.message).toBe(`You don't have permission to access this.`); +// expect(err).toBeInstanceOf(ForbiddenError); +// } + +// expect(superUpdateOne).toHaveBeenCalled(); + +// expect(superUpdateOne.mock.lastCall![2].preMutationError.message).toBe( +// `Can't process content. 
You can't remove the last admin user from the admin role.`, +// ); + +// expect(superUpdateOne.mock.lastCall![2].preMutationError).toBeInstanceOf(UnprocessableContentError); +// }); +// }); + +// describe('with an alterations object', () => { +// it('having a newly created user', async () => { +// const data: Record = { +// users: { +// create: [{ name: 'New User' }], +// update: [], +// delete: [], +// }, +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having an added user', async () => { +// const data: Record = { +// users: { +// create: [], +// update: [{ role: adminRoleId, id: userId2 }], +// delete: [], +// }, +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having a removed user', async () => { +// const data: Record = { +// users: { +// create: [], +// update: [], +// delete: [userId2], +// }, +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); + +// tracker.on +// .select('select "id" from "directus_users" where "role" = ?') +// .responseOnce([{ id: userId1 }, { id: userId2 }]); + +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); + +// const result = await 
service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having a removed last user that is not the last admin of system', async () => { +// const data: Record = { +// users: { +// create: [], +// update: [], +// delete: [userId1], +// }, +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having a removed a last user that is the last admin of system', async () => { +// const service = new RolesService({ +// knex: db, +// schema: testSchema, +// accountability: { role: 'test', admin: false }, +// }); + +// const data: Record = { +// users: { +// create: [], +// update: [], +// delete: [userId1], +// }, +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 0 }); + +// const promise = service.updateOne(adminRoleId, data); + +// expect.assertions(5); // to ensure both assertions in the catch block are reached + +// try { +// await promise; +// } catch (err: any) { +// expect(err.message).toBe(`You don't have permission to access this.`); +// expect(err).toBeInstanceOf(ForbiddenError); +// } + +// expect(superUpdateOne).toHaveBeenCalled(); + +// expect(superUpdateOne.mock.lastCall![2].preMutationError.message).toBe( +// `Can't process content. 
You can't remove the last admin user from the admin role.`, +// ); + +// expect(superUpdateOne.mock.lastCall![2].preMutationError).toBeInstanceOf(UnprocessableContentError); +// }); +// }); +// }); + +// describe('on an non-admin role', () => { +// const admin_access = false; + +// describe('with an array of user ids', () => { +// it('having an added user', async () => { +// const data: Record = { +// users: [userId1, userId2], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); + +// tracker.on +// .select( +// ({ sql, bindings }: RawQuery) => +// sql.startsWith('select count(*) as "count" from "directus_users"') && bindings.includes(userId2), +// ) +// .response({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); + +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having an added user that is the last admin', async () => { +// const service = new RolesService({ +// knex: db, +// schema: testSchema, +// accountability: { role: 'test', admin: false }, +// }); + +// const data: Record = { +// users: [userId1, userId2], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 0 }); + +// const promise = service.updateOne(adminRoleId, data); + +// expect.assertions(5); // to ensure both assertions in the catch block are reached + +// try { +// await promise; +// } catch (err: any) { +// expect(err.message).toBe(`You don't have permission to access this.`); +// expect(err).toBeInstanceOf(ForbiddenError); +// } + +// expect(superUpdateOne).toHaveBeenCalled(); + +// 
expect(superUpdateOne.mock.lastCall![2].preMutationError.message).toBe( +// `Can't process content. You can't remove the last admin user from the admin role.`, +// ); + +// expect(superUpdateOne.mock.lastCall![2].preMutationError).toBeInstanceOf(UnprocessableContentError); +// }); + +// it('having a removed user', async () => { +// const data: Record = { +// users: [userId1], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); + +// tracker.on +// .select('select "id" from "directus_users" where "role" = ?') +// .responseOnce([{ id: userId1 }, { id: userId2 }]); + +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); +// }); + +// describe('with an array of user objects', () => { +// it('having an added user', async () => { +// const data: Record = { +// users: [{ id: userId1 }, { id: userId2 }], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having an added user that is the last admin', async () => { +// const service = new RolesService({ +// knex: db, +// schema: testSchema, +// accountability: { role: 'test', admin: false }, +// }); + +// const data: Record = { +// users: [{ id: userId1 }, { id: userId2 }], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from 
"directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 0 }); + +// const promise = service.updateOne(adminRoleId, data); + +// expect.assertions(5); // to ensure both assertions in the catch block are reached + +// try { +// await promise; +// } catch (err: any) { +// expect(err.message).toBe(`You don't have permission to access this.`); +// expect(err).toBeInstanceOf(ForbiddenError); +// } + +// expect(superUpdateOne).toHaveBeenCalled(); + +// expect(superUpdateOne.mock.lastCall![2].preMutationError.message).toBe( +// `Can't process content. You can't remove the last admin user from the admin role.`, +// ); + +// expect(superUpdateOne.mock.lastCall![2].preMutationError).toBeInstanceOf(UnprocessableContentError); +// }); + +// it('having a removed user', async () => { +// const data: Record = { +// users: [{ id: userId1 }], +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); + +// tracker.on +// .select('select "id" from "directus_users" where "role" = ?') +// .responseOnce([{ id: userId1 }, { id: userId2 }]); + +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); +// }); + +// describe('with an alterations object', () => { +// it('having a newly created user', async () => { +// const data: Record = { +// users: { +// create: [{ name: 'New User' }], +// update: [], +// delete: [], +// }, +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ 
count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having an added user', async () => { +// const data: Record = { +// users: { +// create: [], +// update: [{ role: adminRoleId, id: userId2 }], +// delete: [], +// }, +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); + +// it('having an added user that is the last admin', async () => { +// const service = new RolesService({ +// knex: db, +// schema: testSchema, +// accountability: { role: 'test', admin: false }, +// }); + +// const data: Record = { +// users: { +// create: [], +// update: [{ role: adminRoleId, id: userId2 }], +// delete: [], +// }, +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); +// tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 0 }); + +// const promise = service.updateOne(adminRoleId, data); + +// expect.assertions(5); // to ensure both assertions in the catch block are reached + +// try { +// await promise; +// } catch (err: any) { +// expect(err.message).toBe(`You don't have permission to access this.`); +// expect(err).toBeInstanceOf(ForbiddenError); +// } + +// expect(superUpdateOne).toHaveBeenCalled(); + +// expect(superUpdateOne.mock.lastCall![2].preMutationError.message).toBe( +// `Can't process content. 
You can't remove the last admin user from the admin role.`, +// ); + +// expect(superUpdateOne.mock.lastCall![2].preMutationError).toBeInstanceOf(UnprocessableContentError); +// }); + +// it('having a removed user', async () => { +// const data: Record = { +// users: { +// create: [], +// update: [], +// delete: [userId2], +// }, +// }; + +// tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); + +// tracker.on +// .select('select "id" from "directus_users" where "role" = ?') +// .responseOnce([{ id: userId1 }, { id: userId2 }]); + +// tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); + +// const result = await service.updateOne(adminRoleId, data); +// expect(result).toBe(adminRoleId); +// expect(superUpdateOne).toHaveBeenCalledOnce(); +// }); +// }); +// }); +// }); +// }); +// }); + +// describe('Services / Roles', () => { +// let service: RolesService; +// let checkForOtherAdminRolesSpy: MockInstance; +// let checkForOtherAdminUsersSpy: MockInstance; + +// beforeEach(() => { +// service = new RolesService({ +// knex: db, +// schema: testSchema +// }); + +// vi.spyOn(PermissionsService.prototype, 'deleteByQuery').mockResolvedValueOnce([]); +// vi.spyOn(PresetsService.prototype, 'deleteByQuery').mockResolvedValueOnce([]); +// vi.spyOn(UsersService.prototype, 'updateByQuery').mockResolvedValueOnce([]); +// vi.spyOn(UsersService.prototype, 'deleteByQuery').mockResolvedValueOnce([]); + +// // "as any" are needed since these are private methods +// checkForOtherAdminRolesSpy = vi +// .spyOn(RolesService.prototype as any, 'checkForOtherAdminRoles') +// .mockResolvedValue(true); + +// checkForOtherAdminUsersSpy = vi +// .spyOn(RolesService.prototype as any, 'checkForOtherAdminUsers') +// .mockResolvedValue(true); +// }); + +// afterEach(() => { +// checkForOtherAdminRolesSpy.mockRestore(); +// checkForOtherAdminUsersSpy.mockRestore(); +// }); + +// describe('createOne', () => { 
+// it('should not checkForOtherAdminRoles', async () => { +// await service.createOne({}); +// expect(checkForOtherAdminRolesSpy).not.toBeCalled(); +// }); + +// it('should throw due to invalid ip_access', async () => { +// await expect(service.createOne({ ip_access: ['invalid_ip'] })).rejects.toThrow( +// 'IP Access contains an incorrect value. Valid values are: IP addresses, IP ranges and CIDR blocks', +// ); +// }); +// }); + +// describe('createMany', () => { +// it('should not checkForOtherAdminRoles', async () => { +// await service.createMany([{}]); +// expect(checkForOtherAdminRolesSpy).not.toBeCalled(); +// }); + +// it('should throw due to invalid ip_access', async () => { +// await expect(service.createMany([{ ip_access: ['invalid_ip'] }])).rejects.toThrow( +// 'IP Access contains an incorrect value. Valid values are: IP addresses, IP ranges and CIDR blocks', +// ); +// }); +// }); + +// describe('updateOne', () => { +// it('should not checkForOtherAdminRoles', async () => { +// await service.updateOne(1, {}); +// expect(checkForOtherAdminRolesSpy).not.toBeCalled(); +// }); + +// it('should checkForOtherAdminRoles once and not checkForOtherAdminUsersSpy', async () => { +// await service.updateOne(1, { admin_access: false }); +// expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); +// expect(checkForOtherAdminUsersSpy).not.toBeCalled(); +// }); + +// it('should checkForOtherAdminRoles and checkForOtherAdminUsersSpy once', async () => { +// await service.updateOne(1, { admin_access: false, users: [1] }); +// expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); +// expect(checkForOtherAdminUsersSpy).toBeCalledTimes(1); +// }); + +// it('should throw due to invalid ip_access', async () => { +// await expect(service.updateOne(1, { ip_access: ['invalid_ip'] })).rejects.toThrow( +// 'IP Access contains an incorrect value. 
Valid values are: IP addresses, IP ranges and CIDR blocks', +// ); +// }); +// }); + +// describe('updateMany', () => { +// it('should not checkForOtherAdminRoles', async () => { +// await service.updateMany([1], {}); +// expect(checkForOtherAdminRolesSpy).not.toBeCalled(); +// }); + +// it('should checkForOtherAdminRoles once', async () => { +// await service.updateMany([1], { admin_access: false }); +// expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); +// }); + +// it('should throw due to invalid ip_access', async () => { +// await expect(service.updateMany([1], { ip_access: ['invalid_ip'] })).rejects.toThrow( +// 'IP Access contains an incorrect value. Valid values are: IP addresses, IP ranges and CIDR blocks', +// ); +// }); +// }); + +// describe('updateBatch', () => { +// it('should not checkForOtherAdminRoles', async () => { +// await service.updateBatch([{ id: 1 }]); +// expect(checkForOtherAdminRolesSpy).not.toBeCalled(); +// }); + +// it('should checkForOtherAdminRoles once', async () => { +// await service.updateBatch([{ id: 1, admin_access: false }]); +// expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); +// }); + +// it('should throw due to invalid ip_access', async () => { +// await expect(service.updateBatch([{ id: 1, ip_access: ['invalid_ip'] }])).rejects.toThrow( +// 'IP Access contains an incorrect value. 
Valid values are: IP addresses, IP ranges and CIDR blocks', +// ); +// }); +// }); + +// describe('updateByQuery', () => { +// it('should not checkForOtherAdminRoles', async () => { +// // mock return value for the following empty query +// vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValueOnce([1]); +// await service.updateByQuery({}, {}); +// expect(checkForOtherAdminRolesSpy).not.toBeCalled(); +// }); + +// it('should checkForOtherAdminRoles once', async () => { +// // mock return value for the following empty query +// vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValueOnce([1]); +// await service.updateByQuery({}, { admin_access: false }); +// expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); +// }); + +// it('should throw due to invalid ip_access', async () => { +// await expect(service.updateByQuery({}, { ip_access: ['invalid_ip'] })).rejects.toThrow( +// 'IP Access contains an incorrect value. Valid values are: IP addresses, IP ranges and CIDR blocks', +// ); +// }); +// }); + +// describe('deleteOne', () => { +// it('should checkForOtherAdminRoles once', async () => { +// await service.deleteOne(1); +// expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); +// }); +// }); + +// describe('deleteMany', () => { +// it('should checkForOtherAdminRoles once', async () => { +// await service.deleteMany([1]); +// expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); +// }); +// }); + +// describe('deleteByQuery', () => { +// it('should checkForOtherAdminRoles once', async () => { +// // mock return value for the following empty query +// vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValueOnce([1]); +// await service.deleteByQuery({}); +// expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); +// }); +// }); +// }); + +// describe('User Limits Tests', () => { +// let service: RolesService; +// let checkForOtherAdminRolesSpy: MockInstance; +// let checkForOtherAdminUsersSpy: MockInstance; + +// 
beforeEach(() => { +// service = new RolesService({ +// knex: db, +// schema: testSchema, +// }); + +// vi.mocked(checkUserLimits).mockReset(); +// vi.mocked(getRoleCountsByUsers).mockReset(); +// vi.mocked(getUserCountsByRoles).mockReset(); + +// // "as any" are needed since these are private methods +// checkForOtherAdminRolesSpy = vi +// .spyOn(RolesService.prototype as any, 'checkForOtherAdminRoles') +// .mockResolvedValueOnce(true); + +// checkForOtherAdminUsersSpy = vi +// .spyOn(RolesService.prototype as any, 'checkForOtherAdminUsers') +// .mockResolvedValueOnce(true); +// }); + +// afterEach(() => { +// checkForOtherAdminRolesSpy.mockRestore(); +// checkForOtherAdminUsersSpy.mockRestore(); +// }); + +// describe('createOne', () => { +// it('calculates the number of increased admin users', async () => { +// await service.createOne({ admin_access: true, app_access: true, users: [1, 2, 3] }); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 3, app: 0, api: 0 }, []); +// }); + +// it('calculates the number of increased app users', async () => { +// await service.createOne({ admin_access: false, app_access: true, users: [1, 2, 3] }); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 0, app: 3, api: 0 }, []); +// }); + +// it('calculates the number of increased api users', async () => { +// await service.createOne({ admin_access: false, app_access: false, users: [1, 2, 3] }); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 0, app: 0, api: 3 }, []); +// }); + +// it('skips user limits check when no limit is set', async () => { +// vi.mocked(shouldCheckUserLimits).mockReturnValue(false); + +// await service.createOne({ admin_access: true, app_access: true, users: [1, 2, 3] }); + +// expect(checkUserLimits).not.toBeCalled(); +// }); +// }); + +// describe('createMany', () => { +// it('calculates the number of increased admin users', async () => { +// await service.createMany([ +// { admin_access: true, app_access: true, users: [1] }, +// { 
admin_access: true, app_access: true, users: [2, 3] }, +// { admin_access: true, app_access: true, users: [4, 5, 6] }, +// ]); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 6, app: 0, api: 0 }, []); +// }); + +// it('calculates the number of increased app users', async () => { +// await service.createMany([ +// { admin_access: false, app_access: true, users: [1] }, +// { admin_access: false, app_access: true, users: [2, 3] }, +// { admin_access: false, app_access: true, users: [4, 5, 6] }, +// ]); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 0, app: 6, api: 0 }, []); +// }); + +// it('calculates the number of increased api users', async () => { +// await service.createMany([ +// { admin_access: false, app_access: false, users: [1] }, +// { admin_access: false, app_access: false, users: [2, 3] }, +// { admin_access: false, app_access: false, users: [4, 5, 6] }, +// ]); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 0, app: 0, api: 6 }, []); +// }); + +// it('skips user limits check when no limit is set', async () => { +// vi.mocked(shouldCheckUserLimits).mockReturnValue(false); + +// await service.createMany([ +// { admin_access: true, app_access: true, users: [1] }, +// { admin_access: true, app_access: true, users: [2, 3] }, +// { admin_access: true, app_access: true, users: [4, 5, 6] }, +// ]); + +// expect(checkUserLimits).not.toBeCalled(); +// }); +// }); + +// describe('updateOne', () => { +// it('calculates the number of increased admin users', async () => { +// tracker.resetHandlers(); + +// tracker.on +// .select( +// /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, +// ) +// .responseOnce([{ count: 2, admin_access: true, app_access: true }]); + +// vi.mocked(getRoleCountsByUsers).mockResolvedValue({ admin: 2, app: 0, api: 0 }); +// vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 
11, app: 0, api: 0 }); + +// await service.updateOne(randomUUID(), { +// admin_access: true, +// app_access: true, +// users: [1, 2, 3, 4, 5], +// }); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 3, app: 0, api: 0 }, []); +// }); + +// it('calculates the number of increased admin users with access change', async () => { +// tracker.resetHandlers(); + +// tracker.on +// .select( +// /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, +// ) +// .responseOnce([{ count: 2, admin_access: false, app_access: true }]); + +// vi.mocked(getRoleCountsByUsers).mockResolvedValue({ admin: 2, app: 0, api: 0 }); +// vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 11, app: 0, api: 0 }); + +// await service.updateOne(randomUUID(), { +// admin_access: true, +// app_access: true, +// users: [1, 2, 3, 4, 5], +// }); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 5, app: 0, api: 0 }, []); +// }); + +// it('calculates the number of increased app users', async () => { +// tracker.resetHandlers(); + +// tracker.on +// .select( +// /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, +// ) +// .responseOnce([{ count: 2, admin_access: false, app_access: true }]); + +// vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 0, app: 22, api: 0 }); + +// await service.updateOne(randomUUID(), { +// admin_access: false, +// app_access: true, +// users: [1, 2, 3, 4, 5], +// }); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 0, app: 3, api: 0 }, []); +// }); + +// it('calculates the number of increased app users with access change', async () => { +// tracker.resetHandlers(); + +// tracker.on +// .select( +// /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", 
"directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, +// ) +// .responseOnce([{ count: 2, admin_access: false, app_access: false }]); + +// vi.mocked(getRoleCountsByUsers).mockResolvedValue({ admin: 0, app: 2, api: 0 }); +// vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 0, app: 22, api: 0 }); + +// await service.updateOne(randomUUID(), { +// admin_access: false, +// app_access: true, +// users: [1, 2, 3, 4, 5], +// }); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 0, app: 5, api: 0 }, []); +// }); + +// it('calculates the number of increased api users', async () => { +// tracker.resetHandlers(); + +// tracker.on +// .select( +// /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, +// ) +// .responseOnce([{ count: 2, admin_access: false, app_access: false }]); + +// vi.mocked(getRoleCountsByUsers).mockResolvedValue({ admin: 0, app: 0, api: 2 }); +// vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 0, app: 0, api: 33 }); + +// await service.updateOne(randomUUID(), { +// admin_access: false, +// app_access: false, +// users: [1, 2, 3, 4, 5], +// }); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 0, app: 0, api: 3 }, []); +// }); + +// it('calculates the number of increased api users with access change', async () => { +// tracker.resetHandlers(); + +// tracker.on +// .select( +// /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, +// ) +// .responseOnce([{ count: 2, admin_access: false, app_access: true }]); + +// vi.mocked(getRoleCountsByUsers).mockResolvedValue({ admin: 0, app: 0, api: 2 }); +// vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 0, app: 0, api: 33 }); + +// await service.updateOne(randomUUID(), { +// 
admin_access: false, +// app_access: false, +// users: [1, 2, 3, 4, 5], +// }); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 0, app: 0, api: 5 }, []); +// }); + +// it('skips user limits check when no limit is set', async () => { +// vi.mocked(shouldCheckUserLimits).mockReturnValue(false); + +// await service.updateOne(randomUUID(), { +// admin_access: false, +// app_access: true, +// users: [1, 2, 3, 4, 5], +// }); + +// expect(checkUserLimits).not.toBeCalled(); +// }); +// }); + +// describe('updateMany', () => { +// it('calculates the number of increased admin users', async () => { +// vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 11, app: 22, api: 33 }); + +// await service.updateMany([randomUUID(), randomUUID(), randomUUID()], { +// admin_access: true, +// app_access: true, +// }); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 55, app: 0, api: 0 }); +// }); + +// it('calculates the number of increased app users', async () => { +// vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 11, app: 22, api: 33 }); + +// await service.updateMany([randomUUID(), randomUUID(), randomUUID()], { +// admin_access: false, +// app_access: true, +// }); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 0, app: 44, api: 0 }); +// }); + +// it('calculates the number of increased api users', async () => { +// vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 11, app: 22, api: 33 }); + +// await service.updateMany([randomUUID(), randomUUID(), randomUUID()], { +// admin_access: false, +// app_access: false, +// }); + +// expect(checkUserLimits).toBeCalledWith(db, { admin: 0, app: 0, api: 33 }); +// }); + +// it('skips user limits check when no limit is set', async () => { +// vi.mocked(shouldCheckUserLimits).mockReturnValue(false); + +// await service.updateMany([randomUUID(), randomUUID(), randomUUID()], { +// admin_access: false, +// app_access: true, +// }); + +// expect(checkUserLimits).not.toBeCalled(); +// }); +// 
}); +// }); +// }); diff --git a/api/src/services/policies.ts b/api/src/services/policies.ts new file mode 100644 index 0000000000..1a7a1623f8 --- /dev/null +++ b/api/src/services/policies.ts @@ -0,0 +1,112 @@ +import { InvalidPayloadError } from '@directus/errors'; +import type { Policy, PrimaryKey } from '@directus/types'; +import { getMatch } from 'ip-matching'; +import { clearSystemCache } from '../cache.js'; +import { clearCache as clearPermissionsCache } from '../permissions/cache.js'; +import type { AbstractServiceOptions, MutationOptions } from '../types/index.js'; +import { UserIntegrityCheckFlag } from '../utils/validate-user-count-integrity.js'; +import { ItemsService } from './items.js'; + +export class PoliciesService extends ItemsService { + constructor(options: AbstractServiceOptions) { + super('directus_policies', options); + } + + private async clearCaches(opts?: MutationOptions) { + await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache }); + + if (this.cache && opts?.autoPurgeCache !== false) { + await this.cache.clear(); + } + } + + private isIpAccessValid(value?: any[] | null): boolean { + if (value === undefined) return false; + if (value === null) return true; + if (Array.isArray(value) && value.length === 0) return true; + + for (const ip of value) { + if (typeof ip !== 'string' || ip.includes('*')) return false; + + try { + const match = getMatch(ip); + if (match.type == 'IPMask') return false; + } catch { + return false; + } + } + + return true; + } + + private assertValidIpAccess(partialItem: Partial): void { + if ('ip_access' in partialItem && !this.isIpAccessValid(partialItem['ip_access'])) { + throw new InvalidPayloadError({ + reason: 'IP Access contains an incorrect value. 
Valid values are: IP addresses, IP ranges and CIDR blocks', + }); + } + } + + override async createOne(data: Partial, opts: MutationOptions = {}): Promise { + this.assertValidIpAccess(data); + + // A policy has been created, but the attachment to a user/role happens in the AccessService, + // so no need to check user integrity + + const result = await super.createOne(data, opts); + + // TODO is this necessary? Since the attachment should be handled in the AccessService + // A new policy has created, clear the permissions cache + await clearPermissionsCache(); + + return result; + } + + override async updateMany( + keys: PrimaryKey[], + data: Partial, + opts: MutationOptions = {}, + ): Promise { + this.assertValidIpAccess(data); + + if ('admin_access' in data) { + let flags = UserIntegrityCheckFlag.RemainingAdmins; + + if (data['admin_access'] === true) { + // Only need to perform a full user count if the policy allows admin access + flags |= UserIntegrityCheckFlag.All; + } + + opts.userIntegrityCheckFlags = (opts.userIntegrityCheckFlags ?? UserIntegrityCheckFlag.None) | flags; + } + + if ('app_access' in data) { + opts.userIntegrityCheckFlags = + (opts.userIntegrityCheckFlags ?? UserIntegrityCheckFlag.None) | UserIntegrityCheckFlag.UserLimits; + } + + if (opts.userIntegrityCheckFlags) opts.onRequireUserIntegrityCheck?.(opts.userIntegrityCheckFlags); + + const result = await super.updateMany(keys, data, opts); + + if ('admin_access' in data || 'app_access' in data || 'ip_access' in data || 'enforce_tfa' in data) { + // Some relevant properties on policies have been updated, clear the caches + await this.clearCaches(opts); + } + + return result; + } + + override async deleteMany(keys: PrimaryKey[], opts: MutationOptions = {}): Promise { + opts.userIntegrityCheckFlags = UserIntegrityCheckFlag.All; + opts.onRequireUserIntegrityCheck?.(opts.userIntegrityCheckFlags); + + const result = await super.deleteMany(keys, opts); + + // TODO is this necessary? 
Since the detachment should be handled in the AccessService + // Some policies have been deleted, clear the permissions cache + await this.clearCaches(opts); + + return result; + } +} diff --git a/api/src/services/relations.ts b/api/src/services/relations.ts index a209f3c0e4..f93e5faf29 100644 --- a/api/src/services/relations.ts +++ b/api/src/services/relations.ts @@ -1,3 +1,4 @@ +import { useEnv } from '@directus/env'; import { ForbiddenError, InvalidPayloadError } from '@directus/errors'; import type { ForeignKey, SchemaInspector } from '@directus/schema'; import { createInspector } from '@directus/schema'; @@ -11,19 +12,19 @@ import type { Helpers } from '../database/helpers/index.js'; import { getHelpers } from '../database/helpers/index.js'; import getDatabase, { getSchemaInspector } from '../database/index.js'; import emitter from '../emitter.js'; +import { fetchAllowedFieldMap } from '../permissions/modules/fetch-allowed-field-map/fetch-allowed-field-map.js'; +import { fetchAllowedFields } from '../permissions/modules/fetch-allowed-fields/fetch-allowed-fields.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; import type { AbstractServiceOptions, ActionEventParams, MutationOptions } from '../types/index.js'; import { getDefaultIndexName } from '../utils/get-default-index-name.js'; import { getSchema } from '../utils/get-schema.js'; import { transaction } from '../utils/transaction.js'; import { ItemsService, type QueryOptions } from './items.js'; -import { PermissionsService } from './permissions/index.js'; -import { useEnv } from '@directus/env'; const env = useEnv(); export class RelationsService { knex: Knex; - permissionsService: PermissionsService; schemaInspector: SchemaInspector; accountability: Accountability | null; schema: SchemaOverview; @@ -34,7 +35,6 @@ export class RelationsService { constructor(options: AbstractServiceOptions) { this.knex = options.knex || getDatabase(); - this.permissionsService 
= new PermissionsService(options); this.schemaInspector = options.knex ? createInspector(options.knex) : getSchemaInspector(); this.schema = options.schema; this.accountability = options.accountability || null; @@ -78,8 +78,18 @@ export class RelationsService { } async readAll(collection?: string, opts?: QueryOptions): Promise { - if (this.accountability && this.accountability.admin !== true && this.hasReadAccess === false) { - throw new ForbiddenError(); + if (this.accountability) { + await validateAccess( + { + accountability: this.accountability, + action: 'read', + collection: 'directus_relations', + }, + { + knex: this.knex, + schema: this.schema, + }, + ); } const metaReadQuery: Query = { @@ -109,20 +119,26 @@ export class RelationsService { async readOne(collection: string, field: string): Promise { if (this.accountability && this.accountability.admin !== true) { - if (this.hasReadAccess === false) { + await validateAccess( + { + accountability: this.accountability, + action: 'read', + collection: 'directus_relations', + }, + { + schema: this.schema, + knex: this.knex, + }, + ); + + const allowedFields = await fetchAllowedFields( + { collection, action: 'read', accountability: this.accountability }, + { schema: this.schema, knex: this.knex }, + ); + + if (allowedFields.includes('*') === false && allowedFields.includes(field) === false) { throw new ForbiddenError(); } - - const permissions = this.accountability.permissions?.find((permission) => { - return permission.action === 'read' && permission.collection === collection; - }); - - if (!permissions || !permissions.fields) throw new ForbiddenError(); - - if (permissions.fields.includes('*') === false) { - const allowedFields = permissions.fields; - if (allowedFields.includes(field) === false) throw new ForbiddenError(); - } } const metaRow = await this.relationsItemService.readByQuery({ @@ -489,15 +505,6 @@ export class RelationsService { } } - /** - * Whether or not the current user has read access to 
relations - */ - private get hasReadAccess() { - return !!this.accountability?.permissions?.find((permission) => { - return permission.collection === 'directus_relations' && permission.action === 'read'; - }); - } - /** * Combine raw schema foreign key information with Directus relations meta rows to form final * Relation objects @@ -548,14 +555,15 @@ export class RelationsService { private async filterForbidden(relations: Relation[]): Promise { if (this.accountability === null || this.accountability?.admin === true) return relations; - const allowedCollections = - this.accountability.permissions - ?.filter((permission) => { - return permission.action === 'read'; - }) - .map(({ collection }) => collection) ?? []; + const allowedFields = await fetchAllowedFieldMap( + { + accountability: this.accountability, + action: 'read', + }, + { schema: this.schema, knex: this.knex }, + ); - const allowedFields = this.permissionsService.getAllowedFields('read'); + const allowedCollections = Object.keys(allowedFields); relations = toArray(relations); diff --git a/api/src/services/roles.test.ts b/api/src/services/roles.test.ts index 09d2194877..a0a13570a9 100644 --- a/api/src/services/roles.test.ts +++ b/api/src/services/roles.test.ts @@ -1,45 +1,17 @@ -import { ForbiddenError, UnprocessableContentError } from '@directus/errors'; import { randomUUID } from '@directus/random'; import type { SchemaOverview } from '@directus/types'; -import type { Knex } from 'knex'; import knex from 'knex'; -import { MockClient, Tracker, createTracker, type RawQuery } from 'knex-mock-client'; -import { - afterEach, - beforeAll, - beforeEach, - describe, - expect, - it, - vi, - type MockInstance, - type MockedFunction, -} from 'vitest'; +import { MockClient, createTracker } from 'knex-mock-client'; +import { afterEach, describe, expect, it, vi } from 'vitest'; +import type { MutationOptions } from '../types/items.js'; +import { UserIntegrityCheckFlag } from 
'../utils/validate-user-count-integrity.js'; +import { AccessService, ItemsService, PresetsService, RolesService, UsersService } from './index.js'; -import { checkIncreasedUserLimits } from '../telemetry/utils/check-increased-user-limits.js'; -import { getRoleCountsByUsers } from '../telemetry/utils/get-role-counts-by-users.js'; -import { getUserCountsByRoles } from '../telemetry/utils/get-user-counts-by-roles.js'; -import { shouldCheckUserLimits } from '../telemetry/utils/should-check-user-limits.js'; -import { ItemsService, PermissionsService, PresetsService, RolesService, UsersService } from './index.js'; - -vi.mock('../../src/database/index', () => { - return { __esModule: true, default: vi.fn(), getDatabaseClient: vi.fn().mockReturnValue('postgres') }; -}); - -vi.mock('@directus/env', () => ({ - useEnv: vi.fn().mockReturnValue({ - EMAIL_TEMPLATES_PATH: './templates', - USERS_ADMIN_ACCESS_LIMIT: 3, - USERS_APP_ACCESS_LIMIT: 3, - USERS_API_ACCESS_LIMIT: 3, - }), +vi.mock('../../src/database/index', () => ({ + default: vi.fn(), + getDatabaseClient: vi.fn().mockReturnValue('postgres'), })); -vi.mock('../telemetry/utils/check-increased-user-limits.js'); -vi.mock('../telemetry/utils/get-role-counts-by-users.js'); -vi.mock('../telemetry/utils/get-user-counts-by-roles.js'); -vi.mock('../telemetry/utils/should-check-user-limits.js'); - const testSchema = { collections: { directus_roles: { @@ -71,1073 +43,129 @@ const testSchema = { } as SchemaOverview; describe('Integration Tests', () => { - let db: MockedFunction; - let tracker: Tracker; - - beforeAll(async () => { - db = vi.mocked(knex.default({ client: MockClient })); - tracker = createTracker(db); - }); - - beforeEach(() => { - tracker.on.any('directus_roles').response({}); - - tracker.on - .select(/"directus_roles"."id" from "directus_roles" order by "directus_roles"."id" asc limit .*/) - .response([]); - - tracker.on - .select( - /(select count\("directus_users"\."id"\) as "count", 
"directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, - ) - .response([{ count: 0, admin_access: true, app_access: true }]); - - vi.mocked(getRoleCountsByUsers).mockResolvedValue({ admin: 0, api: 0, app: 0 }); - vi.mocked(getUserCountsByRoles).mockResolvedValueOnce({ admin: 0, app: 0, api: 0 }); - vi.mocked(shouldCheckUserLimits).mockReturnValue(true); - }); - - afterEach(() => { - tracker.reset(); - }); - - describe('Services / RolesService', () => { - describe('updateOne', () => { - let service: RolesService; - let superUpdateOne: MockInstance; - const adminRoleId = 'cbfd1e77-b883-4090-93e4-5bcbfbd48aba'; - const userId1 = '07a5fee0-c168-49e2-8e33-4bae280e0c48'; - const userId2 = 'abedf9a4-6956-4a9c-8904-c1aa08a68173'; - - beforeEach(() => { - service = new RolesService({ - knex: db, - schema: testSchema, - }); - - superUpdateOne = vi.spyOn(ItemsService.prototype, 'updateOne'); - }); - - afterEach(() => { - superUpdateOne.mockRestore(); - }); - - describe('checkForOtherAdminUsers', () => { - describe('on an admin role', () => { - const admin_access = true; - - describe('with an array of user ids', () => { - it('having an added user', async () => { - const data: Record = { - users: [userId1, userId2], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having a removed user', async () => { - const data: Record = { - users: [userId1], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - - tracker.on - .select('select "id" from "directus_users" where "role" = ?') - .responseOnce([{ id: userId1 }, { id: userId2 }]); - - 
tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having a removed last user that is not the last admin of system', async () => { - const data: Record = { - users: [], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having a removed a last user that is the last admin of system', async () => { - const service = new RolesService({ - knex: db, - schema: testSchema, - accountability: { role: 'test', admin: false }, - }); - - const data: Record = { - users: [], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 0 }); - - const promise = service.updateOne(adminRoleId, data); - - expect.assertions(5); // to ensure both assertions in the catch block are reached - - try { - await promise; - } catch (err: any) { - expect(err.message).toBe(`You don't have permission to access this.`); - expect(err).toBeInstanceOf(ForbiddenError); - } - - expect(superUpdateOne).toHaveBeenCalled(); - - expect(superUpdateOne.mock.lastCall![2].preMutationError.message).toBe( - `Can't process content. 
You can't remove the last admin user from the admin role.`, - ); - - expect(superUpdateOne.mock.lastCall![2].preMutationError).toBeInstanceOf(UnprocessableContentError); - }); - }); - - describe('with an array of user objects', () => { - it('having an added user', async () => { - const data: Record = { - users: [{ id: userId1 }, { id: userId2 }], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - - tracker.on - .select('select count(*) as "count" from "directus_users" where "id" in') - .responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having a removed user', async () => { - const data: Record = { - users: [{ id: userId1 }], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - - tracker.on - .select('select "id" from "directus_users" where "role" = ?') - .responseOnce([{ id: userId1 }, { id: userId2 }]); - - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having a removed last user that is not the last admin of system', async () => { - const data: Record = { - users: [], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - 
expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having a removed a last user that is the last admin of system', async () => { - const service = new RolesService({ - knex: db, - schema: testSchema, - accountability: { role: 'test', admin: false }, - }); - - const data: Record = { - users: [], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 0 }); - - const promise = service.updateOne(adminRoleId, data); - - expect.assertions(5); // to ensure both assertions in the catch block are reached - - try { - await promise; - } catch (err: any) { - expect(err.message).toBe(`You don't have permission to access this.`); - expect(err).toBeInstanceOf(ForbiddenError); - } - - expect(superUpdateOne).toHaveBeenCalled(); - - expect(superUpdateOne.mock.lastCall![2].preMutationError.message).toBe( - `Can't process content. 
You can't remove the last admin user from the admin role.`, - ); - - expect(superUpdateOne.mock.lastCall![2].preMutationError).toBeInstanceOf(UnprocessableContentError); - }); - }); - - describe('with an alterations object', () => { - it('having a newly created user', async () => { - const data: Record = { - users: { - create: [{ name: 'New User' }], - update: [], - delete: [], - }, - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having an added user', async () => { - const data: Record = { - users: { - create: [], - update: [{ role: adminRoleId, id: userId2 }], - delete: [], - }, - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having a removed user', async () => { - const data: Record = { - users: { - create: [], - update: [], - delete: [userId2], - }, - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - - tracker.on - .select('select "id" from "directus_users" where "role" = ?') - .responseOnce([{ id: userId1 }, { id: userId2 }]); - - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - 
expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having a removed last user that is not the last admin of system', async () => { - const data: Record = { - users: { - create: [], - update: [], - delete: [userId1], - }, - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having a removed a last user that is the last admin of system', async () => { - const service = new RolesService({ - knex: db, - schema: testSchema, - accountability: { role: 'test', admin: false }, - }); - - const data: Record = { - users: { - create: [], - update: [], - delete: [userId1], - }, - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 0 }); - - const promise = service.updateOne(adminRoleId, data); - - expect.assertions(5); // to ensure both assertions in the catch block are reached - - try { - await promise; - } catch (err: any) { - expect(err.message).toBe(`You don't have permission to access this.`); - expect(err).toBeInstanceOf(ForbiddenError); - } - - expect(superUpdateOne).toHaveBeenCalled(); - - expect(superUpdateOne.mock.lastCall![2].preMutationError.message).toBe( - `Can't process content. 
You can't remove the last admin user from the admin role.`, - ); - - expect(superUpdateOne.mock.lastCall![2].preMutationError).toBeInstanceOf(UnprocessableContentError); - }); - }); - }); - - describe('on an non-admin role', () => { - const admin_access = false; - - describe('with an array of user ids', () => { - it('having an added user', async () => { - const data: Record = { - users: [userId1, userId2], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - - tracker.on - .select( - ({ sql, bindings }: RawQuery) => - sql.startsWith('select count(*) as "count" from "directus_users"') && bindings.includes(userId2), - ) - .response({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having an added user that is the last admin', async () => { - const service = new RolesService({ - knex: db, - schema: testSchema, - accountability: { role: 'test', admin: false }, - }); - - const data: Record = { - users: [userId1, userId2], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 0 }); - - const promise = service.updateOne(adminRoleId, data); - - expect.assertions(5); // to ensure both assertions in the catch block are reached - - try { - await promise; - } catch (err: any) { - expect(err.message).toBe(`You don't have permission to access this.`); - expect(err).toBeInstanceOf(ForbiddenError); - } - - expect(superUpdateOne).toHaveBeenCalled(); - - expect(superUpdateOne.mock.lastCall![2].preMutationError.message).toBe( - `Can't process 
content. You can't remove the last admin user from the admin role.`, - ); - - expect(superUpdateOne.mock.lastCall![2].preMutationError).toBeInstanceOf(UnprocessableContentError); - }); - - it('having a removed user', async () => { - const data: Record = { - users: [userId1], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - - tracker.on - .select('select "id" from "directus_users" where "role" = ?') - .responseOnce([{ id: userId1 }, { id: userId2 }]); - - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - }); - - describe('with an array of user objects', () => { - it('having an added user', async () => { - const data: Record = { - users: [{ id: userId1 }, { id: userId2 }], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having an added user that is the last admin', async () => { - const service = new RolesService({ - knex: db, - schema: testSchema, - accountability: { role: 'test', admin: false }, - }); - - const data: Record = { - users: [{ id: userId1 }, { id: userId2 }], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 0 }); - - const 
promise = service.updateOne(adminRoleId, data); - - expect.assertions(5); // to ensure both assertions in the catch block are reached - - try { - await promise; - } catch (err: any) { - expect(err.message).toBe(`You don't have permission to access this.`); - expect(err).toBeInstanceOf(ForbiddenError); - } - - expect(superUpdateOne).toHaveBeenCalled(); - - expect(superUpdateOne.mock.lastCall![2].preMutationError.message).toBe( - `Can't process content. You can't remove the last admin user from the admin role.`, - ); - - expect(superUpdateOne.mock.lastCall![2].preMutationError).toBeInstanceOf(UnprocessableContentError); - }); - - it('having a removed user', async () => { - const data: Record = { - users: [{ id: userId1 }], - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - - tracker.on - .select('select "id" from "directus_users" where "role" = ?') - .responseOnce([{ id: userId1 }, { id: userId2 }]); - - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - }); - - describe('with an alterations object', () => { - it('having a newly created user', async () => { - const data: Record = { - users: { - create: [{ name: 'New User' }], - update: [], - delete: [], - }, - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having an added user', async () => { - const data: Record = { - users: { - create: [], - 
update: [{ role: adminRoleId, id: userId2 }], - delete: [], - }, - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - - it('having an added user that is the last admin', async () => { - const service = new RolesService({ - knex: db, - schema: testSchema, - accountability: { role: 'test', admin: false }, - }); - - const data: Record = { - users: { - create: [], - update: [{ role: adminRoleId, id: userId2 }], - delete: [], - }, - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - tracker.on.select('select "id" from "directus_users" where "role" = ?').responseOnce([{ id: userId1 }]); - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 0 }); - - const promise = service.updateOne(adminRoleId, data); - - expect.assertions(5); // to ensure both assertions in the catch block are reached - - try { - await promise; - } catch (err: any) { - expect(err.message).toBe(`You don't have permission to access this.`); - expect(err).toBeInstanceOf(ForbiddenError); - } - - expect(superUpdateOne).toHaveBeenCalled(); - - expect(superUpdateOne.mock.lastCall![2].preMutationError.message).toBe( - `Can't process content. 
You can't remove the last admin user from the admin role.`, - ); - - expect(superUpdateOne.mock.lastCall![2].preMutationError).toBeInstanceOf(UnprocessableContentError); - }); - - it('having a removed user', async () => { - const data: Record = { - users: { - create: [], - update: [], - delete: [userId2], - }, - }; - - tracker.on.select('select "admin_access" from "directus_roles"').responseOnce({ admin_access }); - - tracker.on - .select('select "id" from "directus_users" where "role" = ?') - .responseOnce([{ id: userId1 }, { id: userId2 }]); - - tracker.on.select('select count(*) as "count" from "directus_users"').responseOnce({ count: 1 }); - - const result = await service.updateOne(adminRoleId, data); - expect(result).toBe(adminRoleId); - expect(superUpdateOne).toHaveBeenCalledOnce(); - }); - }); - }); - }); - }); - }); + const db = vi.mocked(knex.default({ client: MockClient })); + createTracker(db); describe('Services / Roles', () => { - let service: RolesService; - let checkForOtherAdminRolesSpy: MockInstance; - let checkForOtherAdminUsersSpy: MockInstance; - - beforeEach(() => { - service = new RolesService({ - knex: db, - schema: { - collections: { - directus_roles: { - collection: 'directus_roles', - primary: 'id', - singleton: false, - sortField: null, - note: null, - accountability: null, - fields: { - id: { - field: 'id', - defaultValue: null, - nullable: false, - generated: true, - type: 'integer', - dbType: 'integer', - precision: null, - scale: null, - special: [], - note: null, - validation: null, - alias: false, - }, - }, - }, - }, - relations: [], - }, - }); - - vi.spyOn(PermissionsService.prototype, 'deleteByQuery').mockResolvedValueOnce([]); - vi.spyOn(PresetsService.prototype, 'deleteByQuery').mockResolvedValueOnce([]); - vi.spyOn(UsersService.prototype, 'updateByQuery').mockResolvedValueOnce([]); - vi.spyOn(UsersService.prototype, 'deleteByQuery').mockResolvedValueOnce([]); - - // "as any" are needed since these are private methods - 
checkForOtherAdminRolesSpy = vi - .spyOn(RolesService.prototype as any, 'checkForOtherAdminRoles') - .mockResolvedValue(true); - - checkForOtherAdminUsersSpy = vi - .spyOn(RolesService.prototype as any, 'checkForOtherAdminUsers') - .mockResolvedValue(true); + const service = new RolesService({ + knex: db, + schema: testSchema, }); afterEach(() => { - checkForOtherAdminRolesSpy.mockRestore(); - checkForOtherAdminUsersSpy.mockRestore(); - }); - - describe('createOne', () => { - it('should not checkForOtherAdminRoles', async () => { - await service.createOne({}); - expect(checkForOtherAdminRolesSpy).not.toBeCalled(); - }); - - it('should throw due to invalid ip_access', async () => { - await expect(service.createOne({ ip_access: ['invalid_ip'] })).rejects.toThrow( - 'IP Access contains an incorrect value. Valid values are: IP addresses, IP ranges and CIDR blocks', - ); - }); - }); - - describe('createMany', () => { - it('should not checkForOtherAdminRoles', async () => { - await service.createMany([{}]); - expect(checkForOtherAdminRolesSpy).not.toBeCalled(); - }); - - it('should throw due to invalid ip_access', async () => { - await expect(service.createMany([{ ip_access: ['invalid_ip'] }])).rejects.toThrow( - 'IP Access contains an incorrect value. 
Valid values are: IP addresses, IP ranges and CIDR blocks', - ); - }); - }); - - describe('updateOne', () => { - it('should not checkForOtherAdminRoles', async () => { - await service.updateOne(1, {}); - expect(checkForOtherAdminRolesSpy).not.toBeCalled(); - }); - - it('should checkForOtherAdminRoles once and not checkForOtherAdminUsersSpy', async () => { - await service.updateOne(1, { admin_access: false }); - expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); - expect(checkForOtherAdminUsersSpy).not.toBeCalled(); - }); - - it('should checkForOtherAdminRoles and checkForOtherAdminUsersSpy once', async () => { - await service.updateOne(1, { admin_access: false, users: [1] }); - expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); - expect(checkForOtherAdminUsersSpy).toBeCalledTimes(1); - }); - - it('should throw due to invalid ip_access', async () => { - await expect(service.updateOne(1, { ip_access: ['invalid_ip'] })).rejects.toThrow( - 'IP Access contains an incorrect value. Valid values are: IP addresses, IP ranges and CIDR blocks', - ); - }); + vi.clearAllMocks(); }); describe('updateMany', () => { - it('should not checkForOtherAdminRoles', async () => { - await service.updateMany([1], {}); - expect(checkForOtherAdminRolesSpy).not.toBeCalled(); + vi.spyOn(ItemsService.prototype, 'updateMany').mockResolvedValue([randomUUID()]); + + const validateRoleNestingSpy = vi + .spyOn(RolesService.prototype as any, 'validateRoleNesting') + .mockImplementation(vi.fn()); + + it('should not request user integrity checks if no relevant fields are changed', async () => { + const opts: MutationOptions = {}; + + await service.updateMany([randomUUID()], {}, opts); + + expect(opts.userIntegrityCheckFlags).toBe(undefined); }); - it('should checkForOtherAdminRoles once', async () => { - await service.updateMany([1], { admin_access: false }); - expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); + it('should request all user integrity checks if parent is changed', async () => 
{ + const opts: MutationOptions = {}; + + await service.updateMany([randomUUID()], { parent: randomUUID() }, opts); + + expect(opts.userIntegrityCheckFlags).toBe(UserIntegrityCheckFlag.All); }); - it('should throw due to invalid ip_access', async () => { - await expect(service.updateMany([1], { ip_access: ['invalid_ip'] })).rejects.toThrow( - 'IP Access contains an incorrect value. Valid values are: IP addresses, IP ranges and CIDR blocks', - ); - }); - }); + it('should validate role nesting if parent is changed', async () => { + const opts: MutationOptions = {}; - describe('updateBatch', () => { - it('should not checkForOtherAdminRoles', async () => { - await service.updateBatch([{ id: 1 }]); - expect(checkForOtherAdminRolesSpy).not.toBeCalled(); + await service.updateMany([randomUUID()], { parent: randomUUID() }, opts); + + expect(validateRoleNestingSpy).toHaveBeenCalled(); }); - it('should checkForOtherAdminRoles once', async () => { - await service.updateBatch([{ id: 1, admin_access: false }]); - expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); - }); + it('should clear caches if parent is changed', async () => { + const clearCacheSpy = vi.spyOn(RolesService.prototype as any, 'clearCaches'); - it('should throw due to invalid ip_access', async () => { - await expect(service.updateBatch([{ id: 1, ip_access: ['invalid_ip'] }])).rejects.toThrow( - 'IP Access contains an incorrect value. 
Valid values are: IP addresses, IP ranges and CIDR blocks', - ); - }); - }); + await service.updateMany([randomUUID()], { parent: randomUUID() }); - describe('updateByQuery', () => { - it('should not checkForOtherAdminRoles', async () => { - // mock return value for the following empty query - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValueOnce([1]); - await service.updateByQuery({}, {}); - expect(checkForOtherAdminRolesSpy).not.toBeCalled(); - }); - - it('should checkForOtherAdminRoles once', async () => { - // mock return value for the following empty query - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValueOnce([1]); - await service.updateByQuery({}, { admin_access: false }); - expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); - }); - - it('should throw due to invalid ip_access', async () => { - await expect(service.updateByQuery({}, { ip_access: ['invalid_ip'] })).rejects.toThrow( - 'IP Access contains an incorrect value. Valid values are: IP addresses, IP ranges and CIDR blocks', - ); - }); - }); - - describe('deleteOne', () => { - it('should checkForOtherAdminRoles once', async () => { - await service.deleteOne(1); - expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); + expect(clearCacheSpy).toHaveBeenCalled(); }); }); describe('deleteMany', () => { - it('should checkForOtherAdminRoles once', async () => { - await service.deleteMany([1]); - expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); - }); - }); + db.isTransaction = false; - describe('deleteByQuery', () => { - it('should checkForOtherAdminRoles once', async () => { - // mock return value for the following empty query - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValueOnce([1]); - await service.deleteByQuery({}); - expect(checkForOtherAdminRolesSpy).toBeCalledTimes(1); - }); - }); - }); + const accessDeleteByQuerySpy = vi + .spyOn(AccessService.prototype, 'deleteByQuery') + .mockResolvedValue([randomUUID()]); - describe('User Limits 
Tests', () => { - let service: RolesService; - let checkForOtherAdminRolesSpy: MockInstance; - let checkForOtherAdminUsersSpy: MockInstance; + const presetsDeleteByQuerySpy = vi + .spyOn(PresetsService.prototype, 'deleteByQuery') + .mockResolvedValue([randomUUID()]); - beforeEach(() => { - service = new RolesService({ - knex: db, - schema: testSchema, + const usersUpdateByQuerySpy = vi.spyOn(UsersService.prototype, 'updateByQuery').mockResolvedValue([randomUUID()]); + const rolesUpdateByQuerySpy = vi.spyOn(RolesService.prototype, 'updateByQuery').mockResolvedValue([randomUUID()]); + const itemsDeleteManySpy = vi.spyOn(ItemsService.prototype, 'deleteMany').mockResolvedValue([randomUUID()]); + + it('should call associated service methods, with user integrity check flag', async () => { + const keys = [randomUUID()]; + + await service.deleteMany(keys); + + const opts: MutationOptions = { userIntegrityCheckFlags: UserIntegrityCheckFlag.All, bypassLimits: true }; + + expect(accessDeleteByQuerySpy).toHaveBeenCalledWith( + { + filter: { role: { _in: keys } }, + }, + opts, + ); + + expect(presetsDeleteByQuerySpy).toHaveBeenCalledWith( + { + filter: { role: { _in: keys } }, + }, + opts, + ); + + expect(presetsDeleteByQuerySpy).toHaveBeenCalledWith( + { + filter: { role: { _in: keys } }, + }, + opts, + ); + + expect(usersUpdateByQuerySpy).toHaveBeenCalledWith( + { + filter: { role: { _in: keys } }, + }, + { + status: 'suspended', + role: null, + }, + opts, + ); + + expect(rolesUpdateByQuerySpy).toHaveBeenCalledWith( + { + filter: { parent: { _in: keys } }, + }, + { parent: null }, + ); + + expect(itemsDeleteManySpy).toHaveBeenCalledWith(keys, { userIntegrityCheckFlags: UserIntegrityCheckFlag.All }); }); - vi.mocked(checkIncreasedUserLimits).mockReset(); - vi.mocked(getRoleCountsByUsers).mockReset(); - vi.mocked(getUserCountsByRoles).mockReset(); + it('should clear caches', async () => { + const clearCacheSpy = vi.spyOn(RolesService.prototype as any, 'clearCaches'); - // "as 
any" are needed since these are private methods - checkForOtherAdminRolesSpy = vi - .spyOn(RolesService.prototype as any, 'checkForOtherAdminRoles') - .mockResolvedValueOnce(true); + await service.deleteMany([randomUUID()]); - checkForOtherAdminUsersSpy = vi - .spyOn(RolesService.prototype as any, 'checkForOtherAdminUsers') - .mockResolvedValueOnce(true); - }); - - afterEach(() => { - checkForOtherAdminRolesSpy.mockRestore(); - checkForOtherAdminUsersSpy.mockRestore(); - }); - - describe('createOne', () => { - it('calculates the number of increased admin users', async () => { - await service.createOne({ admin_access: true, app_access: true, users: [1, 2, 3] }); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 3, app: 0, api: 0 }, []); - }); - - it('calculates the number of increased app users', async () => { - await service.createOne({ admin_access: false, app_access: true, users: [1, 2, 3] }); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 3, api: 0 }, []); - }); - - it('calculates the number of increased api users', async () => { - await service.createOne({ admin_access: false, app_access: false, users: [1, 2, 3] }); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 0, api: 3 }, []); - }); - - it('skips user limits check when no limit is set', async () => { - vi.mocked(shouldCheckUserLimits).mockReturnValue(false); - - await service.createOne({ admin_access: true, app_access: true, users: [1, 2, 3] }); - - expect(checkIncreasedUserLimits).not.toBeCalled(); - }); - }); - - describe('createMany', () => { - it('calculates the number of increased admin users', async () => { - await service.createMany([ - { admin_access: true, app_access: true, users: [1] }, - { admin_access: true, app_access: true, users: [2, 3] }, - { admin_access: true, app_access: true, users: [4, 5, 6] }, - ]); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 6, app: 0, api: 0 }, []); - }); - - it('calculates the 
number of increased app users', async () => { - await service.createMany([ - { admin_access: false, app_access: true, users: [1] }, - { admin_access: false, app_access: true, users: [2, 3] }, - { admin_access: false, app_access: true, users: [4, 5, 6] }, - ]); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 6, api: 0 }, []); - }); - - it('calculates the number of increased api users', async () => { - await service.createMany([ - { admin_access: false, app_access: false, users: [1] }, - { admin_access: false, app_access: false, users: [2, 3] }, - { admin_access: false, app_access: false, users: [4, 5, 6] }, - ]); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 0, api: 6 }, []); - }); - - it('skips user limits check when no limit is set', async () => { - vi.mocked(shouldCheckUserLimits).mockReturnValue(false); - - await service.createMany([ - { admin_access: true, app_access: true, users: [1] }, - { admin_access: true, app_access: true, users: [2, 3] }, - { admin_access: true, app_access: true, users: [4, 5, 6] }, - ]); - - expect(checkIncreasedUserLimits).not.toBeCalled(); - }); - }); - - describe('updateOne', () => { - it('calculates the number of increased admin users', async () => { - tracker.resetHandlers(); - - tracker.on - .select( - /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, - ) - .responseOnce([{ count: 2, admin_access: true, app_access: true }]); - - vi.mocked(getRoleCountsByUsers).mockResolvedValue({ admin: 2, app: 0, api: 0 }); - vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 11, app: 0, api: 0 }); - - await service.updateOne(randomUUID(), { - admin_access: true, - app_access: true, - users: [1, 2, 3, 4, 5], - }); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 3, app: 0, api: 0 }, []); - }); - - it('calculates the number of increased admin 
users with access change', async () => { - tracker.resetHandlers(); - - tracker.on - .select( - /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, - ) - .responseOnce([{ count: 2, admin_access: false, app_access: true }]); - - vi.mocked(getRoleCountsByUsers).mockResolvedValue({ admin: 2, app: 0, api: 0 }); - vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 11, app: 0, api: 0 }); - - await service.updateOne(randomUUID(), { - admin_access: true, - app_access: true, - users: [1, 2, 3, 4, 5], - }); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 5, app: 0, api: 0 }, []); - }); - - it('calculates the number of increased app users', async () => { - tracker.resetHandlers(); - - tracker.on - .select( - /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, - ) - .responseOnce([{ count: 2, admin_access: false, app_access: true }]); - - vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 0, app: 22, api: 0 }); - - await service.updateOne(randomUUID(), { - admin_access: false, - app_access: true, - users: [1, 2, 3, 4, 5], - }); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 3, api: 0 }, []); - }); - - it('calculates the number of increased app users with access change', async () => { - tracker.resetHandlers(); - - tracker.on - .select( - /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, - ) - .responseOnce([{ count: 2, admin_access: false, app_access: false }]); - - vi.mocked(getRoleCountsByUsers).mockResolvedValue({ admin: 0, app: 2, api: 0 }); - vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 0, app: 22, api: 0 }); - - 
await service.updateOne(randomUUID(), { - admin_access: false, - app_access: true, - users: [1, 2, 3, 4, 5], - }); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 5, api: 0 }, []); - }); - - it('calculates the number of increased api users', async () => { - tracker.resetHandlers(); - - tracker.on - .select( - /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, - ) - .responseOnce([{ count: 2, admin_access: false, app_access: false }]); - - vi.mocked(getRoleCountsByUsers).mockResolvedValue({ admin: 0, app: 0, api: 2 }); - vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 0, app: 0, api: 33 }); - - await service.updateOne(randomUUID(), { - admin_access: false, - app_access: false, - users: [1, 2, 3, 4, 5], - }); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 0, api: 3 }, []); - }); - - it('calculates the number of increased api users with access change', async () => { - tracker.resetHandlers(); - - tracker.on - .select( - /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(where "directus_roles"\."id" = \?).*/, - ) - .responseOnce([{ count: 2, admin_access: false, app_access: true }]); - - vi.mocked(getRoleCountsByUsers).mockResolvedValue({ admin: 0, app: 0, api: 2 }); - vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 0, app: 0, api: 33 }); - - await service.updateOne(randomUUID(), { - admin_access: false, - app_access: false, - users: [1, 2, 3, 4, 5], - }); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 0, api: 5 }, []); - }); - - it('skips user limits check when no limit is set', async () => { - vi.mocked(shouldCheckUserLimits).mockReturnValue(false); - - await service.updateOne(randomUUID(), { - admin_access: false, - app_access: true, - users: [1, 2, 
3, 4, 5], - }); - - expect(checkIncreasedUserLimits).not.toBeCalled(); - }); - }); - - describe('updateMany', () => { - it('calculates the number of increased admin users', async () => { - vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 11, app: 22, api: 33 }); - - await service.updateMany([randomUUID(), randomUUID(), randomUUID()], { - admin_access: true, - app_access: true, - }); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 55, app: 0, api: 0 }); - }); - - it('calculates the number of increased app users', async () => { - vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 11, app: 22, api: 33 }); - - await service.updateMany([randomUUID(), randomUUID(), randomUUID()], { - admin_access: false, - app_access: true, - }); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 44, api: 0 }); - }); - - it('calculates the number of increased api users', async () => { - vi.mocked(getUserCountsByRoles).mockResolvedValue({ admin: 11, app: 22, api: 33 }); - - await service.updateMany([randomUUID(), randomUUID(), randomUUID()], { - admin_access: false, - app_access: false, - }); - - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 0, api: 33 }); - }); - - it('skips user limits check when no limit is set', async () => { - vi.mocked(shouldCheckUserLimits).mockReturnValue(false); - - await service.updateMany([randomUUID(), randomUUID(), randomUUID()], { - admin_access: false, - app_access: true, - }); - - expect(checkIncreasedUserLimits).not.toBeCalled(); + expect(clearCacheSpy).toHaveBeenCalled(); }); }); }); diff --git a/api/src/services/roles.ts b/api/src/services/roles.ts index 97d67d906b..1576c305fa 100644 --- a/api/src/services/roles.ts +++ b/api/src/services/roles.ts @@ -1,521 +1,69 @@ -import { InvalidPayloadError, UnprocessableContentError } from '@directus/errors'; -import type { Alterations, Item, PrimaryKey, Query, User } from '@directus/types'; -import { getMatch } from 'ip-matching'; 
-import { omit } from 'lodash-es'; -import { checkIncreasedUserLimits } from '../telemetry/utils/check-increased-user-limits.js'; -import { getRoleCountsByUsers } from '../telemetry/utils/get-role-counts-by-users.js'; -import { type AccessTypeCount } from '../telemetry/utils/get-user-count.js'; -import { getUserCountsByRoles } from '../telemetry/utils/get-user-counts-by-roles.js'; -import { shouldCheckUserLimits } from '../telemetry/utils/should-check-user-limits.js'; +import { InvalidPayloadError } from '@directus/errors'; +import type { Item, PrimaryKey } from '@directus/types'; +import { clearSystemCache } from '../cache.js'; +import { fetchRolesTree } from '../permissions/lib/fetch-roles-tree.js'; import type { AbstractServiceOptions, MutationOptions } from '../types/index.js'; -import { shouldClearCache } from '../utils/should-clear-cache.js'; import { transaction } from '../utils/transaction.js'; +import { UserIntegrityCheckFlag } from '../utils/validate-user-count-integrity.js'; import { ItemsService } from './items.js'; -import { PermissionsService } from './permissions/index.js'; +import { AccessService } from './access.js'; import { PresetsService } from './presets.js'; import { UsersService } from './users.js'; -type RoleCount = { - count: number | string; - admin_access: number | boolean | null; - app_access: number | boolean | null; -}; - export class RolesService extends ItemsService { constructor(options: AbstractServiceOptions) { super('directus_roles', options); } - private async checkForOtherAdminRoles(excludeKeys: PrimaryKey[]): Promise { - // Make sure there's at least one admin role left after this deletion is done - const otherAdminRoles = await this.knex - .count('*', { as: 'count' }) - .from('directus_roles') - .whereNotIn('id', excludeKeys) - .andWhere({ admin_access: true }) - .first(); + // No need to check user integrity in createOne, as the creation of a role itself does not influence the number of + // users, as the role of a user is 
actually updated in the UsersService on the user, which will make sure to + // initiate a user integrity check if necessary. Same goes for role nesting check as well as cache clearing. - const otherAdminRolesCount = Number(otherAdminRoles?.count ?? 0); - - if (otherAdminRolesCount === 0) { - throw new UnprocessableContentError({ reason: `You can't delete the last admin role` }); - } - } - - private async checkForOtherAdminUsers( - key: PrimaryKey, - users: Alterations | (string | Partial)[], - ): Promise { - const role = await this.knex.select('admin_access').from('directus_roles').where('id', '=', key).first(); - - // No-op if role doesn't exist - if (!role) return; - - const usersBefore = (await this.knex.select('id').from('directus_users').where('role', '=', key)).map( - (user) => user.id, - ); - - const usersAdded: (Partial & Pick)[] = []; - const usersUpdated: (Partial & Pick)[] = []; - const usersCreated: Partial[] = []; - const usersRemoved: string[] = []; - - if (Array.isArray(users)) { - const usersKept: string[] = []; - - for (const user of users) { - if (typeof user === 'string') { - if (usersBefore.includes(user)) { - usersKept.push(user); - } else { - usersAdded.push({ id: user }); - } - } else if (user.id) { - if (usersBefore.includes(user.id)) { - usersKept.push(user.id); - usersUpdated.push(user as Partial & Pick); - } else { - usersAdded.push(user as Partial & Pick); - } - } else { - usersCreated.push(user); - } - } - - usersRemoved.push(...usersBefore.filter((user) => !usersKept.includes(user))); - } else { - for (const user of users.update) { - if (usersBefore.includes(user['id'])) { - usersUpdated.push(user); - } else { - usersAdded.push(user); - } - } - - usersCreated.push(...users.create); - usersRemoved.push(...users.delete); - } - - if (role.admin_access === false || role.admin_access === 0) { - // Admin users might have moved in from other role, thus becoming non-admin - if (usersAdded.length > 0) { - const otherAdminUsers = await this.knex 
- .count('*', { as: 'count' }) - .from('directus_users') - .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') - .whereNotIn( - 'directus_users.id', - usersAdded.map((user) => user.id), - ) - .andWhere({ 'directus_roles.admin_access': true, status: 'active' }) - .first(); - - const otherAdminUsersCount = Number(otherAdminUsers?.count ?? 0); - - if (otherAdminUsersCount === 0) { - throw new UnprocessableContentError({ reason: `You can't remove the last admin user from the admin role` }); - } - } - - return; - } - - // Only added or created new users - if (usersUpdated.length === 0 && usersRemoved.length === 0) return; - - // Active admin user(s) about to be created - if (usersCreated.some((user) => !('status' in user) || user.status === 'active')) return; - - const usersDeactivated = [...usersAdded, ...usersUpdated] - .filter((user) => 'status' in user && user.status !== 'active') - .map((user) => user.id); - - const usersAddedNonDeactivated = usersAdded - .filter((user) => !usersDeactivated.includes(user.id)) - .map((user) => user.id); - - // Active user(s) about to become admin - if (usersAddedNonDeactivated.length > 0) { - const userCount = await this.knex - .count('*', { as: 'count' }) - .from('directus_users') - .whereIn('id', usersAddedNonDeactivated) - .andWhere({ status: 'active' }) - .first(); - - if (Number(userCount?.count ?? 0) > 0) { - return; - } - } - - const otherAdminUsers = await this.knex - .count('*', { as: 'count' }) - .from('directus_users') - .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') - .whereNotIn('directus_users.id', [...usersDeactivated, ...usersRemoved]) - .andWhere({ 'directus_roles.admin_access': true, status: 'active' }) - .first(); - - const otherAdminUsersCount = Number(otherAdminUsers?.count ?? 
0); - - if (otherAdminUsersCount === 0) { - throw new UnprocessableContentError({ reason: `You can't remove the last admin user from the admin role` }); - } - - return; - } - - private isIpAccessValid(value?: any[] | null): boolean { - if (value === undefined) return false; - if (value === null) return true; - if (Array.isArray(value) && value.length === 0) return true; - - for (const ip of value) { - if (typeof ip !== 'string' || ip.includes('*')) return false; - - try { - const match = getMatch(ip); - if (match.type == 'IPMask') return false; - } catch { - return false; - } - } - - return true; - } - - private assertValidIpAccess(partialItem: Partial): void { - if ('ip_access' in partialItem && !this.isIpAccessValid(partialItem['ip_access'])) { - throw new InvalidPayloadError({ - reason: 'IP Access contains an incorrect value. Valid values are: IP addresses, IP ranges and CIDR blocks', - }); - } - } - - private getRoleAccessType(data: Partial) { - if ('admin_access' in data && data['admin_access'] === true) { - return 'admin'; - } else if (('app_access' in data && data['app_access'] === true) || 'app_access' in data === false) { - return 'app'; - } else { - return 'api'; - } - } - - override async createOne(data: Partial, opts?: MutationOptions): Promise { - this.assertValidIpAccess(data); - - if (shouldCheckUserLimits()) { - const increasedCounts: AccessTypeCount = { - admin: 0, - app: 0, - api: 0, - }; - - const existingIds: PrimaryKey[] = []; - - if ('users' in data) { - const type = this.getRoleAccessType(data); - increasedCounts[type] += data['users'].length; - - for (const user of data['users']) { - if (typeof user === 'string') { - existingIds.push(user); - } else if (typeof user === 'object' && 'id' in user) { - existingIds.push(user['id']); - } - } - } - - await checkIncreasedUserLimits(this.knex, increasedCounts, existingIds); - } - - return super.createOne(data, opts); - } - - override async createMany(data: Partial[], opts?: MutationOptions): Promise 
{ - const needsUserLimitCheck = shouldCheckUserLimits(); - - const increasedCounts: AccessTypeCount = { - admin: 0, - app: 0, - api: 0, - }; - - const existingIds: PrimaryKey[] = []; - - for (const partialItem of data) { - this.assertValidIpAccess(partialItem); - - if (needsUserLimitCheck && 'users' in partialItem) { - const type = this.getRoleAccessType(partialItem); - increasedCounts[type] += partialItem['users'].length; - - for (const user of partialItem['users']) { - if (typeof user === 'string') { - existingIds.push(user); - } else if (typeof user === 'object' && 'id' in user) { - existingIds.push(user['id']); - } - } - } - } - - if (needsUserLimitCheck) { - await checkIncreasedUserLimits(this.knex, increasedCounts, existingIds); - } - - return super.createMany(data, opts); - } - - override async updateOne(key: PrimaryKey, data: Partial, opts?: MutationOptions): Promise { - this.assertValidIpAccess(data); - - try { - if ('users' in data) { - await this.checkForOtherAdminUsers(key, data['users']); - } - - if (shouldCheckUserLimits()) { - const increasedCounts: AccessTypeCount = { - admin: 0, - app: 0, - api: 0, - }; - - let increasedUsers = 0; - - const existingIds: PrimaryKey[] = []; - - let existingRole: RoleCount | undefined = await this.knex - .count('directus_users.id', { as: 'count' }) - .select('directus_roles.admin_access', 'directus_roles.app_access') - .from('directus_users') - .where('directus_roles.id', '=', key) - .andWhere('directus_users.status', '=', 'active') - .leftJoin('directus_roles', 'directus_users.role', '=', 'directus_roles.id') - .groupBy('directus_roles.admin_access', 'directus_roles.app_access') - .first(); - - if (!existingRole) { - try { - const role = (await this.knex - .select('admin_access', 'app_access') - .from('directus_roles') - .where('id', '=', key) - .first()) ?? 
{ admin_access: null, app_access: null }; - - existingRole = { count: 0, ...role } as RoleCount; - } catch { - existingRole = { count: 0, admin_access: null, app_access: null } as RoleCount; - } - } - - if ('users' in data) { - const users: Alterations | (string | Partial)[] = data['users']; - - if (Array.isArray(users)) { - increasedUsers = users.length - Number(existingRole.count); - - for (const user of users) { - if (typeof user === 'string') { - existingIds.push(user); - } else if (typeof user === 'object' && 'id' in user) { - existingIds.push(user['id']); - } - } - } else { - increasedUsers += users.create.length; - increasedUsers -= users.delete.length; - - const userIds = []; - - for (const user of users.update) { - if ('status' in user) { - // account for users being activated and deactivated - if (user['status'] === 'active') { - increasedUsers++; - } else { - increasedUsers--; - } - } - - userIds.push(user.id); - } - - try { - const existingCounts = await getRoleCountsByUsers(this.knex, userIds); - - if (existingRole.admin_access) { - increasedUsers += existingCounts.app + existingCounts.api; - } else if (existingRole.app_access) { - increasedUsers += existingCounts.admin + existingCounts.api; - } else { - increasedUsers += existingCounts.admin + existingCounts.app; - } - } catch { - // ignore failed user call - } - } - } - - let isAccessChanged = false; - let accessType: 'admin' | 'app' | 'api' = 'api'; - - if ('app_access' in data) { - if (data['app_access'] === true) { - accessType = 'app'; - - if (!existingRole.app_access) isAccessChanged = true; - } else if (existingRole.app_access) { - isAccessChanged = true; - } - } else if (existingRole.app_access) { - accessType = 'app'; - } - - if ('admin_access' in data) { - if (data['admin_access'] === true) { - accessType = 'admin'; - - if (!existingRole.admin_access) isAccessChanged = true; - } else if (existingRole.admin_access) { - isAccessChanged = true; - } - } else if (existingRole.admin_access) { - 
accessType = 'admin'; - } - - if (isAccessChanged) { - increasedCounts[accessType] += Number(existingRole.count); - } - - increasedCounts[accessType] += increasedUsers; - - await checkIncreasedUserLimits(this.knex, increasedCounts, existingIds); - } - } catch (err: any) { - (opts || (opts = {})).preMutationError = err; - } - - return super.updateOne(key, data, opts); - } - - override async updateBatch(data: Partial[], opts: MutationOptions = {}): Promise { - for (const partialItem of data) { - this.assertValidIpAccess(partialItem); - } - - const primaryKeyField = this.schema.collections[this.collection]!.primary; - - if (!opts.mutationTracker) { - opts.mutationTracker = this.createMutationTracker(); - } - - const keys: PrimaryKey[] = []; - - try { - await transaction(this.knex, async (trx) => { - const service = new RolesService({ - accountability: this.accountability, - knex: trx, - schema: this.schema, - }); - - for (const item of data) { - const combinedOpts = Object.assign({ autoPurgeCache: false }, opts); - keys.push(await service.updateOne(item[primaryKeyField]!, omit(item, primaryKeyField), combinedOpts)); - } - }); - } finally { - if (shouldClearCache(this.cache, opts, this.collection)) { - await this.cache.clear(); - } - } - - return keys; - } - - override async updateMany(keys: PrimaryKey[], data: Partial, opts?: MutationOptions): Promise { - this.assertValidIpAccess(data); - - try { - if ('admin_access' in data && data['admin_access'] === false) { - await this.checkForOtherAdminRoles(keys); - } - - if (shouldCheckUserLimits() && ('admin_access' in data || 'app_access' in data)) { - const existingCounts: AccessTypeCount = await getUserCountsByRoles(this.knex, keys); - - const increasedCounts: AccessTypeCount = { - admin: 0, - app: 0, - api: 0, - }; - - const type = this.getRoleAccessType(data); - - for (const [existingType, existingCount] of Object.entries(existingCounts)) { - if (existingType === type) continue; - increasedCounts[type] += existingCount; 
- } - - await checkIncreasedUserLimits(this.knex, increasedCounts); - } - } catch (err: any) { - (opts || (opts = {})).preMutationError = err; - } - - return super.updateMany(keys, data, opts); - } - - override async updateByQuery( - query: Query, + override async updateMany( + keys: PrimaryKey[], data: Partial, - opts?: MutationOptions | undefined, + opts: MutationOptions = {}, ): Promise { - this.assertValidIpAccess(data); + if ('parent' in data) { + // If the parent of a role changed we need to make a full integrity check. + // Anything related to policies will be checked in the AccessService, where the policies are attached to roles + opts.userIntegrityCheckFlags = UserIntegrityCheckFlag.All; + opts.onRequireUserIntegrityCheck?.(opts.userIntegrityCheckFlags); - return super.updateByQuery(query, data, opts); + await this.validateRoleNesting(keys as string[], data['parent']); + } + + const result = await super.updateMany(keys, data, opts); + + // Only clear the permissions cache if the parent role has changed + // If anything policies related has changed, the cache will be cleared in the AccessService as well + if ('parent' in data) { + await this.clearCaches(); + } + + return result; } - override async deleteMany(keys: PrimaryKey[]): Promise { - const opts: MutationOptions = {}; - - try { - await this.checkForOtherAdminRoles(keys); - } catch (err: any) { - opts.preMutationError = err; - } + override async deleteMany(keys: PrimaryKey[], opts: MutationOptions = {}): Promise { + opts.userIntegrityCheckFlags = UserIntegrityCheckFlag.All; + opts.onRequireUserIntegrityCheck?.(opts.userIntegrityCheckFlags); await transaction(this.knex, async (trx) => { - const itemsService = new ItemsService('directus_roles', { + const options: AbstractServiceOptions = { knex: trx, accountability: this.accountability, schema: this.schema, - }); + }; - const permissionsService = new PermissionsService({ - knex: trx, - accountability: this.accountability, - schema: this.schema, - }); - - 
const presetsService = new PresetsService({ - knex: trx, - accountability: this.accountability, - schema: this.schema, - }); - - const usersService = new UsersService({ - knex: trx, - accountability: this.accountability, - schema: this.schema, - }); + const itemsService = new ItemsService('directus_roles', options); + const rolesService = new RolesService(options); + const accessService = new AccessService(options); + const presetsService = new PresetsService(options); + const usersService = new UsersService(options); // Delete permissions/presets for this role, suspend all remaining users in role - await permissionsService.deleteByQuery( + await accessService.deleteByQuery( { filter: { role: { _in: keys } }, }, @@ -540,9 +88,42 @@ export class RolesService extends ItemsService { { ...opts, bypassLimits: true }, ); + // If the about to be deleted roles are the parent of other roles set those parents to null + // Use a newly created RolesService here that works within the current transaction + await rolesService.updateByQuery( + { + filter: { parent: { _in: keys } }, + }, + { parent: null }, + ); + await itemsService.deleteMany(keys, opts); }); + // Since nested roles could be updated, clear caches + await this.clearCaches(); + return keys; } + + private async validateRoleNesting(ids: string[], parent: string) { + if (ids.includes(parent)) { + throw new InvalidPayloadError({ reason: 'A role cannot be a parent of itself' }); + } + + const roles = await fetchRolesTree(parent, this.knex); + + if (ids.some((id) => roles.includes(id))) { + // The role tree up from the parent already includes this role, so it would create a circular reference + throw new InvalidPayloadError({ reason: 'A role cannot have a parent that is already a descendant of itself' }); + } + } + + private async clearCaches(opts?: MutationOptions) { + await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache }); + + if (this.cache && opts?.autoPurgeCache !== false) { + await this.cache.clear(); + } 
+ } } diff --git a/api/src/services/shares.ts b/api/src/services/shares.ts index 2f69591423..d519237d9f 100644 --- a/api/src/services/shares.ts +++ b/api/src/services/shares.ts @@ -4,6 +4,7 @@ import type { Item, PrimaryKey } from '@directus/types'; import argon2 from 'argon2'; import jwt from 'jsonwebtoken'; import { useLogger } from '../logger/index.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; import type { AbstractServiceOptions, DirectusTokenPayload, @@ -16,7 +17,6 @@ import { getSecret } from '../utils/get-secret.js'; import { md } from '../utils/md.js'; import { Url } from '../utils/url.js'; import { userName } from '../utils/user-name.js'; -import { AuthorizationService } from './authorization.js'; import { ItemsService } from './items.js'; import { MailService } from './mail/index.js'; import { UsersService } from './users.js'; @@ -25,20 +25,26 @@ const env = useEnv(); const logger = useLogger(); export class SharesService extends ItemsService { - authorizationService: AuthorizationService; - constructor(options: AbstractServiceOptions) { super('directus_shares', options); - - this.authorizationService = new AuthorizationService({ - accountability: this.accountability, - knex: this.knex, - schema: this.schema, - }); } override async createOne(data: Partial, opts?: MutationOptions): Promise { - await this.authorizationService.checkAccess('share', data['collection'], data['item']); + if (this.accountability) { + await validateAccess( + { + accountability: this.accountability, + action: 'share', + collection: data['collection'], + primaryKeys: [data['item']], + }, + { + schema: this.schema, + knex: this.knex, + }, + ); + } + return super.createOne(data, opts); } diff --git a/api/src/services/specifications.ts b/api/src/services/specifications.ts index 3f1802e9f1..f5616f4be2 100644 --- a/api/src/services/specifications.ts +++ b/api/src/services/specifications.ts @@ -1,6 +1,7 @@ import { useEnv } from 
'@directus/env'; import formatTitle from '@directus/format-title'; import { spec } from '@directus/specs'; +import { isSystemCollection } from '@directus/system-data'; import type { Accountability, FieldOverview, Permission, SchemaOverview, Type } from '@directus/types'; import { version } from 'directus/version'; import type { Knex } from 'knex'; @@ -15,11 +16,13 @@ import type { } from 'openapi3-ts/oas30'; import { OAS_REQUIRED_SCHEMAS } from '../constants.js'; import getDatabase from '../database/index.js'; +import { fetchPermissions } from '../permissions/lib/fetch-permissions.js'; +import { fetchPolicies } from '../permissions/lib/fetch-policies.js'; +import { fetchAllowedFieldMap } from '../permissions/modules/fetch-allowed-field-map/fetch-allowed-field-map.js'; import type { AbstractServiceOptions } from '../types/index.js'; import { getRelationType } from '../utils/get-relation-type.js'; import { reduceSchema } from '../utils/reduce-schema.js'; import { GraphQLService } from './graphql/index.js'; -import { isSystemCollection } from '@directus/system-data'; const env = useEnv(); @@ -31,13 +34,13 @@ export class SpecificationService { oas: OASSpecsService; graphql: GraphQLSpecsService; - constructor({ accountability, knex, schema }: AbstractServiceOptions) { - this.accountability = accountability || null; - this.knex = knex || getDatabase(); - this.schema = schema; + constructor(options: AbstractServiceOptions) { + this.accountability = options.accountability || null; + this.knex = options.knex || getDatabase(); + this.schema = options.schema; - this.oas = new OASSpecsService({ knex, schema, accountability }); - this.graphql = new GraphQLSpecsService({ knex, schema, accountability }); + this.oas = new OASSpecsService(options); + this.graphql = new GraphQLSpecsService(options); } } @@ -50,20 +53,39 @@ class OASSpecsService implements SpecificationSubService { knex: Knex; schema: SchemaOverview; - constructor({ knex, schema, accountability }: 
AbstractServiceOptions) { - this.accountability = accountability || null; - this.knex = knex || getDatabase(); + constructor(options: AbstractServiceOptions) { + this.accountability = options.accountability || null; + this.knex = options.knex || getDatabase(); - this.schema = - this.accountability?.admin === true ? schema : reduceSchema(schema, accountability?.permissions || null); + this.schema = options.schema; } async generate(host?: string) { - const permissions = this.accountability?.permissions ?? []; + let schema = this.schema; + let permissions: Permission[] = []; - const tags = await this.generateTags(); + if (this.accountability && this.accountability.admin !== true) { + const allowedFields = await fetchAllowedFieldMap( + { + accountability: this.accountability, + action: 'read', + }, + { schema, knex: this.knex }, + ); + + schema = reduceSchema(schema, allowedFields); + + const policies = await fetchPolicies(this.accountability, { schema, knex: this.knex }); + + permissions = await fetchPermissions( + { action: 'read', policies, accountability: this.accountability }, + { schema, knex: this.knex }, + ); + } + + const tags = await this.generateTags(schema); const paths = await this.generatePaths(permissions, tags); - const components = await this.generateComponents(tags); + const components = await this.generateComponents(schema, tags); const isDefaultPublicUrl = env['PUBLIC_URL'] === '/'; const url = isDefaultPublicUrl && host ? 
host : (env['PUBLIC_URL'] as string); @@ -91,9 +113,10 @@ class OASSpecsService implements SpecificationSubService { return spec; } - private async generateTags(): Promise { + private async generateTags(schema: SchemaOverview): Promise { const systemTags = cloneDeep(spec.tags)!; - const collections = Object.values(this.schema.collections); + + const collections = Object.values(schema.collections); const tags: OpenAPIObject['tags'] = []; for (const systemTag of systemTags) { @@ -303,7 +326,10 @@ class OASSpecsService implements SpecificationSubService { return paths; } - private async generateComponents(tags: OpenAPIObject['tags']): Promise { + private async generateComponents( + schema: SchemaOverview, + tags: OpenAPIObject['tags'], + ): Promise { if (!tags) return; let components: OpenAPIObject['components'] = cloneDeep(spec.components); @@ -330,7 +356,7 @@ class OASSpecsService implements SpecificationSubService { } } - const collections = Object.values(this.schema.collections); + const collections = Object.values(schema.collections); for (const collection of collections) { const tag = tags.find((tag) => tag['x-collection'] === collection.collection); @@ -351,7 +377,7 @@ class OASSpecsService implements SpecificationSubService { schemaComponent.properties[field.field] = (cloneDeep( (spec.components!.schemas![tag.name] as SchemaObject).properties![field.field], - ) as SchemaObject) || this.generateField(collection.collection, field, tags); + ) as SchemaObject) || this.generateField(schema, collection.collection, field, tags); } components.schemas[tag.name] = schemaComponent; @@ -363,7 +389,7 @@ class OASSpecsService implements SpecificationSubService { }; for (const field of fieldsInCollection) { - schemaComponent.properties![field.field] = this.generateField(collection.collection, field, tags); + schemaComponent.properties![field.field] = this.generateField(schema, collection.collection, field, tags); } components.schemas[tag.name] = schemaComponent; @@ -393,7 
+419,12 @@ class OASSpecsService implements SpecificationSubService { } } - private generateField(collection: string, field: FieldOverview, tags: TagObject[]): SchemaObject { + private generateField( + schema: SchemaOverview, + collection: string, + field: FieldOverview, + tags: TagObject[], + ): SchemaObject { let propertyObject: SchemaObject = {}; propertyObject.nullable = field.nullable; @@ -402,7 +433,7 @@ class OASSpecsService implements SpecificationSubService { propertyObject.description = field.note; } - const relation = this.schema.relations.find( + const relation = schema.relations.find( (relation) => (relation.collection === collection && relation.field === field.field) || (relation.related_collection === collection && relation.meta?.one_field === field.field), @@ -426,12 +457,12 @@ class OASSpecsService implements SpecificationSubService { if ( !relatedTag || !relation.related_collection || - relation.related_collection in this.schema.collections === false + relation.related_collection in schema.collections === false ) { return propertyObject; } - const relatedCollection = this.schema.collections[relation.related_collection]!; + const relatedCollection = schema.collections[relation.related_collection]!; const relatedPrimaryKeyField = relatedCollection.fields[relatedCollection.primary]!; propertyObject.oneOf = [ @@ -445,11 +476,11 @@ class OASSpecsService implements SpecificationSubService { } else if (relationType === 'o2m') { const relatedTag = tags.find((tag) => tag['x-collection'] === relation.collection); - if (!relatedTag || !relation.related_collection || relation.collection in this.schema.collections === false) { + if (!relatedTag || !relation.related_collection || relation.collection in schema.collections === false) { return propertyObject; } - const relatedCollection = this.schema.collections[relation.collection]!; + const relatedCollection = schema.collections[relation.collection]!; const relatedPrimaryKeyField = 
relatedCollection.fields[relatedCollection.primary]!; if (!relatedTag || !relatedPrimaryKeyField) return propertyObject; diff --git a/api/src/services/users.test.ts b/api/src/services/users.test.ts index 76790a25dd..4748fb0253 100644 --- a/api/src/services/users.test.ts +++ b/api/src/services/users.test.ts @@ -1,24 +1,13 @@ -import { ForbiddenError, InvalidPayloadError, RecordNotUniqueError } from '@directus/errors'; -import type { SchemaOverview } from '@directus/types'; -import { randomUUID } from 'crypto'; +import { InvalidPayloadError, RecordNotUniqueError } from '@directus/errors'; +import type { Accountability, SchemaOverview } from '@directus/types'; import knex, { type Knex } from 'knex'; import { MockClient, Tracker, createTracker } from 'knex-mock-client'; -import { - afterEach, - beforeAll, - beforeEach, - describe, - expect, - it, - vi, - type MockInstance, - type MockedFunction, -} from 'vitest'; -import { checkIncreasedUserLimits } from '../telemetry/utils/check-increased-user-limits.js'; -import { getRoleCountsByRoles } from '../telemetry/utils/get-role-counts-by-roles.js'; -import { getRoleCountsByUsers } from '../telemetry/utils/get-role-counts-by-users.js'; -import { shouldCheckUserLimits } from '../telemetry/utils/should-check-user-limits.js'; +import { afterEach, beforeAll, beforeEach, describe, expect, it, vi, type MockedFunction } from 'vitest'; +import { validateRemainingAdminUsers } from '../permissions/modules/validate-remaining-admin/validate-remaining-admin-users.js'; +import type { MutationOptions } from '../types/items.js'; +import { UserIntegrityCheckFlag } from '../utils/validate-user-count-integrity.js'; import { ItemsService, MailService, UsersService } from './index.js'; +import { randomUUID } from '@directus/random'; vi.mock('../../src/database/index', () => ({ default: vi.fn(), @@ -41,10 +30,7 @@ vi.mock('@directus/env', () => ({ }), })); -vi.mock('../telemetry/utils/check-increased-user-limits.js'); 
-vi.mock('../telemetry/utils/get-role-counts-by-roles.js'); -vi.mock('../telemetry/utils/get-role-counts-by-users.js'); -vi.mock('../telemetry/utils/should-check-user-limits.js'); +vi.mock('../permissions/modules/validate-remaining-admin/validate-remaining-admin-users.js'); const testRoleId = '4ccdb196-14b3-4ed1-b9da-c1978be07ca2'; @@ -63,8 +49,8 @@ const testSchema = { defaultValue: null, nullable: false, generated: true, - type: 'integer', - dbType: 'integer', + type: 'uuid', + dbType: 'uuid', precision: null, scale: null, special: [], @@ -79,110 +65,30 @@ const testSchema = { } as SchemaOverview; describe('Integration Tests', () => { - let db: MockedFunction; - let tracker: Tracker; - - beforeAll(async () => { - db = vi.mocked(knex.default({ client: MockClient })); - tracker = createTracker(db); - }); - - beforeEach(() => { - tracker.on.any('directus_users').response({}); - - // mock notifications update query in deleteOne/deleteMany/deleteByQuery methods - tracker.on.update('directus_notifications').response({}); - - // mock versions update query in deleteOne/deleteMany/deleteByQuery methods - tracker.on.update('directus_versions').response({}); - - // mock user counts in updateOne/updateMany/updateByQuery methods - tracker.on - .select( - /(select count\("directus_users"\."id"\) as "count", "directus_roles"\."admin_access", "directus_roles"\."app_access" from "directus_users").*(group by "directus_roles"\."admin_access", "directus_roles"\."app_access")/, - ) - .response([{ count: 0, admin_access: true, app_access: true }]); - - vi.mocked(getRoleCountsByRoles).mockResolvedValueOnce({ admin: 0, app: 0, api: 0 }); - vi.mocked(getRoleCountsByUsers).mockResolvedValue({ admin: 0, api: 0, app: 0 }); - vi.mocked(shouldCheckUserLimits).mockResolvedValue(true); - }); + const db = vi.mocked(knex.default({ client: MockClient })); + const tracker = createTracker(db); afterEach(() => { tracker.reset(); }); describe('Services / Users', () => { - let service: UsersService; - 
let mailService: MailService; - let superCreateOneSpy: MockInstance; - let superUpdateManySpy: MockInstance; - let checkUniqueEmailsSpy: MockInstance; - let checkPasswordPolicySpy: MockInstance; - let checkRemainingAdminExistenceSpy: MockInstance; - let checkRemainingActiveAdminSpy: MockInstance; - - beforeEach(() => { - service = new UsersService({ - knex: db, - schema: { - collections: { - directus_users: { - collection: 'directus_users', - primary: 'id', - singleton: false, - sortField: null, - note: null, - accountability: null, - fields: { - id: { - field: 'id', - defaultValue: null, - nullable: false, - generated: true, - type: 'integer', - dbType: 'integer', - precision: null, - scale: null, - special: [], - note: null, - validation: null, - alias: false, - }, - }, - }, - }, - relations: [], - }, - }); - - superCreateOneSpy = vi.spyOn(ItemsService.prototype as any, 'createOne'); - superUpdateManySpy = vi.spyOn(ItemsService.prototype as any, 'updateMany'); - - // "as any" are needed since these are private methods - checkUniqueEmailsSpy = vi - .spyOn(UsersService.prototype as any, 'checkUniqueEmails') - .mockImplementation(() => vi.fn()); - - checkPasswordPolicySpy = vi - .spyOn(UsersService.prototype as any, 'checkPasswordPolicy') - .mockResolvedValue(() => vi.fn()); - - checkRemainingAdminExistenceSpy = vi - .spyOn(UsersService.prototype as any, 'checkRemainingAdminExistence') - .mockResolvedValue(() => vi.fn()); - - checkRemainingActiveAdminSpy = vi - .spyOn(UsersService.prototype as any, 'checkRemainingActiveAdmin') - .mockResolvedValue(() => vi.fn()); - - vi.spyOn(UsersService.prototype as any, 'inviteUrl').mockImplementation(() => vi.fn()); - - mailService = new MailService({ - schema: testSchema, - }); + const service = new UsersService({ + knex: db, + schema: testSchema, }); + const superCreateOneSpy = vi.spyOn(ItemsService.prototype, 'createOne').mockResolvedValue(randomUUID()); + const superUpdateManySpy = vi.spyOn(ItemsService.prototype, 
'updateMany').mockResolvedValue([randomUUID()]); + + const checkUniqueEmailsSpy = vi + .spyOn(UsersService.prototype as any, 'checkUniqueEmails') + .mockImplementation(() => vi.fn()); + + const checkPasswordPolicySpy = vi + .spyOn(UsersService.prototype as any, 'checkPasswordPolicy') + .mockResolvedValue(() => vi.fn()); + afterEach(() => { vi.clearAllMocks(); }); @@ -190,618 +96,230 @@ describe('Integration Tests', () => { describe('createOne', () => { it('should not checkUniqueEmails', async () => { await service.createOne({}); + expect(checkUniqueEmailsSpy).not.toBeCalled(); }); it('should checkUniqueEmails once', async () => { await service.createOne({ email: 'test@example.com' }); + expect(checkUniqueEmailsSpy).toBeCalledTimes(1); }); it('should not checkPasswordPolicy', async () => { await service.createOne({}); + expect(checkPasswordPolicySpy).not.toBeCalled(); }); it('should checkPasswordPolicy once', async () => { await service.createOne({ password: 'testpassword' }); + expect(checkPasswordPolicySpy).toBeCalledTimes(1); }); + + it('should request user limits checks', async () => { + const opts: MutationOptions = {}; + + await service.createOne({}, opts); + + expect(opts.userIntegrityCheckFlags).toBe(UserIntegrityCheckFlag.UserLimits); + }); }); describe('createMany', () => { + vi.spyOn(ItemsService.prototype, 'createMany').mockResolvedValue([1]); + it('should not checkUniqueEmails', async () => { await service.createMany([{}]); + expect(checkUniqueEmailsSpy).not.toBeCalled(); }); it('should checkUniqueEmails once', async () => { await service.createMany([{ email: 'test@example.com' }]); - expect(checkUniqueEmailsSpy).toBeCalledTimes(2); + + expect(checkUniqueEmailsSpy).toBeCalledTimes(1); }); it('should not checkPasswordPolicy', async () => { await service.createMany([{}]); + expect(checkPasswordPolicySpy).not.toBeCalled(); }); it('should checkPasswordPolicy once', async () => { await service.createMany([{ password: 'testpassword' }]); - 
expect(checkPasswordPolicySpy).toBeCalledTimes(2); - }); - it('should process user limits for new roles', async () => { - await service.createMany([{ role: { admin_access: true } }, { role: { app_access: true } }, { role: {} }]); - expect(getRoleCountsByRoles).toBeCalledWith(db, []); - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 1, app: 1, api: 1 }); - }); - - it('should process user limits for existing roles', async () => { - vi.mocked(getRoleCountsByRoles).mockReset(); - vi.mocked(getRoleCountsByRoles).mockResolvedValue({ admin: 1, app: 2, api: 3 }); - await service.createMany([{ role: randomUUID() }, { role: randomUUID() }, { role: randomUUID() }]); - expect(getRoleCountsByRoles).toBeCalledWith(db, expect.any(Array)); - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 1, app: 2, api: 3 }); - }); - - it('should process user limits for new and existing roles', async () => { - vi.mocked(getRoleCountsByRoles).mockReset(); - vi.mocked(getRoleCountsByRoles).mockResolvedValue({ admin: 1, app: 2, api: 3 }); - - await service.createMany([ - { role: randomUUID() }, - { role: randomUUID() }, - { role: randomUUID() }, - { role: { admin_access: true } }, - { role: { app_access: true } }, - { role: {} }, - ]); - - expect(getRoleCountsByRoles).toBeCalledWith(db, expect.any(Array)); - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 2, app: 3, api: 4 }); - }); - - it('skips user limits check when no limit is set', async () => { - vi.mocked(shouldCheckUserLimits).mockReturnValue(false); - - await service.createMany([{ role: randomUUID() }, { role: randomUUID() }, { role: randomUUID() }]); - - expect(checkIncreasedUserLimits).not.toBeCalled(); - }); - }); - - describe('updateOne', () => { - it('should not checkRemainingAdminExistence', async () => { - // mock newRole query in updateMany (called by ItemsService updateOne) - tracker.on - .select(/select "admin_access", "app_access" from "directus_roles"/) - .response({ admin_access: true, 
app_access: true }); - - await service.updateOne(1, { role: testRoleId }); - expect(checkRemainingAdminExistenceSpy).not.toBeCalled(); - }); - - it('should checkRemainingAdminExistence once', async () => { - // mock newRole query in updateMany (called by ItemsService updateOne) - tracker.on - .select(/select "admin_access", "app_access" from "directus_roles"/) - .response({ admin_access: false }); - - await service.updateOne(1, { role: testRoleId }); - expect(checkRemainingAdminExistenceSpy).toBeCalledTimes(1); - }); - - it('should not checkRemainingActiveAdmin', async () => { - await service.updateOne(1, {}); - expect(checkRemainingActiveAdminSpy).not.toBeCalled(); - }); - - it('should checkRemainingActiveAdmin once', async () => { - await service.updateOne(1, { status: 'inactive' }); - expect(checkRemainingActiveAdminSpy).toBeCalledTimes(1); - }); - - it('should not checkUniqueEmails', async () => { - await service.updateOne(1, {}); - expect(checkUniqueEmailsSpy).not.toBeCalled(); - }); - - it('should checkUniqueEmails once', async () => { - await service.updateOne(1, { email: 'test@example.com' }); - expect(checkUniqueEmailsSpy).toBeCalledTimes(1); - }); - - it('should not checkPasswordPolicy', async () => { - await service.updateOne(1, {}); - expect(checkPasswordPolicySpy).not.toBeCalled(); - }); - - it('should checkPasswordPolicy once', async () => { - await service.updateOne(1, { password: 'testpassword' }); expect(checkPasswordPolicySpy).toBeCalledTimes(1); }); - it.each(['provider', 'external_identifier'])( - 'should throw InvalidPayloadError for non-admin users when updating "%s" field', - async (field) => { - const service = new UsersService({ - knex: db, - schema: testSchema, - accountability: { role: 'test', admin: false }, - }); + it('should request user limits checks', async () => { + const opts: MutationOptions = {}; - const promise = service.updateOne(1, { [field]: 'test' }); + await service.createMany([{}], opts); - expect.assertions(5); // to 
ensure both assertions in the catch block are reached - - try { - await promise; - } catch (err: any) { - expect(err.message).toBe(`You don't have permission to access this.`); - expect(err).toBeInstanceOf(ForbiddenError); - } - - expect(superUpdateManySpy).toHaveBeenCalled(); - - expect(superUpdateManySpy.mock.lastCall![2].preMutationError.message).toBe( - `Invalid payload. You can't change the "${field}" value manually.`, - ); - - expect(superUpdateManySpy.mock.lastCall![2].preMutationError).toBeInstanceOf(InvalidPayloadError); - }, - ); - - it.each(['provider', 'external_identifier'])('should allow admin users to update "%s" field', async (field) => { - const service = new UsersService({ - knex: db, - schema: testSchema, - accountability: { role: 'admin', admin: true }, - }); - - const promise = service.updateOne(1, { [field]: 'test' }); - - await expect(promise).resolves.not.toThrow(); - expect(superUpdateManySpy).toBeCalledWith([1], expect.objectContaining({ auth_data: null }), undefined); + expect(opts.userIntegrityCheckFlags).toBe(UserIntegrityCheckFlag.UserLimits); }); - - it.each(['provider', 'external_identifier'])( - 'should allow null accountability to update "%s" field', - async (field) => { - const service = new UsersService({ - knex: db, - schema: testSchema, - }); - - const promise = service.updateOne(1, { [field]: 'test' }); - - await expect(promise).resolves.not.toThrow(); - expect(superUpdateManySpy).toBeCalledWith([1], expect.objectContaining({ auth_data: null }), undefined); - }, - ); }); describe('updateMany', () => { - it('should not checkRemainingAdminExistence', async () => { - // mock newRole query in updateMany - tracker.on - .select(/select "admin_access", "app_access" from "directus_roles"/) - .response({ admin_access: true, app_access: true }); + it('should not request user integrity checks if no relevant fields are changed', async () => { + const opts: MutationOptions = {}; - await service.updateMany([1], { role: testRoleId }); - 
expect(checkRemainingAdminExistenceSpy).not.toBeCalled(); + await service.updateMany([randomUUID()], {}, opts); + + expect(opts.userIntegrityCheckFlags).toBe(undefined); }); - it('should checkRemainingAdminExistence once', async () => { - // mock newRole query in updateMany - tracker.on - .select(/select "admin_access", "app_access" from "directus_roles"/) - .response({ admin_access: false }); + it('should request all user integrity checks if role is changed', async () => { + const opts: MutationOptions = {}; - await service.updateMany([1], { role: testRoleId }); - expect(checkRemainingAdminExistenceSpy).toBeCalledTimes(1); + await service.updateMany([randomUUID()], { role: testRoleId }, opts); + + expect(opts.userIntegrityCheckFlags).toBe(UserIntegrityCheckFlag.All); }); - it('should checkRemainingAdminExistence once for new non admin role', async () => { - await service.updateMany([1], { role: { name: 'test' } }); - expect(checkRemainingAdminExistenceSpy).toBeCalledTimes(1); + it('should request all user integrity checks if status is changed to not "active"', async () => { + const opts: MutationOptions = {}; + + await service.updateMany([randomUUID()], { status: 'inactive' }, opts); + + expect(opts.userIntegrityCheckFlags).toBe(UserIntegrityCheckFlag.All); }); - it('should not checkRemainingAdminExistence for new admin role', async () => { - await service.updateMany([1], { role: { name: 'test', admin_access: true } }); - expect(checkRemainingAdminExistenceSpy).not.toBeCalled(); + it('should request user limit checks if status is changed to "active"', async () => { + const opts: MutationOptions = {}; + + await service.updateMany([randomUUID()], { status: 'active' }, opts); + + expect(opts.userIntegrityCheckFlags).toBe(UserIntegrityCheckFlag.UserLimits); }); - it('should not checkRemainingActiveAdmin', async () => { - await service.updateMany([1], {}); - expect(checkRemainingActiveAdminSpy).not.toBeCalled(); - }); + it('should clear caches if role is changed', 
async () => { + const clearCacheSpy = vi.spyOn(UsersService.prototype as any, 'clearCaches'); - it('should checkRemainingActiveAdmin once', async () => { - await service.updateMany([1], { status: 'inactive' }); - expect(checkRemainingActiveAdminSpy).toBeCalledTimes(1); + await service.updateMany([randomUUID()], { role: testRoleId }); + + expect(clearCacheSpy).toHaveBeenCalled(); }); it('should not checkUniqueEmails', async () => { - await service.updateMany([1], {}); + await service.updateMany([randomUUID()], {}); + expect(checkUniqueEmailsSpy).not.toBeCalled(); }); it('should checkUniqueEmails once', async () => { - await service.updateMany([1], { email: 'test@example.com' }); + await service.updateMany([randomUUID()], { email: 'test@example.com' }); + expect(checkUniqueEmailsSpy).toBeCalledTimes(1); }); - it('should throw RecordNotUniqueError for multiple keys with same email', async () => { - expect.assertions(2); // to ensure both assertions in the catch block are reached + it('should disallow updating multiple items to same email', async () => { + const opts: MutationOptions = {}; - try { - await service.updateMany([1, 2], { email: 'test@example.com' }); - } catch (err: any) { - expect(err.message).toBe(`Value for field "email" in collection "directus_users" has to be unique.`); - expect(err).toBeInstanceOf(RecordNotUniqueError); - } + await service.updateMany([randomUUID(), randomUUID()], { email: 'test@example.com' }, opts); + + expect(opts.preMutationError).toStrictEqual( + new RecordNotUniqueError({ + collection: 'directus_users', + field: 'email', + }), + ); }); it('should not checkPasswordPolicy', async () => { - await service.updateMany([1], {}); + await service.updateMany([randomUUID()], {}); + expect(checkPasswordPolicySpy).not.toBeCalled(); }); it('should checkPasswordPolicy once', async () => { - await service.updateMany([1], { password: 'testpassword' }); + await service.updateMany([randomUUID()], { password: 'testpassword' }); + 
expect(checkPasswordPolicySpy).toBeCalledTimes(1); }); - it.each(['provider', 'external_identifier'])( - 'should throw InvalidPayloadError for non-admin users when updating "%s" field', - async (field) => { + describe('restricted auth fields', () => { + describe('should disallow updates for non-admin users', () => { const service = new UsersService({ knex: db, schema: testSchema, - accountability: { role: 'test', admin: false }, + accountability: { role: 'test', admin: false } as Accountability, }); - const promise = service.updateMany([1], { [field]: 'test' }); + it.each(['tfa_secret', 'provider', 'external_identifier'])('%s', async (field) => { + const opts: MutationOptions = {}; - expect.assertions(5); // to ensure both assertions in the catch block are reached + await service.updateMany([1], { [field]: 'test' }, opts); - try { - await promise; - } catch (err: any) { - expect(err.message).toBe(`You don't have permission to access this.`); - expect(err).toBeInstanceOf(ForbiddenError); - } + expect(superUpdateManySpy).toHaveBeenCalled(); - expect(superUpdateManySpy).toHaveBeenCalled(); - - expect(superUpdateManySpy.mock.lastCall![2].preMutationError.message).toBe( - `Invalid payload. 
You can't change the "${field}" value manually.`, - ); - - expect(superUpdateManySpy.mock.lastCall![2].preMutationError).toBeInstanceOf(InvalidPayloadError); - }, - ); - - it.each(['provider', 'external_identifier'])('should allow admin users to update "%s" field', async (field) => { - const service = new UsersService({ - knex: db, - schema: testSchema, - accountability: { role: 'admin', admin: true }, + expect(opts.preMutationError).toStrictEqual( + new InvalidPayloadError({ reason: `You can't change the "${field}" value manually` }), + ); + }); }); - const promise = service.updateMany([1], { [field]: 'test' }); - - await expect(promise).resolves.not.toThrow(); - expect(superUpdateManySpy).toBeCalledWith([1], expect.objectContaining({ auth_data: null }), undefined); - }); - - it.each(['provider', 'external_identifier'])( - 'should allow null accountability to update "%s" field', - async (field) => { + describe.each([ + ['admin users', { role: 'admin', admin: true } as Accountability], + ['null accountability', null], + ])('should allow updates for %s', (_, accountability) => { const service = new UsersService({ knex: db, schema: testSchema, + accountability, }); - const promise = service.updateMany([1], { [field]: 'test' }); + it.each(['provider', 'external_identifier'])('%s', async (field) => { + const promise = service.updateMany([randomUUID()], { [field]: 'test' }); - await expect(promise).resolves.not.toThrow(); - expect(superUpdateManySpy).toBeCalledWith([1], expect.objectContaining({ auth_data: null }), undefined); - }, - ); + await expect(promise).resolves.not.toThrow(); - it('should process user limits for new admin role', async () => { - await service.updateMany([1, 2, 3], { role: { admin_access: true } }); - expect(getRoleCountsByUsers).toBeCalledWith(db, [1, 2, 3]); - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 3, app: 0, api: 0 }); - }); - - it('should process user limits for existing admin role', async () => { - tracker.on - 
.select(/select "admin_access", "app_access" from "directus_roles"/) - .response({ admin_access: true, app_access: true }); - - await service.updateMany([1, 2, 3], { role: randomUUID() }); - - expect(getRoleCountsByUsers).toBeCalledWith(db, [1, 2, 3]); - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 3, app: 0, api: 0 }); - }); - - it('should process user limits for new app role', async () => { - await service.updateMany([1, 2, 3], { role: { app_access: true } }); - expect(getRoleCountsByUsers).toBeCalledWith(db, [1, 2, 3]); - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 3, api: 0 }); - }); - - it('should process user limits for existing app role', async () => { - tracker.on - .select(/select "admin_access", "app_access" from "directus_roles"/) - .response({ admin_access: false, app_access: true }); - - await service.updateMany([1, 2, 3], { role: randomUUID() }); - - expect(getRoleCountsByUsers).toBeCalledWith(db, [1, 2, 3]); - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 3, api: 0 }); - }); - - it('should process user limits for new api role', async () => { - await service.updateMany([1, 2, 3], { role: {} }); - expect(getRoleCountsByUsers).toBeCalledWith(db, [1, 2, 3]); - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 0, api: 3 }); - }); - - it('should process user limits for existing api role', async () => { - tracker.on - .select(/select "admin_access", "app_access" from "directus_roles"/) - .response({ admin_access: false, app_access: false }); - - await service.updateMany([1, 2, 3], { role: randomUUID() }); - - expect(getRoleCountsByUsers).toBeCalledWith(db, [1, 2, 3]); - expect(checkIncreasedUserLimits).toBeCalledWith(db, { admin: 0, app: 0, api: 3 }); - }); - - it('skips user limits check when no limit is set', async () => { - vi.mocked(shouldCheckUserLimits).mockReturnValue(false); - - tracker.on - .select(/select "admin_access", "app_access" from "directus_roles"/) - 
.response({ admin_access: true, app_access: true }); - - await service.updateMany([1, 2, 3], { role: randomUUID() }); - - expect(checkIncreasedUserLimits).not.toBeCalled(); - }); - }); - - describe('updateByQuery', () => { - it('should not checkRemainingAdminExistence', async () => { - // mock newRole query in updateMany (called by ItemsService updateByQuery) - tracker.on - .select(/select "admin_access", "app_access" from "directus_roles"/) - .response({ admin_access: true, app_access: true }); - - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValue([1]); - - await service.updateByQuery({}, { role: testRoleId }); - expect(checkRemainingAdminExistenceSpy).not.toBeCalled(); - }); - - it('should checkRemainingAdminExistence once', async () => { - // mock newRole query in updateMany (called by ItemsService updateByQuery) - tracker.on - .select(/select "admin_access", "app_access" from "directus_roles"/) - .response({ admin_access: false }); - - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValue([1]); - - await service.updateByQuery({}, { role: testRoleId }); - expect(checkRemainingAdminExistenceSpy).toBeCalledTimes(1); - }); - - it('should not checkRemainingActiveAdmin', async () => { - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValue([1]); - - await service.updateByQuery({}, {}); - expect(checkRemainingActiveAdminSpy).not.toBeCalled(); - }); - - it('should checkRemainingActiveAdmin once', async () => { - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValue([1]); - - await service.updateByQuery({}, { status: 'inactive' }); - expect(checkRemainingActiveAdminSpy).toBeCalledTimes(1); - }); - - it('should not checkUniqueEmails', async () => { - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValue([1]); - - await service.updateByQuery({}, {}); - expect(checkUniqueEmailsSpy).not.toBeCalled(); - }); - - it('should checkUniqueEmails once', async () => { - vi.spyOn(ItemsService.prototype, 
'getKeysByQuery').mockResolvedValue([1]); - - await service.updateByQuery({}, { email: 'test@example.com' }); - expect(checkUniqueEmailsSpy).toBeCalledTimes(1); - }); - - it('should throw RecordNotUniqueError for multiple keys with same email', async () => { - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValue([1, 2]); - - expect.assertions(2); // to ensure both assertions in the catch block are reached - - try { - await service.updateByQuery({}, { email: 'test@example.com' }); - } catch (err: any) { - expect(err.message).toBe(`Value for field "email" in collection "directus_users" has to be unique.`); - expect(err).toBeInstanceOf(RecordNotUniqueError); - } - }); - - it('should not checkPasswordPolicy', async () => { - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValue([1]); - - await service.updateByQuery({}, {}); - expect(checkPasswordPolicySpy).not.toBeCalled(); - }); - - it('should checkPasswordPolicy once', async () => { - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValue([1]); - - await service.updateByQuery({}, { password: 'testpassword' }); - expect(checkPasswordPolicySpy).toBeCalledTimes(1); - }); - - it.each(['provider', 'external_identifier'])( - 'should throw InvalidPayloadError for non-admin users when updating "%s" field', - async (field) => { - const service = new UsersService({ - knex: db, - schema: testSchema, - accountability: { role: 'test', admin: false }, + expect(superUpdateManySpy.mock.lastCall![1]).toEqual( + expect.objectContaining({ [field]: 'test', auth_data: null }), + ); }); - - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValue([1]); - - const promise = service.updateByQuery({}, { [field]: 'test' }); - - expect.assertions(5); // to ensure both assertions in the catch block are reached - - try { - await promise; - } catch (err: any) { - expect(err.message).toBe(`You don't have permission to access this.`); - expect(err).toBeInstanceOf(ForbiddenError); - } - - 
expect(superUpdateManySpy).toHaveBeenCalled(); - - expect(superUpdateManySpy.mock.lastCall![2].preMutationError.message).toBe( - `Invalid payload. You can't change the "${field}" value manually.`, - ); - - expect(superUpdateManySpy.mock.lastCall![2].preMutationError).toBeInstanceOf(InvalidPayloadError); - }, - ); - - it.each(['provider', 'external_identifier'])('should allow admin users to update "%s" field', async (field) => { - const service = new UsersService({ - knex: db, - schema: testSchema, - accountability: { role: 'admin', admin: true }, }); - - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValue([1]); - - const promise = service.updateByQuery({}, { [field]: 'test' }); - - await expect(promise).resolves.not.toThrow(); - expect(superUpdateManySpy).toBeCalledWith([1], expect.objectContaining({ auth_data: null }), undefined); - }); - - it.each(['provider', 'external_identifier'])( - 'should allow null accountability to update "%s" field', - async (field) => { - const service = new UsersService({ - knex: db, - schema: testSchema, - }); - - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValue([1]); - - const promise = service.updateByQuery({}, { [field]: 'test' }); - - await expect(promise).resolves.not.toThrow(); - expect(superUpdateManySpy).toBeCalledWith([1], expect.objectContaining({ auth_data: null }), undefined); - }, - ); - }); - - describe('deleteOne', () => { - it('should checkRemainingAdminExistence once', async () => { - const service = new UsersService({ - knex: db, - schema: testSchema, - accountability: { role: 'test', admin: false }, - }); - - const promise = service.deleteOne(1); - - expect.assertions(3); // to ensure both assertions in the catch block are reached - - try { - await promise; - } catch (err: any) { - expect(err.message).toBe(`You don't have permission to access this.`); - expect(err).toBeInstanceOf(ForbiddenError); - } - - expect(checkRemainingAdminExistenceSpy).toBeCalledTimes(1); }); }); 
describe('deleteMany', () => { - it('should checkRemainingAdminExistence once', async () => { + vi.spyOn(ItemsService.prototype, 'deleteMany').mockResolvedValue([randomUUID()]); + + it('should validate remaining admin users', async () => { + // mock notifications update query in deleteOne/deleteMany/deleteByQuery methods + tracker.on.update('directus_notifications').response({}); + // mock versions update query in deleteOne/deleteMany/deleteByQuery methods + tracker.on.update('directus_versions').response({}); + const service = new UsersService({ knex: db, schema: testSchema, - accountability: { role: 'test', admin: false }, + accountability: { role: 'test', admin: false } as Accountability, }); - const promise = service.deleteMany([1]); + await service.deleteMany([randomUUID()]); - expect.assertions(3); // to ensure both assertions in the catch block are reached - - try { - await promise; - } catch (err: any) { - expect(err.message).toBe(`You don't have permission to access this.`); - expect(err).toBeInstanceOf(ForbiddenError); - } - - expect(checkRemainingAdminExistenceSpy).toBeCalledTimes(1); - }); - }); - - describe('deleteByQuery', () => { - it('should checkRemainingAdminExistence once', async () => { - const service = new UsersService({ - knex: db, - schema: testSchema, - accountability: { role: 'test', admin: false }, - }); - - // mock return value for the following empty query - vi.spyOn(ItemsService.prototype, 'getKeysByQuery').mockResolvedValueOnce([1]); - - const promise = service.deleteByQuery({ filter: { id: { _eq: 1 } } }); - - expect.assertions(3); // to ensure both assertions in the catch block are reached - - try { - await promise; - } catch (err: any) { - expect(err.message).toBe(`You don't have permission to access this.`); - expect(err).toBeInstanceOf(ForbiddenError); - } - - expect(checkRemainingAdminExistenceSpy).toBeCalledTimes(1); + expect(validateRemainingAdminUsers).toHaveBeenCalled(); }); }); describe('invite', () => { + const mailService 
= new MailService({ + schema: testSchema, + }); + + vi.spyOn(UsersService.prototype as any, 'inviteUrl').mockImplementation(() => vi.fn()); + it('should invite new users', async () => { - // mock newRole query in updateMany - tracker.on - .select(/select "id", "admin_access", "app_access" from "directus_roles"/) - .response({ id: 'invite-role', admin_access: false, app_access: true }); + vi.spyOn(UsersService.prototype as any, 'getUserByEmail').mockResolvedValueOnce(undefined); const service = new UsersService({ knex: db, schema: testSchema, - accountability: { role: 'test', admin: true }, + accountability: { role: 'test', admin: true } as Accountability, }); const promise = service.inviteUser('user@example.com', 'invite-role', null); @@ -823,7 +341,7 @@ describe('Integration Tests', () => { const service = new UsersService({ knex: db, schema: testSchema, - accountability: { role: 'test', admin: true }, + accountability: { role: 'test', admin: true } as Accountability, }); // mock an invited user @@ -843,7 +361,7 @@ describe('Integration Tests', () => { const service = new UsersService({ knex: db, schema: testSchema, - accountability: { role: 'test', admin: true }, + accountability: { role: 'test', admin: true } as Accountability, }); // mock an active user @@ -863,24 +381,22 @@ describe('Integration Tests', () => { const service = new UsersService({ knex: db, schema: testSchema, - accountability: { role: 'test', admin: true }, + accountability: { role: 'test', admin: true } as Accountability, }); - tracker.on - .select(/select "admin_access", "app_access" from "directus_roles"/) - .response({ admin_access: true, app_access: true }); - - // mock an invited user with different role - vi.spyOn(UsersService.prototype as any, 'getUserByEmail').mockResolvedValueOnce({ - id: 1, + const mockUser = { + id: randomUUID(), status: 'invited', role: 'existing-role', - }); + }; + + // mock an invited user with different role + vi.spyOn(UsersService.prototype as any, 
'getUserByEmail').mockResolvedValueOnce(mockUser); const promise = service.inviteUser('user@example.com', 'invite-role', null); await expect(promise).resolves.not.toThrow(); - expect(superUpdateManySpy.mock.lastCall![0]).toEqual([1]); + expect(superUpdateManySpy.mock.lastCall![0]).toEqual([mockUser.id]); expect(superUpdateManySpy.mock.lastCall![1]).toEqual({ role: 'invite-role' }); }); }); diff --git a/api/src/services/users.ts b/api/src/services/users.ts index d7538578ce..6a02a208dc 100644 --- a/api/src/services/users.ts +++ b/api/src/services/users.ts @@ -1,25 +1,24 @@ import { useEnv } from '@directus/env'; -import { ForbiddenError, InvalidPayloadError, RecordNotUniqueError, UnprocessableContentError } from '@directus/errors'; +import { ForbiddenError, InvalidPayloadError, RecordNotUniqueError } from '@directus/errors'; import type { Item, PrimaryKey, RegisterUserInput, User } from '@directus/types'; -import { getSimpleHash, toArray, toBoolean, validatePayload } from '@directus/utils'; +import { getSimpleHash, toArray, validatePayload } from '@directus/utils'; import { FailedValidationError, joiValidationErrorItemToErrorExtensions } from '@directus/validation'; import Joi from 'joi'; import jwt from 'jsonwebtoken'; -import { isEmpty, mergeWith } from 'lodash-es'; +import { isEmpty } from 'lodash-es'; import { performance } from 'perf_hooks'; +import { clearSystemCache } from '../cache.js'; import getDatabase from '../database/index.js'; import { useLogger } from '../logger/index.js'; -import { checkIncreasedUserLimits } from '../telemetry/utils/check-increased-user-limits.js'; -import { getRoleCountsByRoles } from '../telemetry/utils/get-role-counts-by-roles.js'; -import { getRoleCountsByUsers } from '../telemetry/utils/get-role-counts-by-users.js'; -import { type AccessTypeCount } from '../telemetry/utils/get-user-count.js'; -import { shouldCheckUserLimits } from '../telemetry/utils/should-check-user-limits.js'; +import { validateRemainingAdminUsers } from 
'../permissions/modules/validate-remaining-admin/validate-remaining-admin-users.js'; +import { createDefaultAccountability } from '../permissions/utils/create-default-accountability.js'; import type { AbstractServiceOptions, MutationOptions } from '../types/index.js'; import { getSecret } from '../utils/get-secret.js'; import isUrlAllowed from '../utils/is-url-allowed.js'; import { verifyJWT } from '../utils/jwt.js'; import { stall } from '../utils/stall.js'; import { Url } from '../utils/url.js'; +import { UserIntegrityCheckFlag } from '../utils/validate-user-count-integrity.js'; import { ItemsService } from './items.js'; import { MailService } from './mail/index.js'; import { SettingsService } from './settings.js'; @@ -108,50 +107,13 @@ export class UsersService extends ItemsService { } } - private async checkRemainingAdminExistence(excludeKeys: PrimaryKey[]) { - // Make sure there's at least one admin user left after this deletion is done - const otherAdminUsers = await this.knex - .count('*', { as: 'count' }) - .from('directus_users') - .whereNotIn('directus_users.id', excludeKeys) - .andWhere({ 'directus_roles.admin_access': true }) - .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') - .first(); - - const otherAdminUsersCount = +(otherAdminUsers?.count || 0); - - if (otherAdminUsersCount === 0) { - throw new UnprocessableContentError({ reason: `You can't remove the last admin user from the role` }); - } - } - - /** - * Make sure there's at least one active admin user when updating user status - */ - private async checkRemainingActiveAdmin(excludeKeys: PrimaryKey[]): Promise { - const otherAdminUsers = await this.knex - .count('*', { as: 'count' }) - .from('directus_users') - .whereNotIn('directus_users.id', excludeKeys) - .andWhere({ 'directus_roles.admin_access': true }) - .andWhere({ 'directus_users.status': 'active' }) - .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') - .first(); - - const otherAdminUsersCount = 
+(otherAdminUsers?.count || 0); - - if (otherAdminUsersCount === 0) { - throw new UnprocessableContentError({ reason: `You can't change the active status of the last admin user` }); - } - } - /** * Get basic information of user identified by email */ private async getUserByEmail( email: string, ): Promise<{ id: string; role: string; status: string; password: string; email: string } | undefined> { - return await this.knex + return this.knex .select('id', 'role', 'status', 'password', 'email') .from('directus_users') .whereRaw(`LOWER(??) = ?`, ['email', email.toLowerCase()]) @@ -194,41 +156,26 @@ export class UsersService extends ItemsService { /** * Create a new user */ - override async createOne(data: Partial, opts?: MutationOptions): Promise { + override async createOne(data: Partial, opts: MutationOptions = {}): Promise { try { - if (data['email']) { + if ('email' in data) { this.validateEmail(data['email']); await this.checkUniqueEmails([data['email']]); } - if (data['password']) { + if ('password' in data) { await this.checkPasswordPolicy([data['password']]); } - - if (shouldCheckUserLimits() && data['role']) { - const increasedCounts: AccessTypeCount = { - admin: 0, - app: 0, - api: 0, - }; - - if (typeof data['role'] === 'object') { - if ('admin_access' in data['role'] && data['role']['admin_access'] === true) { - increasedCounts.admin++; - } else if ('app_access' in data['role'] && data['role']['app_access'] === true) { - increasedCounts.app++; - } else { - increasedCounts.api++; - } - } else { - const existingRoleCounts = await getRoleCountsByRoles(this.knex, [data['role']]); - mergeWith(increasedCounts, existingRoleCounts, (x, y) => x + y); - } - - await checkIncreasedUserLimits(this.knex, increasedCounts); - } } catch (err: any) { - (opts || (opts = {})).preMutationError = err; + opts.preMutationError = err; + } + + if (!('status' in data) || data['status'] === 'active') { + // Creating a user only requires checking user limits if the user is active, no 
need to care about the role + opts.userIntegrityCheckFlags = + (opts.userIntegrityCheckFlags ?? UserIntegrityCheckFlag.None) | UserIntegrityCheckFlag.UserLimits; + + opts.onRequireUserIntegrityCheck?.(opts.userIntegrityCheckFlags); } return await super.createOne(data, opts); @@ -237,10 +184,10 @@ export class UsersService extends ItemsService { /** * Create multiple new users */ - override async createMany(data: Partial[], opts?: MutationOptions): Promise { - const emails = data['map']((payload) => payload['email']).filter((email) => email); - const passwords = data['map']((payload) => payload['password']).filter((password) => password); - const roles = data['map']((payload) => payload['role']).filter((role) => role); + override async createMany(data: Partial[], opts: MutationOptions = {}): Promise { + const emails = data.map((payload) => payload['email']).filter((email) => email); + const passwords = data.map((payload) => payload['password']).filter((password) => password); + const someActive = data.some((payload) => !('status' in payload) || payload['status'] === 'active'); try { if (emails.length) { @@ -251,110 +198,38 @@ export class UsersService extends ItemsService { if (passwords.length) { await this.checkPasswordPolicy(passwords); } - - if (shouldCheckUserLimits() && roles.length) { - const increasedCounts: AccessTypeCount = { - admin: 0, - app: 0, - api: 0, - }; - - const existingRoles = []; - - for (const role of roles) { - if (typeof role === 'object') { - if ('admin_access' in role && role['admin_access'] === true) { - increasedCounts.admin++; - } else if ('app_access' in role && role['app_access'] === true) { - increasedCounts.app++; - } else { - increasedCounts.api++; - } - } else { - existingRoles.push(role); - } - } - - const existingRoleCounts = await getRoleCountsByRoles(this.knex, existingRoles); - - mergeWith(increasedCounts, existingRoleCounts, (x, y) => x + y); - - await checkIncreasedUserLimits(this.knex, increasedCounts); - } } catch (err: 
any) { - (opts || (opts = {})).preMutationError = err; + opts.preMutationError = err; } - return await super.createMany(data, opts); + if (someActive) { + // Creating users only requires checking user limits if the users are active, no need to care about the role + opts.userIntegrityCheckFlags = + (opts.userIntegrityCheckFlags ?? UserIntegrityCheckFlag.None) | UserIntegrityCheckFlag.UserLimits; + + opts.onRequireUserIntegrityCheck?.(opts.userIntegrityCheckFlags); + } + + // Use generic ItemsService to avoid calling `UserService.createOne` to avoid additional work of validating emails, + // as this requires one query per email if done in `createOne` + const itemsService = new ItemsService(this.collection, { + schema: this.schema, + accountability: this.accountability, + knex: this.knex, + }); + + return await itemsService.createMany(data, opts); } /** * Update many users by primary key */ - override async updateMany(keys: PrimaryKey[], data: Partial, opts?: MutationOptions): Promise { + override async updateMany( + keys: PrimaryKey[], + data: Partial, + opts: MutationOptions = {}, + ): Promise { try { - const needsUserLimitCheck = shouldCheckUserLimits(); - - if (data['role']) { - /* - * data['role'] has the following cases: - * - a string with existing role id - * - an object with existing role id for GraphQL mutations - * - an object with data for new role - */ - const role = data['role']?.id ?? 
data['role']; - - let newRole; - - if (typeof role === 'string') { - newRole = await this.knex - .select('admin_access', 'app_access') - .from('directus_roles') - .where('id', role) - .first(); - } else { - newRole = role; - } - - if (!newRole?.admin_access) { - await this.checkRemainingAdminExistence(keys); - } - - if (needsUserLimitCheck && newRole) { - const existingCounts = await getRoleCountsByUsers(this.knex, keys); - - const increasedCounts: AccessTypeCount = { - admin: 0, - app: 0, - api: 0, - }; - - if (toBoolean(newRole.admin_access)) { - increasedCounts.admin = keys.length - existingCounts.admin; - } else if (toBoolean(newRole.app_access)) { - increasedCounts.app = keys.length - existingCounts.app; - } else { - increasedCounts.api = keys.length - existingCounts.api; - } - - await checkIncreasedUserLimits(this.knex, increasedCounts); - } - } - - if (needsUserLimitCheck && data['role'] === null) { - await checkIncreasedUserLimits(this.knex, { admin: 0, app: 0, api: 1 }); - } - - if (data['status'] !== undefined && data['status'] !== 'active') { - await this.checkRemainingActiveAdmin(keys); - } - - if (needsUserLimitCheck && data['status'] === 'active') { - const increasedCounts = await getRoleCountsByUsers(this.knex, keys, { inactiveUsers: true }); - - await checkIncreasedUserLimits(this.knex, increasedCounts); - } - if (data['email']) { if (keys.length > 1) { throw new RecordNotUniqueError({ @@ -391,20 +266,49 @@ export class UsersService extends ItemsService { data['auth_data'] = null; } } catch (err: any) { - (opts || (opts = {})).preMutationError = err; + opts.preMutationError = err; } - return await super.updateMany(keys, data, opts); + if ('role' in data) { + opts.userIntegrityCheckFlags = UserIntegrityCheckFlag.All; + } + + if ('status' in data) { + if (data['status'] === 'active') { + // User are being activated, no need to check if there are enough admins + opts.userIntegrityCheckFlags = + (opts.userIntegrityCheckFlags ?? 
UserIntegrityCheckFlag.None) | UserIntegrityCheckFlag.UserLimits; + } else { + opts.userIntegrityCheckFlags = UserIntegrityCheckFlag.All; + } + } + + if (opts.userIntegrityCheckFlags) { + opts.onRequireUserIntegrityCheck?.(opts.userIntegrityCheckFlags); + } + + const result = await super.updateMany(keys, data, opts); + + // Only clear the caches if the role has been updated + if ('role' in data) { + await this.clearCaches(opts); + } + + return result; } /** * Delete multiple users by primary key */ - override async deleteMany(keys: PrimaryKey[], opts?: MutationOptions): Promise { - try { - await this.checkRemainingAdminExistence(keys); - } catch (err: any) { - (opts || (opts = {})).preMutationError = err; + override async deleteMany(keys: PrimaryKey[], opts: MutationOptions = {}): Promise { + if (opts?.onRequireUserIntegrityCheck) { + opts.onRequireUserIntegrityCheck(opts?.userIntegrityCheckFlags ?? UserIntegrityCheckFlag.None); + } else { + try { + await validateRemainingAdminUsers({ excludeUsers: keys }, { knex: this.knex, schema: this.schema }); + } catch (err: any) { + opts.preMutationError = err; + } } // Manual constraint, see https://github.com/directus/directus/pull/19912 @@ -689,11 +593,19 @@ export class UsersService extends ItemsService { knex: this.knex, schema: this.schema, accountability: { - ...(this.accountability ?? { role: null }), + ...(this.accountability ?? 
createDefaultAccountability()), admin: true, // We need to skip permissions checks for the update call below }, }); await service.updateOne(user.id, { password, status: 'active' }, opts); } + + private async clearCaches(opts?: MutationOptions) { + await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache }); + + if (this.cache && opts?.autoPurgeCache !== false) { + await this.cache.clear(); + } + } } diff --git a/api/src/services/utils.ts b/api/src/services/utils.ts index 14ee40767b..4d0694d1bd 100644 --- a/api/src/services/utils.ts +++ b/api/src/services/utils.ts @@ -5,6 +5,8 @@ import type { Knex } from 'knex'; import { clearSystemCache, getCache } from '../cache.js'; import getDatabase from '../database/index.js'; import emitter from '../emitter.js'; +import { fetchAllowedFields } from '../permissions/modules/fetch-allowed-fields/fetch-allowed-fields.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; import type { AbstractServiceOptions } from '../types/index.js'; import { shouldClearCache } from '../utils/should-clear-cache.js'; @@ -30,16 +32,23 @@ export class UtilsService { throw new InvalidPayloadError({ reason: `Collection "${collection}" doesn't have a sort field` }); } - if (this.accountability?.admin !== true) { - const permissions = this.accountability?.permissions?.find((permission) => { - return permission.collection === collection && permission.action === 'update'; - }); + if (this.accountability && this.accountability.admin !== true) { + await validateAccess( + { + accountability: this.accountability, + action: 'update', + collection, + }, + { + schema: this.schema, + knex: this.knex, + }, + ); - if (!permissions) { - throw new ForbiddenError(); - } - - const allowedFields = permissions.fields ?? 
[]; + const allowedFields = await fetchAllowedFields( + { collection, action: 'update', accountability: this.accountability }, + { schema: this.schema, knex: this.knex }, + ); if (allowedFields[0] !== '*' && allowedFields.includes(sortField) === false) { throw new ForbiddenError(); diff --git a/api/src/services/versions.ts b/api/src/services/versions.ts index b00014c674..bb08ec4aac 100644 --- a/api/src/services/versions.ts +++ b/api/src/services/versions.ts @@ -7,25 +7,17 @@ import objectHash from 'object-hash'; import { getCache } from '../cache.js'; import getDatabase from '../database/index.js'; import emitter from '../emitter.js'; +import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; import type { AbstractServiceOptions, MutationOptions } from '../types/index.js'; import { shouldClearCache } from '../utils/should-clear-cache.js'; import { ActivityService } from './activity.js'; -import { AuthorizationService } from './authorization.js'; import { ItemsService } from './items.js'; import { PayloadService } from './payload.js'; import { RevisionsService } from './revisions.js'; export class VersionsService extends ItemsService { - authorizationService: AuthorizationService; - constructor(options: AbstractServiceOptions) { super('directus_versions', options); - - this.authorizationService = new AuthorizationService({ - accountability: this.accountability, - knex: this.knex, - schema: this.schema, - }); } private async validateCreateData(data: Partial): Promise { @@ -68,12 +60,38 @@ export class VersionsService extends ItemsService { } // will throw an error if the accountability does not have permission to read the item - await this.authorizationService.checkAccess('read', data['collection'], data['item']); + if (this.accountability) { + await validateAccess( + { + accountability: this.accountability, + action: 'read', + collection: data['collection'], + primaryKeys: [data['item']], + }, + { + schema: this.schema, + knex: 
this.knex, + }, + ); + } } async getMainItem(collection: string, item: PrimaryKey, query?: Query): Promise { // will throw an error if the accountability does not have permission to read the item - await this.authorizationService.checkAccess('read', collection, item); + if (this.accountability) { + await validateAccess( + { + accountability: this.accountability, + action: 'read', + collection, + primaryKeys: [item], + }, + { + schema: this.schema, + knex: this.knex, + }, + ); + } const itemsService = new ItemsService(collection, { knex: this.knex, @@ -266,7 +284,20 @@ export class VersionsService extends ItemsService { const { id, collection, item } = (await this.readOne(version)) as ContentVersion; // will throw an error if the accountability does not have permission to update the item - await this.authorizationService.checkAccess('update', collection, item); + if (this.accountability) { + await validateAccess( + { + accountability: this.accountability, + action: 'update', + collection, + primaryKeys: [item], + }, + { + schema: this.schema, + knex: this.knex, + }, + ); + } const { outdated } = await this.verifyHash(collection, item, mainHash); diff --git a/api/src/telemetry/lib/get-report.test.ts b/api/src/telemetry/lib/get-report.test.ts index 1885937472..f6242a6d5c 100644 --- a/api/src/telemetry/lib/get-report.test.ts +++ b/api/src/telemetry/lib/get-report.test.ts @@ -3,11 +3,11 @@ import { version } from 'directus/version'; import { type Knex } from 'knex'; import { afterEach, beforeEach, expect, test, vi } from 'vitest'; import { getDatabase, getDatabaseClient } from '../../database/index.js'; +import { fetchUserCount, type UserCount } from '../../utils/fetch-user-count/fetch-user-count.js'; import { getExtensionCount, type ExtensionCount } from '../utils/get-extension-count.js'; import { getFieldCount, type FieldCount } from '../utils/get-field-count.js'; import { getFilesizeSum, type FilesizeSum } from '../utils/get-filesize-sum.js'; import { getItemCount } 
from '../utils/get-item-count.js'; -import { getUserCount, type AccessTypeCount } from '../utils/get-user-count.js'; import { getUserItemCount, type UserItemCount } from '../utils/get-user-item-count.js'; import { getReport } from './get-report.js'; @@ -32,14 +32,14 @@ vi.mock('@directus/env', () => ({ vi.mock('../utils/get-item-count.js'); vi.mock('../utils/get-storage.js'); vi.mock('../utils/get-user-item-count.js'); -vi.mock('../utils/get-user-count.js'); vi.mock('../utils/get-field-count.js'); vi.mock('../utils/get-extension-count.js'); +vi.mock('../../utils/fetch-user-count/fetch-user-count.js'); vi.mock('../utils/get-filesize-sum.js'); let mockEnv: Record; let mockDb: Knex; -let mockUserCounts: AccessTypeCount; +let mockUserCounts: UserCount; let mockUserItemCounts: UserItemCount; let mockFieldCounts: FieldCount; let mockExtensionCounts: ExtensionCount; @@ -66,7 +66,7 @@ beforeEach(() => { vi.mocked(getDatabase).mockReturnValue(mockDb); vi.mocked(getItemCount).mockResolvedValue({}); - vi.mocked(getUserCount).mockResolvedValue(mockUserCounts); + vi.mocked(fetchUserCount).mockResolvedValue(mockUserCounts); vi.mocked(getUserItemCount).mockResolvedValue(mockUserItemCounts); vi.mocked(getFieldCount).mockResolvedValue(mockFieldCounts); vi.mocked(getExtensionCount).mockResolvedValue(mockExtensionCounts); @@ -118,7 +118,7 @@ test('Runs and returns basic counts', async () => { test('Runs and returns user counts', async () => { const report = await getReport(); - expect(getUserCount).toHaveBeenCalledWith(mockDb); + expect(fetchUserCount).toHaveBeenCalledWith({ knex: mockDb }); expect(report.admin_users).toBe(mockUserCounts.admin); expect(report.app_users).toBe(mockUserCounts.app); diff --git a/api/src/telemetry/lib/get-report.ts b/api/src/telemetry/lib/get-report.ts index b24db665f8..bf89e9442b 100644 --- a/api/src/telemetry/lib/get-report.ts +++ b/api/src/telemetry/lib/get-report.ts @@ -2,12 +2,12 @@ import { useEnv } from '@directus/env'; import { version } from 
'directus/version'; import { getHelpers } from '../../database/helpers/index.js'; import { getDatabase, getDatabaseClient } from '../../database/index.js'; +import { fetchUserCount } from '../../utils/fetch-user-count/fetch-user-count.js'; import type { TelemetryReport } from '../types/report.js'; import { getExtensionCount } from '../utils/get-extension-count.js'; import { getFieldCount } from '../utils/get-field-count.js'; import { getFilesizeSum } from '../utils/get-filesize-sum.js'; import { getItemCount } from '../utils/get-item-count.js'; -import { getUserCount } from '../utils/get-user-count.js'; import { getUserItemCount } from '../utils/get-user-item-count.js'; const basicCountTasks = [ @@ -32,7 +32,7 @@ export const getReport = async (): Promise => { const [basicCounts, userCounts, userItemCount, fieldsCounts, extensionsCounts, databaseSize, filesizes] = await Promise.all([ getItemCount(db, basicCountTasks), - getUserCount(db), + fetchUserCount({ knex: db }), getUserItemCount(db), getFieldCount(db), getExtensionCount(db), diff --git a/api/src/telemetry/utils/check-increased-user-limits.test.ts b/api/src/telemetry/utils/check-increased-user-limits.test.ts deleted file mode 100644 index 61e97b41bf..0000000000 --- a/api/src/telemetry/utils/check-increased-user-limits.test.ts +++ /dev/null @@ -1,65 +0,0 @@ -import type { Knex } from 'knex'; -import { expect, test, vi } from 'vitest'; -import { checkIncreasedUserLimits } from './check-increased-user-limits.js'; -import { getUserCount } from './get-user-count.js'; - -vi.mock('./get-user-count.js'); - -vi.mock('@directus/env', () => ({ - useEnv: vi.fn().mockReturnValue({ - EMAIL_TEMPLATES_PATH: './templates', - USERS_ADMIN_ACCESS_LIMIT: 3, - USERS_APP_ACCESS_LIMIT: 3, - USERS_API_ACCESS_LIMIT: 3, - }), -})); - -const mockDb: Knex = {} as unknown as Knex; - -test('Errors if limits are exceeded with an increase', () => { - vi.mocked(getUserCount).mockResolvedValue({ admin: 1, app: 1, api: 1 }); - - 
expect(checkIncreasedUserLimits(mockDb, { admin: 3, app: 0, api: 0 })).rejects.toThrowError( - 'Active Admin users limit exceeded.', - ); - - expect(checkIncreasedUserLimits(mockDb, { admin: 3, app: 2, api: 0 })).rejects.toThrowError( - 'Active Admin users limit exceeded.', - ); - - expect(checkIncreasedUserLimits(mockDb, { admin: 0, app: 2, api: 0 })).rejects.toThrowError( - 'Active App users limit exceeded.', - ); - - expect(checkIncreasedUserLimits(mockDb, { admin: 2, app: 0, api: 0 })).rejects.toThrowError( - 'Active App users limit exceeded.', - ); - - expect(checkIncreasedUserLimits(mockDb, { admin: 1, app: 1, api: 0 })).rejects.toThrowError( - 'Active App users limit exceeded.', - ); - - expect(checkIncreasedUserLimits(mockDb, { admin: 2, app: 2, api: 0 })).rejects.toThrowError( - 'Active App users limit exceeded.', - ); - - expect(checkIncreasedUserLimits(mockDb, { admin: 0, app: 0, api: 3 })).rejects.toThrowError( - 'Active API users limit exceeded.', - ); -}); - -test('Does not error if limits are exceeded without any increase', () => { - vi.mocked(getUserCount).mockResolvedValue({ admin: 3, app: 3, api: 3 }); - - expect(() => checkIncreasedUserLimits(mockDb, { admin: 0, app: 0, api: 0 })).not.toThrowError(); -}); - -test('Does not errors if limits are not exceeded with an increase', () => { - vi.mocked(getUserCount).mockResolvedValue({ admin: 1, app: 1, api: 1 }); - - expect(() => checkIncreasedUserLimits(mockDb, { admin: 1, app: 0, api: 0 })).not.toThrowError(); - expect(() => checkIncreasedUserLimits(mockDb, { admin: 0, app: 1, api: 0 })).not.toThrowError(); - expect(() => checkIncreasedUserLimits(mockDb, { admin: 0, app: 0, api: 1 })).not.toThrowError(); - expect(() => checkIncreasedUserLimits(mockDb, { admin: 0, app: 1, api: 2 })).not.toThrowError(); - expect(() => checkIncreasedUserLimits(mockDb, { admin: 1, app: 0, api: 2 })).not.toThrowError(); -}); diff --git a/api/src/telemetry/utils/check-increased-user-limits.ts 
b/api/src/telemetry/utils/check-increased-user-limits.ts deleted file mode 100644 index 895fc06e3b..0000000000 --- a/api/src/telemetry/utils/check-increased-user-limits.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { useEnv } from '@directus/env'; -import { LimitExceededError } from '@directus/errors'; -import type { PrimaryKey } from '@directus/types'; -import type { Knex } from 'knex'; -import { getUserCount, type AccessTypeCount } from './get-user-count.js'; - -const env = useEnv(); - -/** - * Ensure that user limits are not reached - */ -export async function checkIncreasedUserLimits( - db: Knex, - increasedUserCounts: AccessTypeCount, - ignoreIds: PrimaryKey[] = [], -): Promise { - if (!increasedUserCounts.admin && !increasedUserCounts.app && !increasedUserCounts.api) return; - - const userCounts = await getUserCount(db, ignoreIds); - - // Admins have full permissions, therefore should count under app access limit - const existingAppUsersCount = userCounts.admin + userCounts.app; - const newAppUsersCount = increasedUserCounts.admin + increasedUserCounts.app; - - if ( - increasedUserCounts.admin > 0 && - increasedUserCounts.admin + userCounts.admin > Number(env['USERS_ADMIN_ACCESS_LIMIT']) - ) { - throw new LimitExceededError({ category: 'Active Admin users' }); - } - - if (newAppUsersCount > 0 && newAppUsersCount + existingAppUsersCount > Number(env['USERS_APP_ACCESS_LIMIT'])) { - throw new LimitExceededError({ category: 'Active App users' }); - } - - if (increasedUserCounts.api > 0 && increasedUserCounts.api + userCounts.api > Number(env['USERS_API_ACCESS_LIMIT'])) { - throw new LimitExceededError({ category: 'Active API users' }); - } -} diff --git a/api/src/telemetry/utils/check-user-limits.test.ts b/api/src/telemetry/utils/check-user-limits.test.ts new file mode 100644 index 0000000000..a536312375 --- /dev/null +++ b/api/src/telemetry/utils/check-user-limits.test.ts @@ -0,0 +1,29 @@ +import { expect, test, vi } from 'vitest'; +import { checkUserLimits } from 
'./check-user-limits.js'; + +vi.mock('@directus/env', () => ({ + useEnv: vi.fn().mockReturnValue({ + EMAIL_TEMPLATES_PATH: './templates', + USERS_ADMIN_ACCESS_LIMIT: 3, + USERS_APP_ACCESS_LIMIT: 3, + USERS_API_ACCESS_LIMIT: 3, + }), +})); + +test('Errors if limits are exceeded', () => { + expect(checkUserLimits({ admin: 4, app: 0, api: 0 })).rejects.toThrowError('Active Admin users limit exceeded.'); + + expect(checkUserLimits({ admin: 2, app: 2, api: 0 })).rejects.toThrowError('Active App users limit exceeded.'); + + expect(checkUserLimits({ admin: 0, app: 4, api: 0 })).rejects.toThrowError('Active App users limit exceeded.'); + + expect(checkUserLimits({ admin: 0, app: 0, api: 4 })).rejects.toThrowError('Active API users limit exceeded.'); +}); + +test('Does not errors if limits are not exceeded', () => { + expect(() => checkUserLimits({ admin: 1, app: 0, api: 0 })).not.toThrowError(); + expect(() => checkUserLimits({ admin: 0, app: 1, api: 0 })).not.toThrowError(); + expect(() => checkUserLimits({ admin: 0, app: 0, api: 1 })).not.toThrowError(); + expect(() => checkUserLimits({ admin: 1, app: 1, api: 1 })).not.toThrowError(); + expect(() => checkUserLimits({ admin: 2, app: 1, api: 2 })).not.toThrowError(); +}); diff --git a/api/src/telemetry/utils/check-user-limits.ts b/api/src/telemetry/utils/check-user-limits.ts new file mode 100644 index 0000000000..2776f5be36 --- /dev/null +++ b/api/src/telemetry/utils/check-user-limits.ts @@ -0,0 +1,23 @@ +import { useEnv } from '@directus/env'; +import { LimitExceededError } from '@directus/errors'; +import { type UserCount } from '../../utils/fetch-user-count/fetch-user-count.js'; + +const env = useEnv(); + +/** + * Ensure that user limits are not reached + */ +export async function checkUserLimits(userCounts: UserCount): Promise { + if (userCounts.admin > Number(env['USERS_ADMIN_ACCESS_LIMIT'])) { + throw new LimitExceededError({ category: 'Active Admin users' }); + } + + // Both app and admin users count against the app 
access limit + if (userCounts.app + userCounts.admin > Number(env['USERS_APP_ACCESS_LIMIT'])) { + throw new LimitExceededError({ category: 'Active App users' }); + } + + if (userCounts.api > Number(env['USERS_API_ACCESS_LIMIT'])) { + throw new LimitExceededError({ category: 'Active API users' }); + } +} diff --git a/api/src/telemetry/utils/get-role-counts-by-roles.test.ts b/api/src/telemetry/utils/get-role-counts-by-roles.test.ts deleted file mode 100644 index 52999755ac..0000000000 --- a/api/src/telemetry/utils/get-role-counts-by-roles.test.ts +++ /dev/null @@ -1,89 +0,0 @@ -import { randomUUID } from 'crypto'; -import { type Knex } from 'knex'; -import { afterEach, beforeEach, expect, test, vi } from 'vitest'; -import { getRoleCountsByRoles } from './get-role-counts-by-roles.js'; - -let mockResult: { id: string; admin_access: number | null; app_access: number | null }[]; -let mockDb: Knex; - -beforeEach(() => { - mockResult = [ - // Admin - { - id: randomUUID(), - admin_access: 1, - app_access: 1, - }, - { - id: randomUUID(), - admin_access: 1, - app_access: 1, - }, - // App - { - id: randomUUID(), - admin_access: 0, - app_access: 1, - }, - { - id: randomUUID(), - admin_access: 0, - app_access: 1, - }, - { - id: randomUUID(), - admin_access: 0, - app_access: 1, - }, - // API - { - id: randomUUID(), - admin_access: 0, - app_access: 0, - }, - { - id: randomUUID(), - admin_access: 0, - app_access: 0, - }, - { - id: randomUUID(), - admin_access: 0, - app_access: 0, - }, - { - id: randomUUID(), - admin_access: 0, - app_access: 0, - }, - ]; - - mockDb = { - select: vi.fn().mockReturnThis(), - from: vi.fn().mockReturnThis(), - whereIn: vi.fn().mockResolvedValue(mockResult), - } as unknown as Knex; -}); - -afterEach(() => { - vi.clearAllMocks(); -}); - -test('Fetches counts from the database', async () => { - const roleIds = [randomUUID(), randomUUID(), randomUUID()]; - await getRoleCountsByRoles(mockDb, roleIds); - - expect(mockDb.select).toHaveBeenCalledWith('id', 
'admin_access', 'app_access'); - expect(mockDb.from).toHaveBeenCalledWith('directus_roles'); - expect(mockDb.whereIn).toHaveBeenCalledWith('id', roleIds); -}); - -test('Returns role counts based on combination of admin/app access', async () => { - const res = await getRoleCountsByRoles(mockDb, []); - - expect(res).toEqual({ - admin: 2, - app: 3, - api: 4, - }); -}); diff --git a/api/src/telemetry/utils/get-role-counts-by-roles.ts b/api/src/telemetry/utils/get-role-counts-by-roles.ts deleted file mode 100644 index 11749efa1a..0000000000 --- a/api/src/telemetry/utils/get-role-counts-by-roles.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { toBoolean } from '@directus/utils'; -import type { Knex } from 'knex'; -import { type AccessTypeCount } from './get-user-count.js'; - -/** - * Get the role type counts by role IDs - */ -export async function getRoleCountsByRoles(db: Knex, roles: string[]): Promise { - const counts: AccessTypeCount = { - admin: 0, - app: 0, - api: 0, - }; - - const result = <{ id: string; admin_access: number | boolean | null; app_access: number | boolean | null }[]>( - await db.select('id', 'admin_access', 'app_access').from('directus_roles').whereIn('id', roles) - ); - - for (const role of result) { - const adminAccess = toBoolean(role.admin_access); - const appAccess = toBoolean(role.app_access); - - if (adminAccess) { - counts.admin++; - } else if (appAccess) { - counts.app++; - } else { - counts.api++; - } - } - - return counts; -} diff --git a/api/src/telemetry/utils/get-role-counts-by-users.test.ts b/api/src/telemetry/utils/get-role-counts-by-users.test.ts deleted file mode 100644 index 6026296ca0..0000000000 --- a/api/src/telemetry/utils/get-role-counts-by-users.test.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { randomUUID } from 'crypto'; -import { type Knex } from 'knex'; -import { afterEach, beforeEach, expect, test, vi } from 'vitest'; -import { getRoleCountsByUsers } from './get-role-counts-by-users.js'; - -let mockResult: { admin_access: 
number | null; app_access: number | null; count: string }[]; -let mockDb: Knex; - -beforeEach(() => { - mockResult = [ - { - admin_access: 1, - app_access: 1, - count: '11', - }, - { - admin_access: 0, - app_access: 1, - count: '22', - }, - { - admin_access: 0, - app_access: 0, - count: '33', - }, - { - admin_access: 1, - app_access: 0, - count: '44', - }, - { - // For users with no role - admin_access: null, - app_access: null, - count: '55', - }, - ]; - - mockDb = { - count: vi.fn().mockReturnThis(), - select: vi.fn().mockReturnThis(), - from: vi.fn().mockReturnThis(), - whereIn: vi.fn().mockReturnThis(), - andWhere: vi.fn().mockReturnThis(), - leftJoin: vi.fn().mockReturnThis(), - groupBy: vi.fn().mockResolvedValue(mockResult), - } as unknown as Knex; -}); - -afterEach(() => { - vi.clearAllMocks(); -}); - -test('Fetches active ccounts from the database', async () => { - const userIds = [randomUUID(), randomUUID(), randomUUID()]; - await getRoleCountsByUsers(mockDb, userIds); - - expect(mockDb.count).toHaveBeenCalledWith('directus_users.id', { as: 'count' }); - expect(mockDb.select).toHaveBeenCalledWith('directus_roles.admin_access', 'directus_roles.app_access'); - expect(mockDb.from).toHaveBeenCalledWith('directus_users'); - expect(mockDb.whereIn).toHaveBeenCalledWith('directus_users.id', userIds); - expect(mockDb.andWhere).toHaveBeenCalledWith('directus_users.status', '=', 'active'); - expect(mockDb.leftJoin).toHaveBeenCalledWith('directus_roles', 'directus_users.role', '=', 'directus_roles.id'); - expect(mockDb.groupBy).toHaveBeenCalledWith('directus_roles.admin_access', 'directus_roles.app_access'); -}); - -test('Fetches inactive counts from the database', async () => { - const userIds = [randomUUID(), randomUUID(), randomUUID()]; - await getRoleCountsByUsers(mockDb, userIds, { inactiveUsers: true }); - - expect(mockDb.count).toHaveBeenCalledWith('directus_users.id', { as: 'count' }); - expect(mockDb.select).toHaveBeenCalledWith('directus_roles.admin_access', 
'directus_roles.app_access'); - expect(mockDb.from).toHaveBeenCalledWith('directus_users'); - expect(mockDb.whereIn).toHaveBeenCalledWith('directus_users.id', userIds); - expect(mockDb.andWhere).toHaveBeenCalledWith('directus_users.status', '!=', 'active'); - expect(mockDb.leftJoin).toHaveBeenCalledWith('directus_roles', 'directus_users.role', '=', 'directus_roles.id'); - expect(mockDb.groupBy).toHaveBeenCalledWith('directus_roles.admin_access', 'directus_roles.app_access'); -}); - -test('Sets final counts based on combination of admin/app access', async () => { - const res = await getRoleCountsByUsers(mockDb, []); - - expect(res).toEqual({ - admin: 55, - app: 22, - api: 88, - }); -}); diff --git a/api/src/telemetry/utils/get-role-counts-by-users.ts b/api/src/telemetry/utils/get-role-counts-by-users.ts deleted file mode 100644 index f0af4a2677..0000000000 --- a/api/src/telemetry/utils/get-role-counts-by-users.ts +++ /dev/null @@ -1,48 +0,0 @@ -import type { PrimaryKey } from '@directus/types'; -import { toBoolean } from '@directus/utils'; -import type { Knex } from 'knex'; -import type { AccessTypeCount } from './get-user-count.js'; - -type CountOptions = { - inactiveUsers?: boolean; -}; - -/** - * Get the role type counts by user IDs - */ -export async function getRoleCountsByUsers( - db: Knex, - userIds: PrimaryKey[], - options: CountOptions = {}, -): Promise { - const counts: AccessTypeCount = { - admin: 0, - app: 0, - api: 0, - }; - - const result = <{ count: number | string; admin_access: number | boolean; app_access: number | boolean }[]>await db - .count('directus_users.id', { as: 'count' }) - .select('directus_roles.admin_access', 'directus_roles.app_access') - .from('directus_users') - .whereIn('directus_users.id', userIds) - .andWhere('directus_users.status', options.inactiveUsers ? 
'!=' : '=', 'active') - .leftJoin('directus_roles', 'directus_users.role', '=', 'directus_roles.id') - .groupBy('directus_roles.admin_access', 'directus_roles.app_access'); - - for (const record of result) { - const adminAccess = toBoolean(record.admin_access); - const appAccess = toBoolean(record.app_access); - const count = Number(record.count); - - if (adminAccess) { - counts.admin += count; - } else if (appAccess) { - counts.app += count; - } else { - counts.api += count; - } - } - - return counts; -} diff --git a/api/src/telemetry/utils/get-user-count.test.ts b/api/src/telemetry/utils/get-user-count.test.ts deleted file mode 100644 index 0966c98fe8..0000000000 --- a/api/src/telemetry/utils/get-user-count.test.ts +++ /dev/null @@ -1,73 +0,0 @@ -import { type Knex } from 'knex'; -import { afterEach, beforeEach, expect, test, vi } from 'vitest'; -import { getUserCount } from './get-user-count.js'; - -let mockResult: { admin_access: number | null; app_access: number | null; count: string }[]; -let mockDb: Knex; - -beforeEach(() => { - mockResult = [ - { - admin_access: 1, - app_access: 1, - count: '15', - }, - { - admin_access: 0, - app_access: 1, - count: '20', - }, - { - admin_access: 0, - app_access: 0, - count: '25', - }, - { - admin_access: 1, - app_access: 0, - count: '30', - }, - { - // For users with no role - admin_access: null, - app_access: null, - count: '35', - }, - ]; - - mockDb = { - count: vi.fn().mockReturnThis(), - select: vi.fn().mockReturnThis(), - from: vi.fn().mockReturnThis(), - leftJoin: vi.fn().mockReturnThis(), - where: vi.fn().mockReturnThis(), - andWhere: vi.fn().mockReturnThis(), - whereNotIn: vi.fn().mockReturnThis(), - groupBy: vi.fn().mockResolvedValue(mockResult), - } as unknown as Knex; -}); - -afterEach(() => { - vi.clearAllMocks(); -}); - -test('Fetches counts from the database', async () => { - await getUserCount(mockDb); - - expect(mockDb.count).toHaveBeenCalledWith('directus_users.id', { as: 'count' }); - 
expect(mockDb.select).toHaveBeenCalledWith('directus_roles.admin_access', 'directus_roles.app_access'); - expect(mockDb.from).toHaveBeenCalledWith('directus_users'); - expect(mockDb.where).toHaveBeenCalledWith('directus_users.status', '=', 'active'); - expect(mockDb.leftJoin).toHaveBeenCalledWith('directus_roles', 'directus_users.role', '=', 'directus_roles.id'); - expect(mockDb.groupBy).toHaveBeenCalledWith('directus_roles.admin_access', 'directus_roles.app_access'); -}); - -test('Sets final counts based on combination of admin/app access', async () => { - const res = await getUserCount(mockDb); - - expect(res).toEqual({ - admin: 45, - app: 20, - api: 60, - }); -}); diff --git a/api/src/telemetry/utils/get-user-count.ts b/api/src/telemetry/utils/get-user-count.ts deleted file mode 100644 index a7e5cc557c..0000000000 --- a/api/src/telemetry/utils/get-user-count.ts +++ /dev/null @@ -1,45 +0,0 @@ -import type { PrimaryKey } from '@directus/types'; -import { toBoolean } from '@directus/utils'; -import { type Knex } from 'knex'; - -export interface AccessTypeCount { - admin: number; - app: number; - api: number; -} - -export const getUserCount = async (db: Knex, ignoreIds: PrimaryKey[] = []): Promise => { - const counts: AccessTypeCount = { - admin: 0, - app: 0, - api: 0, - }; - - const result = <{ count: number | string; admin_access: number | boolean; app_access: number | boolean }[]>( - await db - .count('directus_users.id', { as: 'count' }) - .select('directus_roles.admin_access', 'directus_roles.app_access') - .from('directus_users') - .whereNotIn('directus_users.id', ignoreIds) - .andWhere('directus_users.status', 'active') - .leftJoin('directus_roles', 'directus_users.role', '=', 'directus_roles.id') - .where('directus_users.status', '=', 'active') - .groupBy('directus_roles.admin_access', 'directus_roles.app_access') - ); - - for (const record of result) { - const adminAccess = toBoolean(record.admin_access); - const appAccess = toBoolean(record.app_access); - 
const count = Number(record.count); - - if (adminAccess) { - counts.admin += count; - } else if (appAccess) { - counts.app += count; - } else { - counts.api += count; - } - } - - return counts; -}; diff --git a/api/src/telemetry/utils/get-user-counts-by-roles.test.ts b/api/src/telemetry/utils/get-user-counts-by-roles.test.ts deleted file mode 100644 index 9f72c6f944..0000000000 --- a/api/src/telemetry/utils/get-user-counts-by-roles.test.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { type Knex } from 'knex'; -import { afterEach, beforeEach, expect, test, vi } from 'vitest'; -import { getUserCountsByRoles } from './get-user-counts-by-roles.js'; -import { randomUUID } from 'crypto'; - -let mockResult: { admin_access: number | null; app_access: number | null; count: string }[]; -let mockDb: Knex; - -beforeEach(() => { - mockResult = [ - { - admin_access: 1, - app_access: 1, - count: '15', - }, - { - admin_access: 0, - app_access: 1, - count: '20', - }, - { - admin_access: 0, - app_access: 0, - count: '25', - }, - { - admin_access: 1, - app_access: 0, - count: '30', - }, - { - // For users with no role - admin_access: null, - app_access: null, - count: '35', - }, - ]; - - mockDb = { - count: vi.fn().mockReturnThis(), - select: vi.fn().mockReturnThis(), - from: vi.fn().mockReturnThis(), - whereIn: vi.fn().mockReturnThis(), - andWhere: vi.fn().mockReturnThis(), - leftJoin: vi.fn().mockReturnThis(), - groupBy: vi.fn().mockResolvedValue(mockResult), - } as unknown as Knex; -}); - -afterEach(() => { - vi.clearAllMocks(); -}); - -test('Fetches counts from the database', async () => { - const roleIds = [randomUUID(), randomUUID(), randomUUID()]; - await getUserCountsByRoles(mockDb, roleIds); - - expect(mockDb.count).toHaveBeenCalledWith('directus_users.id', { as: 'count' }); - expect(mockDb.select).toHaveBeenCalledWith('directus_roles.admin_access', 'directus_roles.app_access'); - expect(mockDb.from).toHaveBeenCalledWith('directus_users'); - 
expect(mockDb.whereIn).toHaveBeenCalledWith('directus_roles.id', roleIds); - expect(mockDb.andWhere).toHaveBeenCalledWith('directus_users.status', '=', 'active'); - expect(mockDb.leftJoin).toHaveBeenCalledWith('directus_roles', 'directus_users.role', '=', 'directus_roles.id'); - expect(mockDb.groupBy).toHaveBeenCalledWith('directus_roles.admin_access', 'directus_roles.app_access'); -}); - -test('Sets final counts based on combination of admin/app access', async () => { - const res = await getUserCountsByRoles(mockDb, []); - - expect(res).toEqual({ - admin: 45, - app: 20, - api: 60, - }); -}); diff --git a/api/src/telemetry/utils/get-user-counts-by-roles.ts b/api/src/telemetry/utils/get-user-counts-by-roles.ts deleted file mode 100644 index c9b1c5c5d2..0000000000 --- a/api/src/telemetry/utils/get-user-counts-by-roles.ts +++ /dev/null @@ -1,42 +0,0 @@ -import type { PrimaryKey } from '@directus/types'; -import { toBoolean } from '@directus/utils'; -import type { Knex } from 'knex'; -import { type AccessTypeCount } from './get-user-count.js'; - -/** - * Get the user type counts by role IDs - */ -export async function getUserCountsByRoles(db: Knex, roleIds: PrimaryKey[]): Promise { - const counts: AccessTypeCount = { - admin: 0, - app: 0, - api: 0, - }; - - const result = <{ count: number | string; admin_access: number | boolean; app_access: number | boolean }[]>( - await db - .count('directus_users.id', { as: 'count' }) - .select('directus_roles.admin_access', 'directus_roles.app_access') - .from('directus_users') - .whereIn('directus_roles.id', roleIds) - .andWhere('directus_users.status', '=', 'active') - .leftJoin('directus_roles', 'directus_users.role', '=', 'directus_roles.id') - .groupBy('directus_roles.admin_access', 'directus_roles.app_access') - ); - - for (const record of result) { - const adminAccess = toBoolean(record.admin_access); - const appAccess = toBoolean(record.app_access); - const count = Number(record.count); - - if (adminAccess) { - counts.admin 
+= count; - } else if (appAccess) { - counts.app += count; - } else { - counts.api += count; - } - } - - return counts; -} diff --git a/api/src/types/ast.ts b/api/src/types/ast.ts index f9812e6799..d69ee6ebaa 100644 --- a/api/src/types/ast.ts +++ b/api/src/types/ast.ts @@ -1,4 +1,4 @@ -import type { Query, Relation } from '@directus/types'; +import type { Filter, Query, Relation } from '@directus/types'; export type M2ONode = { type: 'm2o'; @@ -9,6 +9,16 @@ export type M2ONode = { relation: Relation; parentKey: string; relatedKey: string; + + /** + * Which permission cases have to be met on the current item for this field to return a value + */ + whenCase: number[]; + + /** + * Permissions rules for the item access of the children of this item. + */ + cases: Filter[]; }; export type A2MNode = { @@ -23,9 +33,22 @@ export type A2MNode = { relatedKey: { [collection: string]: string; }; + fieldKey: string; relation: Relation; parentKey: string; + + /** + * Which permission cases have to be met on the current item for this field to return a value + */ + whenCase: number[]; + + /** + * Permissions rules for the item access of the children of this item. + */ + cases: { + [collection: string]: Filter[]; + }; }; export type O2MNode = { @@ -37,6 +60,16 @@ export type O2MNode = { relation: Relation; parentKey: string; relatedKey: string; + + /** + * Which permission cases have to be met on the current item for this field to return a value + */ + whenCase: number[]; + + /** + * Permissions rules for the item access of the children of this item. 
+ */ + cases: Filter[]; }; export type NestedCollectionNode = M2ONode | O2MNode | A2MNode; @@ -45,6 +78,11 @@ export type FieldNode = { type: 'field'; name: string; fieldKey: string; + + /** + * Which permission cases have to be met on the current item for this field to return a value + */ + whenCase: number[]; }; export type FunctionFieldNode = { @@ -53,6 +91,15 @@ export type FunctionFieldNode = { fieldKey: string; query: Query; relatedCollection: string; + + /** + * Which permission cases have to be met on the current item for this field to return a value + */ + whenCase: number[]; + /** + * Permissions rules for the item access of the related collection of this item. + */ + cases: Filter[]; }; export type AST = { @@ -60,4 +107,9 @@ export type AST = { name: string; children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[]; query: Query; + + /** + * Permissions rules for the item access of the children of this item. + */ + cases: Filter[]; }; diff --git a/api/src/types/items.ts b/api/src/types/items.ts index 5ab528e628..c8ec955ed3 100644 --- a/api/src/types/items.ts +++ b/api/src/types/items.ts @@ -1,6 +1,7 @@ import type { DirectusError } from '@directus/errors'; import type { EventContext, PrimaryKey } from '@directus/types'; import type { MutationTracker } from '../services/items.js'; +import type { UserIntegrityCheckFlag } from '../utils/validate-user-count-integrity.js'; export type MutationOptions = { /** @@ -45,6 +46,18 @@ export type MutationOptions = { preMutationError?: DirectusError | undefined; bypassAutoIncrementSequenceReset?: boolean; + + /** + * Indicate that the top level mutation needs to perform a user integrity check before commiting the transaction + * This is a combination of flags + * @see UserIntegrityCheckFlag + */ + userIntegrityCheckFlags?: UserIntegrityCheckFlag; + + /** + * Callback function that is called whenever a mutation requires a user integrity check to be made + */ + onRequireUserIntegrityCheck?: ((flags: 
UserIntegrityCheckFlag) => void) | undefined; }; export type ActionEventParams = { diff --git a/api/src/utils/apply-query.test.ts b/api/src/utils/apply-query.test.ts index b0673de87e..e1a81042ef 100644 --- a/api/src/utils/apply-query.test.ts +++ b/api/src/utils/apply-query.test.ts @@ -105,7 +105,7 @@ describe('applySearch', () => { return db; }); - await applySearch(db as any, FAKE_SCHEMA, db as any, number, 'test'); + applySearch(db as any, FAKE_SCHEMA, db as any, number, 'test'); expect(db['andWhere']).toBeCalledTimes(1); expect(db['orWhere']).toBeCalledTimes(0); @@ -123,7 +123,7 @@ describe('applySearch', () => { return db; }); - await applySearch(db as any, FAKE_SCHEMA, db as any, number, 'test'); + applySearch(db as any, FAKE_SCHEMA, db as any, number, 'test'); expect(db['andWhere']).toBeCalledTimes(1); expect(db['orWhere']).toBeCalledTimes(2); @@ -144,7 +144,7 @@ describe('applySearch', () => { return db; }); - await applySearch(db as any, schemaWithStringFieldRemoved, db as any, 'searchstring', 'test'); + applySearch(db as any, schemaWithStringFieldRemoved, db as any, 'searchstring', 'test'); expect(db['andWhere']).toBeCalledTimes(1); expect(db['orWhere']).toBeCalledTimes(0); @@ -198,7 +198,7 @@ describe('applyFilter', () => { _and: [{ [field]: { [`_${filterOperator}`]: filterValue } }], }; - const { query } = applyFilter(db, FAKE_SCHEMA, queryBuilder, rootFilter, collection, {}); + const { query } = applyFilter(db, FAKE_SCHEMA, queryBuilder, rootFilter, collection, {}, []); const tracker = createTracker(db); tracker.on.select('*').response([]); @@ -264,7 +264,7 @@ describe('applyFilter', () => { }, }; - const { query } = applyFilter(db, BIGINT_FAKE_SCHEMA, queryBuilder, rootFilter, collection, {}); + const { query } = applyFilter(db, BIGINT_FAKE_SCHEMA, queryBuilder, rootFilter, collection, {}, []); const tracker = createTracker(db); tracker.on.select('*').response([]); diff --git a/api/src/utils/apply-query.ts b/api/src/utils/apply-query.ts index 
3da36d1d81..d3670d9d40 100644 --- a/api/src/utils/apply-query.ts +++ b/api/src/utils/apply-query.ts @@ -17,6 +17,7 @@ import type { Knex } from 'knex'; import { clone, isPlainObject } from 'lodash-es'; import { customAlphabet } from 'nanoid/non-secure'; import { getHelpers } from '../database/helpers/index.js'; +import { applyCaseWhen } from '../database/run-ast/utils/apply-case-when.js'; import type { AliasMap } from './get-column-path.js'; import { getColumnPath } from './get-column-path.js'; import { getColumn } from './get-column.js'; @@ -27,6 +28,13 @@ import { parseNumericString } from './parse-numeric-string.js'; export const generateAlias = customAlphabet('abcdefghijklmnopqrstuvwxyz', 5); +type ApplyQueryOptions = { + aliasMap?: AliasMap; + isInnerQuery?: boolean; + hasMultiRelationalSort?: boolean | undefined; + groupWhenCases?: number[][] | undefined; +}; + /** * Apply the Query to a given Knex query builder instance */ @@ -36,7 +44,8 @@ export default function applyQuery( dbQuery: Knex.QueryBuilder, query: Query, schema: SchemaOverview, - options?: { aliasMap?: AliasMap; isInnerQuery?: boolean; hasMultiRelationalSort?: boolean | undefined }, + cases: Filter[], + options?: ApplyQueryOptions, ) { const aliasMap: AliasMap = options?.aliasMap ?? Object.create(null); let hasJoins = false; @@ -64,12 +73,16 @@ export default function applyQuery( applySearch(knex, schema, dbQuery, query.search, collection); } - if (query.group) { - dbQuery.groupBy(query.group.map((column) => getColumn(knex, collection, column, false, schema))); - } + // `cases` are the permissions cases that are required for the current data set. We're + // dynamically adding those into the filters that the user provided to enforce the permission + // rules. You should be able to read an item if one or more of the cases matches. 
The actual case + // is reused in the column selection case/when to dynamically return or nullify the field values + // you're actually allowed to read - if (query.filter) { - const filterResult = applyFilter(knex, schema, dbQuery, query.filter, collection, aliasMap); + const filter: Filter | null = joinFilterWithCases(query.filter, cases); + + if (filter) { + const filterResult = applyFilter(knex, schema, dbQuery, filter, collection, aliasMap, cases); if (!hasJoins) { hasJoins = filterResult.hasJoins; @@ -78,6 +91,48 @@ export default function applyQuery( hasMultiRelationalFilter = filterResult.hasMultiRelationalFilter; } + if (query.group) { + const rawColumns = query.group.map((column) => getColumn(knex, collection, column, false, schema)); + let columns; + + if (options?.groupWhenCases) { + columns = rawColumns.map((column, index) => + applyCaseWhen( + { + columnCases: options.groupWhenCases![index]!.map((caseIndex) => cases[caseIndex]!), + column, + aliasMap, + cases, + table: collection, + }, + { + knex, + schema, + }, + ), + ); + + if (query.sort && query.sort.length === 1 && query.sort[0] === query.group[0]) { + // Special case, where the sort query is injected by the group by operation + dbQuery.clear('order'); + + let order = 'asc'; + + if (query.sort[0]!.startsWith('-')) { + order = 'desc'; + } + + // @ts-expect-error (orderBy does not accept Knex.Raw for some reason, even though it is handled correctly) + // https://github.com/knex/knex/issues/5711 + dbQuery.orderBy([{ column: columns[0]!, order }]); + } + } else { + columns = rawColumns; + } + + dbQuery.groupBy(columns); + } + if (query.aggregate) { applyAggregate(schema, dbQuery, query.aggregate, collection, hasJoins); } @@ -392,6 +447,7 @@ export function applyFilter( rootFilter: Filter, collection: string, aliasMap: AliasMap, + cases: Filter[], ) { const helpers = getHelpers(knex); const relations: Relation[] = schema.relations; @@ -404,12 +460,22 @@ export function applyFilter( return { query: 
rootQuery, hasJoins, hasMultiRelationalFilter }; function addJoins(dbQuery: Knex.QueryBuilder, filter: Filter, collection: string) { - for (const [key, value] of Object.entries(filter)) { + // eslint-disable-next-line prefer-const + for (let [key, value] of Object.entries(filter)) { if (key === '_or' || key === '_and') { // If the _or array contains an empty object (full permissions), we should short-circuit and ignore all other // permission checks, as {} already matches full permissions. if (key === '_or' && value.some((subFilter: Record) => Object.keys(subFilter).length === 0)) { - continue; + // But only do so, if the value is not equal to `cases` (since then this is not permission related at all) + // or the length of value is 1, ie. only the empty filter. + // If the length is more than one it means that some items (and fields) might now be available, so + // the joins are required for the case/when construction. + if (value !== cases || value.length === 1) { + continue; + } else { + // Otherwise we can at least filter out all empty filters that would not add joins anyway + value = value.filter((subFilter: Record) => Object.keys(subFilter).length > 0); + } } value.forEach((subFilter: Record) => { @@ -511,7 +577,7 @@ export function applyFilter( .from(collection) .whereNotNull(column); - applyQuery(knex, relation!.collection, subQueryKnex, { filter }, schema); + applyQuery(knex, relation!.collection, subQueryKnex, { filter }, schema, cases); }; const childKey = Object.keys(value)?.[0]; @@ -837,13 +903,13 @@ export function applyFilter( } } -export async function applySearch( +export function applySearch( knex: Knex, schema: SchemaOverview, dbQuery: Knex.QueryBuilder, searchQuery: string, collection: string, -): Promise { +) { const { number: numberHelper } = getHelpers(knex); const fields = Object.entries(schema.collections[collection]!.fields); @@ -936,6 +1002,18 @@ export function applyAggregate( } } +export function joinFilterWithCases(filter: Filter | null 
| undefined, cases: Filter[]) { + if (cases.length > 0 && !filter) { + return { _or: cases }; + } else if (filter && cases.length === 0) { + return filter ?? null; + } else if (filter && cases.length > 0) { + return { _and: [filter, { _or: cases }] }; + } + + return null; +} + function getFilterPath(key: string, value: Record) { const path = [key]; const childKey = Object.keys(value)[0]; diff --git a/api/src/utils/fetch-user-count/fetch-access-lookup.ts b/api/src/utils/fetch-user-count/fetch-access-lookup.ts new file mode 100644 index 0000000000..3dea9d1587 --- /dev/null +++ b/api/src/utils/fetch-user-count/fetch-access-lookup.ts @@ -0,0 +1,61 @@ +import type { PrimaryKey } from '@directus/types'; +import type { Knex } from 'knex'; + +export interface AccessLookup { + role: string | null; + user: string | null; + app_access: boolean | number; + admin_access: boolean | number; + user_status: 'active' | string; + user_role: string | null; +} + +export interface FetchAccessLookupOptions { + excludeAccessRows?: PrimaryKey[]; + excludePolicies?: PrimaryKey[]; + excludeUsers?: PrimaryKey[]; + excludeRoles?: PrimaryKey[]; + adminOnly?: boolean; + knex: Knex; +} + +export async function fetchAccessLookup(options: FetchAccessLookupOptions): Promise { + let query = options.knex + .select( + 'directus_access.role', + 'directus_access.user', + 'directus_policies.app_access', + 'directus_policies.admin_access', + 'directus_users.status as user_status', + 'directus_users.role as user_role', + ) + .from('directus_access') + .leftJoin('directus_policies', 'directus_access.policy', 'directus_policies.id') + .leftJoin('directus_users', 'directus_access.user', 'directus_users.id'); + + if (options.excludeAccessRows && options.excludeAccessRows.length > 0) { + query = query.whereNotIn('directus_access.id', options.excludeAccessRows); + } + + if (options.excludePolicies && options.excludePolicies.length > 0) { + query = query.whereNotIn('directus_access.policy', 
options.excludePolicies); + } + + if (options.excludeUsers && options.excludeUsers.length > 0) { + query = query.where((q) => + q.whereNotIn('directus_access.user', options.excludeUsers!).orWhereNull('directus_access.user'), + ); + } + + if (options.excludeRoles && options.excludeRoles.length > 0) { + query = query.where((q) => + q.whereNotIn('directus_access.role', options.excludeRoles!).orWhereNull('directus_access.role'), + ); + } + + if (options.adminOnly) { + query = query.where('directus_policies.admin_access', 1); + } + + return query; +} diff --git a/api/src/utils/fetch-user-count/fetch-access-roles.test.ts b/api/src/utils/fetch-user-count/fetch-access-roles.test.ts new file mode 100644 index 0000000000..4ddb2fa919 --- /dev/null +++ b/api/src/utils/fetch-user-count/fetch-access-roles.test.ts @@ -0,0 +1,120 @@ +import { type Knex } from 'knex'; +import { vi, beforeEach, test, expect } from 'vitest'; +import { fetchAccessRoles } from './fetch-access-roles.js'; + +let knex: Knex; + +beforeEach(() => { + vi.clearAllMocks(); + + knex = { + select: vi.fn().mockReturnThis(), + from: vi.fn().mockReturnThis(), + whereNotNull: vi.fn().mockReturnThis(), + whereNotIn: vi.fn(), + } as unknown as Knex; +}); + +test('Returns the full admin and app access roles if no nested roles are found', async () => { + const options = { + adminRoles: new Set(['admin']), + appRoles: new Set(['app']), + }; + + vi.mocked(knex.whereNotIn).mockResolvedValue([]); + + const result = await fetchAccessRoles(options, { knex }); + + expect(result).toEqual(options); +}); + +test('Returns the correct admin and app access roles if a roles parent grants both admin and app access', async () => { + const options = { + adminRoles: new Set(['role-a']), + appRoles: new Set(['role-a']), + }; + + vi.mocked(knex.whereNotIn).mockResolvedValue([{ id: 'role-b', parent: 'role-a' }]); + + const result = await fetchAccessRoles(options, { knex }); + + expect(result).toEqual({ + adminRoles: new Set(['role-a', 
'role-b']), + appRoles: new Set(['role-a', 'role-b']), + }); +}); + +test('Excludes roles that are passed in the excludeRoles option in the query', async () => { + const options = { + adminRoles: new Set([]), + appRoles: new Set([]), + excludeRoles: ['role-a'], + }; + + vi.mocked(knex.whereNotIn).mockResolvedValue([]); + + await fetchAccessRoles(options, { knex }); + + expect(knex.whereNotIn).toHaveBeenCalledWith('id', ['role-a']); +}); + +test('Does not add an unrelated role to the admin or app roles', async () => { + const options = { + adminRoles: new Set(['admin']), + appRoles: new Set(['app']), + }; + + vi.mocked(knex.whereNotIn).mockResolvedValue([{ id: 'role-a', parent: 'unrelated' }]); + + const result = await fetchAccessRoles(options, { knex }); + + expect(result).toEqual(options); +}); + +test.each([ + [ + [{ id: 'role-a', parent: 'admin' }], + { + adminRoles: new Set(['admin', 'role-a']), + appRoles: new Set(['app']), + }, + ], + [ + [{ id: 'role-a', parent: 'app' }], + { + adminRoles: new Set(['admin']), + appRoles: new Set(['app', 'role-a']), + }, + ], + [ + [ + { id: 'role-a', parent: 'role-b' }, + { id: 'role-b', parent: 'admin' }, + ], + { + adminRoles: new Set(['admin', 'role-a', 'role-b']), + appRoles: new Set(['app']), + }, + ], + [ + [ + { id: 'role-a', parent: 'role-b' }, + { id: 'role-b', parent: 'app' }, + ], + { + adminRoles: new Set(['admin']), + appRoles: new Set(['app', 'role-a', 'role-b']), + }, + ], +])('Returns the correct admin and app access roles when roles are nested', async (queryReturn, expected) => { + const options = { + adminRoles: new Set(['admin']), + appRoles: new Set(['app']), + }; + + vi.mocked(knex.whereNotIn).mockResolvedValue(queryReturn); + + const result = await fetchAccessRoles(options, { knex }); + + expect(result).toEqual(expected); +}); diff --git a/api/src/utils/fetch-user-count/fetch-access-roles.ts b/api/src/utils/fetch-user-count/fetch-access-roles.ts new file mode 100644 index 0000000000..96a309b112 --- 
/dev/null +++ b/api/src/utils/fetch-user-count/fetch-access-roles.ts @@ -0,0 +1,51 @@ +import type { PrimaryKey } from '@directus/types'; +import type { Knex } from 'knex'; + +export interface FetchAccessRolesOptions { + adminRoles: Set; + appRoles: Set; + excludeRoles?: PrimaryKey[]; +} + +/** + * Return a set of roles that allow app or admin access, if itself or any of its parents do + */ +export async function fetchAccessRoles(options: FetchAccessRolesOptions, context: { knex: Knex }) { + // Only fetch the roles that have a parent, as otherwise those roles should already be included in at least one of the input set + const allChildRoles = await context.knex + .select<{ id: string; parent: string }[]>('id', 'parent') + .from('directus_roles') + .whereNotNull('parent') + .whereNotIn('id', options.excludeRoles ?? []); + + const adminRoles = new Set(options.adminRoles); + const appRoles = new Set(options.appRoles); + const remainingRoles = new Set(allChildRoles); + let hasChanged = remainingRoles.size > 0; + + // This loop accounts for the undefined order in which the roles are returned, as there is the possibility + // of a role parent not being in the set of roles yet, so we need to iterate over the roles multiple times + // until no further roles are added to the sets + while (hasChanged) { + hasChanged = false; + + for (const role of remainingRoles) { + if (adminRoles.has(role.parent)) { + adminRoles.add(role.id); + remainingRoles.delete(role); + hasChanged = true; + } + + if (appRoles.has(role.parent)) { + appRoles.add(role.id); + remainingRoles.delete(role); + hasChanged = true; + } + } + } + + return { + adminRoles, + appRoles, + }; +} diff --git a/api/src/utils/fetch-user-count/fetch-active-users.ts b/api/src/utils/fetch-user-count/fetch-active-users.ts new file mode 100644 index 0000000000..77c156f035 --- /dev/null +++ b/api/src/utils/fetch-user-count/fetch-active-users.ts @@ -0,0 +1,10 @@ +import type { Knex } from 'knex'; + +export interface ActiveUser { 
+ id: string; + role: string | null; +} + +export async function fetchActiveUsers(knex: Knex): Promise { + return await knex.select('id', 'role').from('directus_users').where('status', 'active'); +} diff --git a/api/src/utils/fetch-user-count/fetch-user-count.ts b/api/src/utils/fetch-user-count/fetch-user-count.ts new file mode 100644 index 0000000000..892860c669 --- /dev/null +++ b/api/src/utils/fetch-user-count/fetch-user-count.ts @@ -0,0 +1,101 @@ +import { toBoolean } from '@directus/utils'; +import { fetchAccessLookup, type FetchAccessLookupOptions } from './fetch-access-lookup.js'; +import { fetchAccessRoles } from './fetch-access-roles.js'; +import { getUserCountQuery } from './get-user-count-query.js'; + +export interface FetchUserCountOptions extends FetchAccessLookupOptions {} + +export interface UserCount { + admin: number; + app: number; + api: number; +} + +/** + * Returns counts of all active users in the system grouped by admin, app, and api access + */ +export async function fetchUserCount(options: FetchUserCountOptions): Promise { + const accessRows = await fetchAccessLookup(options); + + const adminRoles = new Set( + accessRows.filter((row) => toBoolean(row.admin_access) && row.role !== null).map((row) => row.role!), + ); + + const appRoles = new Set( + accessRows + .filter((row) => !toBoolean(row.admin_access) && toBoolean(row.app_access) && row.role !== null) + .map((row) => row.role!), + ); + + // All users that are directly granted rights through a connected policy + const adminUsers = new Set( + accessRows + .filter((row) => toBoolean(row.admin_access) && row.user !== null && row.user_status === 'active') + .map((row) => row.user!), + ); + + // Some roles might be granted access rights through nesting, so determine all roles that grant admin or app access, + // including nested roles + const { adminRoles: allAdminRoles, appRoles: allAppRoles } = await fetchAccessRoles( + { + adminRoles, + appRoles, + ...options, + }, + { knex: options.knex }, 
+ ); + + // All users that are granted admin rights through a role, but not directly + const adminCountQuery = getUserCountQuery(options.knex, { + includeRoles: Array.from(allAdminRoles), + excludeIds: [...adminUsers, ...(options.excludeUsers ?? [])], + }); + + if (options.adminOnly) { + // Shortcut for only counting admin users + + const adminResult = await adminCountQuery; + + return { + admin: Number(adminResult?.['count'] ?? 0) + adminUsers.size, + app: 0, + api: 0, + }; + } + + const appUsers = new Set( + accessRows + .filter( + (row) => + !toBoolean(row.admin_access) && + toBoolean(row.app_access) && + row.user !== null && + row.user_status === 'active' && + adminUsers.has(row.user) === false && + adminRoles.has(row.user_role as any) === false, + ) + .map((row) => row.user!), + ); + + // All users that are granted app rights through a role, but not directly, and that aren't admin users + const appCountQuery = getUserCountQuery(options.knex, { + includeRoles: Array.from(allAppRoles), + excludeRoles: Array.from(allAdminRoles), + excludeIds: [...appUsers, ...adminUsers, ...(options.excludeUsers ?? [])], + }); + + const allCountQuery = getUserCountQuery(options.knex, { + excludeIds: options.excludeUsers ?? [], + }); + + const [adminResult, appResult, allResult] = await Promise.all([adminCountQuery, appCountQuery, allCountQuery]); + + const adminCount = Number(adminResult?.['count'] ?? 0) + adminUsers.size; + const appCount = Number(appResult?.['count'] ?? 0) + appUsers.size; + + return { + admin: adminCount, + app: appCount, + api: Math.max(0, Number(allResult?.['count'] ?? 
0) - adminCount - appCount), + }; +} diff --git a/api/src/utils/fetch-user-count/get-user-count-query.ts b/api/src/utils/fetch-user-count/get-user-count-query.ts new file mode 100644 index 0000000000..4c5efc411a --- /dev/null +++ b/api/src/utils/fetch-user-count/get-user-count-query.ts @@ -0,0 +1,31 @@ +import type { PrimaryKey } from '@directus/types'; +import type { Knex } from 'knex'; + +export interface GetUserCountOptions { + excludeIds?: PrimaryKey[]; + excludeRoles?: PrimaryKey[]; + includeRoles?: PrimaryKey[]; +} + +export function getUserCountQuery(knex: Knex, options: GetUserCountOptions) { + // Safety check for an empty list of includeRoles, which would otherwise return all users + if (options.includeRoles && options.includeRoles.length === 0) { + return Promise.resolve({ count: 0 }); + } + + let query = knex('directus_users').count({ count: '*' }).as('count').where('status', '=', 'active'); + + if (options.excludeIds && options.excludeIds.length > 0) { + query = query.whereNotIn('id', options.excludeIds); + } + + if (options.excludeRoles && options.excludeRoles.length > 0) { + query = query.whereNotIn('role', options.excludeRoles); + } + + if (options.includeRoles && options.includeRoles.length > 0) { + query = query.whereIn('role', options.includeRoles); + } + + return query.first(); +} diff --git a/api/src/utils/get-accountability-for-role.test.ts b/api/src/utils/get-accountability-for-role.test.ts index aa0afabc60..0e92388f8d 100644 --- a/api/src/utils/get-accountability-for-role.test.ts +++ b/api/src/utils/get-accountability-for-role.test.ts @@ -1,20 +1,18 @@ -import { expect, describe, test, vi } from 'vitest'; +import { beforeEach, describe, expect, test, vi } from 'vitest'; +import { fetchRolesTree } from '../permissions/lib/fetch-roles-tree.js'; +import { fetchGlobalAccess } from '../permissions/modules/fetch-global-access/fetch-global-access.js'; import { getAccountabilityForRole } from './get-accountability-for-role.js'; 
vi.mock('./get-permissions', () => ({ getPermissions: vi.fn().mockReturnValue([]), })); -function mockDatabase() { - const self: Record = { - select: vi.fn(() => self), - from: vi.fn(() => self), - where: vi.fn(() => self), - first: vi.fn(), - }; +vi.mock('../permissions/modules/fetch-global-access/fetch-global-access.ts'); +vi.mock('../permissions/lib/fetch-roles-tree.js'); - return self; -} +beforeEach(() => { + vi.clearAllMocks(); +}); describe('getAccountabilityForRole', async () => { test('no role', async () => { @@ -27,7 +25,8 @@ describe('getAccountabilityForRole', async () => { expect(result).toStrictEqual({ admin: false, app: false, - permissions: [], + ip: null, + roles: [], role: null, user: null, }); @@ -43,45 +42,49 @@ describe('getAccountabilityForRole', async () => { expect(result).toStrictEqual({ admin: true, app: true, - permissions: [], + ip: null, + roles: [], role: null, user: null, }); }); - test('get role from database', async () => { - const db = mockDatabase(); - - db['first'].mockReturnValue({ - admin_access: 'not true', - app_access: '1', - }); + test('get role and role tree from database', async () => { + const roles = ['123-456', '234-567']; + vi.mocked(fetchRolesTree).mockResolvedValue(roles); + vi.mocked(fetchGlobalAccess).mockResolvedValue({ admin: false, app: true }); const result = await getAccountabilityForRole('123-456', { accountability: null, schema: {} as any, - database: db as any, + database: {} as any, }); expect(result).toStrictEqual({ admin: false, app: true, - permissions: [], + roles: roles, role: '123-456', user: null, + ip: null, }); + + expect(fetchRolesTree).toHaveBeenCalledWith('123-456', {}); + expect(fetchGlobalAccess).toHaveBeenCalledWith({ roles, user: null, ip: null }, {}); }); - test('database invalid role', async () => { - const db = mockDatabase(); - db['first'].mockReturnValue(false); + test('invalid role throws error', async () => { + vi.mocked(fetchRolesTree).mockResolvedValue([]); + 
vi.mocked(fetchGlobalAccess).mockResolvedValue({ admin: false, app: false }); - expect(() => + expect( getAccountabilityForRole('456-789', { accountability: null, schema: {} as any, - database: db as any, + database: {} as any, }), - ).rejects.toThrow('Configured role "456-789" isn\'t a valid role ID or doesn\'t exist.'); + ).rejects.toThrowErrorMatchingInlineSnapshot( + `[Error: Configured role "456-789" isn't a valid role ID or doesn't exist.]`, + ); }); }); diff --git a/api/src/utils/get-accountability-for-role.ts b/api/src/utils/get-accountability-for-role.ts index 0390263cf7..7ea8b35551 100644 --- a/api/src/utils/get-accountability-for-role.ts +++ b/api/src/utils/get-accountability-for-role.ts @@ -1,6 +1,8 @@ import type { Accountability, SchemaOverview } from '@directus/types'; import type { Knex } from 'knex'; -import { getPermissions } from './get-permissions.js'; +import { fetchRolesTree } from '../permissions/lib/fetch-roles-tree.js'; +import { fetchGlobalAccess } from '../permissions/modules/fetch-global-access/fetch-global-access.js'; +import { createDefaultAccountability } from '../permissions/utils/create-default-accountability.js'; export async function getAccountabilityForRole( role: null | string, @@ -10,44 +12,35 @@ export async function getAccountabilityForRole( database: Knex; }, ): Promise { - let generatedAccountability: Accountability | null = context.accountability; + let generatedAccountability: Accountability | null; if (role === null) { - generatedAccountability = { - role: null, - user: null, - admin: false, - app: false, - }; - - generatedAccountability.permissions = await getPermissions(generatedAccountability, context.schema); + generatedAccountability = createDefaultAccountability(); } else if (role === 'system') { - generatedAccountability = { - user: null, - role: null, + generatedAccountability = createDefaultAccountability({ admin: true, app: true, - permissions: [], - }; + }); } else { - const roleInfo = await context.database - 
.select(['app_access', 'admin_access']) - .from('directus_roles') - .where({ id: role }) - .first(); + const roles = await fetchRolesTree(role, context.database); - if (!roleInfo) { + // The roles tree should always include the passed role. If it doesn't, it's because it + // couldn't be read from the database and therefore doesn't exist + if (roles.length === 0) { throw new Error(`Configured role "${role}" isn't a valid role ID or doesn't exist.`); } - generatedAccountability = { - role, - user: null, - admin: roleInfo.admin_access === 1 || roleInfo.admin_access === '1' || roleInfo.admin_access === true, - app: roleInfo.app_access === 1 || roleInfo.app_access === '1' || roleInfo.app_access === true, - }; + const globalAccess = await fetchGlobalAccess( + { user: null, roles, ip: context.accountability?.ip ?? null }, + context.database, + ); - generatedAccountability.permissions = await getPermissions(generatedAccountability, context.schema); + generatedAccountability = createDefaultAccountability({ + role, + roles, + user: null, + ...globalAccess, + }); } return generatedAccountability; diff --git a/api/src/utils/get-accountability-for-token.test.ts b/api/src/utils/get-accountability-for-token.test.ts index 127da5ccc7..91b0dc135f 100644 --- a/api/src/utils/get-accountability-for-token.test.ts +++ b/api/src/utils/get-accountability-for-token.test.ts @@ -1,10 +1,18 @@ -import { useEnv } from '@directus/env'; import jwt from 'jsonwebtoken'; -import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest'; +import { beforeEach, describe, expect, test, vi } from 'vitest'; import getDatabase from '../database/index.js'; import { getAccountabilityForToken } from './get-accountability-for-token.js'; +import { fetchGlobalAccess } from '../permissions/modules/fetch-global-access/fetch-global-access.js'; +import { fetchRolesTree } from '../permissions/lib/fetch-roles-tree.js'; -vi.mock('@directus/env'); +vi.mock('@directus/env', () => { + return { + useEnv: 
vi.fn().mockReturnValue({ SECRET: 'super-secure-secret', EXTENSIONS_PATH: './extensions' }), + }; +}); + +vi.mock('../permissions/modules/fetch-global-access/fetch-global-access.js'); +vi.mock('../permissions/lib/fetch-roles-tree.js'); vi.mock('../database/index', () => { const self: Record = { @@ -19,24 +27,26 @@ vi.mock('../database/index', () => { }); beforeEach(() => { - vi.mocked(useEnv).mockReturnValue({ - SECRET: 'super-secure-secret', - EXTENSIONS_PATH: './extensions', - }); -}); - -afterEach(() => { vi.clearAllMocks(); }); describe('getAccountabilityForToken', async () => { test('minimal token payload', async () => { + const db = getDatabase(); + + vi.mocked(fetchRolesTree).mockResolvedValue([]); + vi.mocked(fetchGlobalAccess).mockResolvedValue({ app: false, admin: false }); + const token = jwt.sign({ role: '123-456-789', app_access: false, admin_access: false }, 'super-secure-secret', { issuer: 'directus', }); + const expectedAccountability = { admin: false, app: false, role: '123-456-789', roles: [], ip: null, user: null }; + const result = await getAccountabilityForToken(token); - expect(result).toStrictEqual({ admin: false, app: false, role: '123-456-789', user: null }); + expect(result).toStrictEqual(expectedAccountability); + expect(fetchRolesTree).toHaveBeenCalledWith('123-456-789', db); + expect(fetchGlobalAccess).toHaveBeenCalledWith(expectedAccountability, db); }); test('full token payload', async () => { @@ -53,13 +63,21 @@ describe('getAccountabilityForToken', async () => { { issuer: 'directus' }, ); + vi.mocked(fetchRolesTree).mockResolvedValue([]); + vi.mocked(fetchGlobalAccess).mockResolvedValue({ app: true, admin: true }); + const result = await getAccountabilityForToken(token); - expect(result.admin).toBe(true); - expect(result.app).toBe(true); - expect(result.role).toBe('role-id'); - expect(result.share).toBe('share-id'); - expect(result.share_scope).toBe('share-scope'); - expect(result.user).toBe('user-id'); + + 
expect(result).toStrictEqual({ + admin: true, + app: true, + user: 'user-id', + role: 'role-id', + roles: [], + ip: null, + share: 'share-id', + share_scope: 'share-scope', + }); }); test('throws token expired error', async () => { @@ -78,19 +96,28 @@ describe('getAccountabilityForToken', async () => { vi.spyOn(db, 'first').mockReturnValue({ id: 'user-id', role: 'role-id', - admin_access: false, - app_access: true, } as any); - const token = jwt.sign({ role: '123-456-789' }, 'bad-secret'); - const result = await getAccountabilityForToken(token); + vi.mocked(fetchRolesTree).mockResolvedValue([]); + vi.mocked(fetchGlobalAccess).mockResolvedValue({ app: true, admin: false }); - expect(result).toStrictEqual({ + const token = jwt.sign({ role: '123-456-789' }, 'bad-secret'); + + const expectedAccountability = { user: 'user-id', role: 'role-id', + roles: [], admin: false, app: true, - }); + ip: null, + }; + + const result = await getAccountabilityForToken(token); + + expect(result).toStrictEqual(expectedAccountability); + + expect(fetchRolesTree).toHaveBeenCalledWith('role-id', db); + expect(fetchGlobalAccess).toHaveBeenCalledWith(expectedAccountability, db); }); test('no user found', async () => { diff --git a/api/src/utils/get-accountability-for-token.ts b/api/src/utils/get-accountability-for-token.ts index b7deba9441..829b927a01 100644 --- a/api/src/utils/get-accountability-for-token.ts +++ b/api/src/utils/get-accountability-for-token.ts @@ -1,24 +1,25 @@ import { InvalidCredentialsError } from '@directus/errors'; import type { Accountability } from '@directus/types'; import getDatabase from '../database/index.js'; +import { fetchRolesTree } from '../permissions/lib/fetch-roles-tree.js'; +import { fetchGlobalAccess } from '../permissions/modules/fetch-global-access/fetch-global-access.js'; +import { createDefaultAccountability } from '../permissions/utils/create-default-accountability.js'; import { getSecret } from './get-secret.js'; import isDirectusJWT from 
'./is-directus-jwt.js'; -import { verifySessionJWT } from './verify-session-jwt.js'; import { verifyAccessJWT } from './jwt.js'; +import { verifySessionJWT } from './verify-session-jwt.js'; export async function getAccountabilityForToken( token?: string | null, accountability?: Accountability, ): Promise { if (!accountability) { - accountability = { - user: null, - role: null, - admin: false, - app: false, - }; + accountability = createDefaultAccountability(); } + // Try finding the user with the provided token + const database = getDatabase(); + if (token) { if (isDirectusJWT(token)) { const payload = verifyAccessJWT(token, getSecret()); @@ -27,21 +28,21 @@ export async function getAccountabilityForToken( await verifySessionJWT(payload); } - accountability.role = payload.role; - accountability.admin = payload.admin_access === true || payload.admin_access == 1; - accountability.app = payload.app_access === true || payload.app_access == 1; - if (payload.share) accountability.share = payload.share; if (payload.share_scope) accountability.share_scope = payload.share_scope; if (payload.id) accountability.user = payload.id; - } else { - // Try finding the user with the provided token - const database = getDatabase(); + accountability.role = payload.role; + accountability.roles = await fetchRolesTree(payload.role, database); + + const { admin, app } = await fetchGlobalAccess(accountability, database); + + accountability.admin = admin; + accountability.app = app; + } else { const user = await database - .select('directus_users.id', 'directus_users.role', 'directus_roles.admin_access', 'directus_roles.app_access') + .select('directus_users.id', 'directus_users.role') .from('directus_users') - .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') .where({ 'directus_users.token': token, status: 'active', @@ -54,8 +55,12 @@ export async function getAccountabilityForToken( accountability.user = user.id; accountability.role = user.role; - accountability.admin 
= user.admin_access === true || user.admin_access == 1; - accountability.app = user.app_access === true || user.app_access == 1; + accountability.roles = await fetchRolesTree(user.role, database); + + const { admin, app } = await fetchGlobalAccess(accountability, database); + + accountability.admin = admin; + accountability.app = app; } } diff --git a/api/src/utils/get-ast-from-query.ts b/api/src/utils/get-ast-from-query.ts deleted file mode 100644 index 8a2b6206b4..0000000000 --- a/api/src/utils/get-ast-from-query.ts +++ /dev/null @@ -1,407 +0,0 @@ -/** - * Generate an AST based on a given collection and query - */ - -import { REGEX_BETWEEN_PARENS } from '@directus/constants'; -import type { Accountability, PermissionsAction, Query, SchemaOverview } from '@directus/types'; -import type { Knex } from 'knex'; -import { cloneDeep, isEmpty, mapKeys, omitBy, uniq } from 'lodash-es'; -import type { AST, FieldNode, FunctionFieldNode, NestedCollectionNode } from '../types/index.js'; -import { getRelationType } from './get-relation-type.js'; - -type GetASTOptions = { - accountability?: Accountability | null; - action?: PermissionsAction; - knex?: Knex; -}; - -type anyNested = { - [collectionScope: string]: string[]; -}; - -export default async function getASTFromQuery( - collection: string, - query: Query, - schema: SchemaOverview, - options?: GetASTOptions, -): Promise { - query = cloneDeep(query); - - const accountability = options?.accountability; - const action = options?.action || 'read'; - - const permissions = - accountability && accountability.admin !== true - ? accountability?.permissions?.filter((permission) => { - return permission.action === action; - }) ?? [] - : null; - - const ast: AST = { - type: 'root', - name: collection, - query: query, - children: [], - }; - - let fields = ['*']; - - if (query.fields) { - fields = query.fields; - } - - /** - * When using aggregate functions, you can't have any other regular fields - * selected. 
This makes sure you never end up in a non-aggregate fields selection error - */ - if (Object.keys(query.aggregate || {}).length > 0) { - fields = []; - } - - /** - * Similarly, when grouping on a specific field, you can't have other non-aggregated fields. - * The group query will override the fields query - */ - if (query.group) { - fields = query.group; - } - - fields = uniq(fields); - - const deep = query.deep || {}; - - // Prevent fields/deep from showing up in the query object in further use - delete query.fields; - delete query.deep; - - if (!query.sort) { - // We'll default to the primary key for the standard sort output - let sortField = schema.collections[collection]!.primary; - - // If a custom manual sort field is configured, use that - if (schema.collections[collection]?.sortField) { - sortField = schema.collections[collection]!.sortField as string; - } - - // When group by is used, default to the first column provided in the group by clause - if (query.group?.[0]) { - sortField = query.group[0]; - } - - query.sort = [sortField]; - } - - // When no group by is supplied, but an aggregate function is used, only a single row will be - // returned. 
In those cases, we'll ignore the sort field altogether - if (query.aggregate && Object.keys(query.aggregate).length && !query.group?.[0]) { - delete query.sort; - } - - ast.children = await parseFields(collection, fields, deep); - - return ast; - - async function parseFields(parentCollection: string, fields: string[] | null, deep?: Record) { - if (!fields) return []; - - fields = await convertWildcards(parentCollection, fields); - - if (!fields || !Array.isArray(fields)) return []; - - const children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[] = []; - - const relationalStructure: Record = Object.create(null); - - for (const fieldKey of fields) { - let name = fieldKey; - - if (query.alias) { - // check for field alias (is one of the key) - if (name in query.alias) { - name = query.alias[fieldKey]!; - } - } - - const isRelational = - name.includes('.') || - // We'll always treat top level o2m fields as a related item. This is an alias field, otherwise it won't return - // anything - !!schema.relations.find( - (relation) => relation.related_collection === parentCollection && relation.meta?.one_field === name, - ); - - if (isRelational) { - // field is relational - const parts = fieldKey.split('.'); - - let rootField = parts[0]!; - let collectionScope: string | null = null; - - // a2o related collection scoped field selector `fields=sections.section_id:headings.title` - if (rootField.includes(':')) { - const [key, scope] = rootField.split(':'); - rootField = key!; - collectionScope = scope!; - } - - if (rootField in relationalStructure === false) { - if (collectionScope) { - relationalStructure[rootField] = { [collectionScope]: [] }; - } else { - relationalStructure[rootField] = []; - } - } - - if (parts.length > 1) { - const childKey = parts.slice(1).join('.'); - - if (collectionScope) { - if (collectionScope in relationalStructure[rootField]! 
=== false) { - (relationalStructure[rootField] as anyNested)[collectionScope] = []; - } - - (relationalStructure[rootField] as anyNested)[collectionScope]!.push(childKey); - } else { - (relationalStructure[rootField] as string[]).push(childKey); - } - } - } else { - if (fieldKey.includes('(') && fieldKey.includes(')')) { - const columnName = fieldKey.match(REGEX_BETWEEN_PARENS)![1]!; - const foundField = schema.collections[parentCollection]!.fields[columnName]; - - if (foundField && foundField.type === 'alias') { - const foundRelation = schema.relations.find( - (relation) => relation.related_collection === parentCollection && relation.meta?.one_field === columnName, - ); - - if (foundRelation) { - children.push({ - type: 'functionField', - name, - fieldKey, - query: {}, - relatedCollection: foundRelation.collection, - }); - - continue; - } - } - } - - children.push({ type: 'field', name, fieldKey }); - } - } - - for (const [fieldKey, nestedFields] of Object.entries(relationalStructure)) { - let fieldName = fieldKey; - - if (query.alias && fieldKey in query.alias) { - fieldName = query.alias[fieldKey]!; - } - - const relatedCollection = getRelatedCollection(parentCollection, fieldName); - const relation = getRelation(parentCollection, fieldName); - - if (!relation) continue; - - const relationType = getRelationType({ - relation, - collection: parentCollection, - field: fieldName, - }); - - if (!relationType) continue; - - let child: NestedCollectionNode | null = null; - - if (relationType === 'a2o') { - const allowedCollections = relation.meta!.one_allowed_collections!.filter((collection) => { - if (!permissions) return true; - return permissions.some((permission) => permission.collection === collection); - }); - - child = { - type: 'a2o', - names: allowedCollections, - children: {}, - query: {}, - relatedKey: {}, - parentKey: schema.collections[parentCollection]!.primary, - fieldKey: fieldKey, - relation: relation, - }; - - for (const relatedCollection of 
allowedCollections) { - child.children[relatedCollection] = await parseFields( - relatedCollection, - Array.isArray(nestedFields) ? nestedFields : (nestedFields as anyNested)[relatedCollection] || [], - deep?.[`${fieldKey}:${relatedCollection}`], - ); - - child.query[relatedCollection] = getDeepQuery(deep?.[`${fieldKey}:${relatedCollection}`] || {}); - - child.relatedKey[relatedCollection] = schema.collections[relatedCollection]!.primary; - } - } else if (relatedCollection) { - if (permissions && permissions.some((permission) => permission.collection === relatedCollection) === false) { - continue; - } - - // update query alias for children parseFields - const deepAlias = getDeepQuery(deep?.[fieldKey] || {})?.['alias']; - if (!isEmpty(deepAlias)) query.alias = deepAlias; - - child = { - type: relationType, - name: relatedCollection, - fieldKey: fieldKey, - parentKey: schema.collections[parentCollection]!.primary, - relatedKey: schema.collections[relatedCollection]!.primary, - relation: relation, - query: getDeepQuery(deep?.[fieldKey] || {}), - children: await parseFields(relatedCollection, nestedFields as string[], deep?.[fieldKey] || {}), - }; - - if (relationType === 'o2m' && !child!.query.sort) { - child!.query.sort = [relation.meta?.sort_field || schema.collections[relation.collection]!.primary]; - } - } - - if (child) { - children.push(child); - } - } - - // Deduplicate any children fields that are included both as a regular field, and as a nested m2o field - const nestedCollectionNodes = children.filter((childNode) => childNode.type !== 'field'); - - return children.filter((childNode) => { - const existsAsNestedRelational = !!nestedCollectionNodes.find( - (nestedCollectionNode) => childNode.fieldKey === nestedCollectionNode.fieldKey, - ); - - if (childNode.type === 'field' && existsAsNestedRelational) return false; - - return true; - }); - } - - async function convertWildcards(parentCollection: string, fields: string[]) { - fields = cloneDeep(fields); - - 
const fieldsInCollection = Object.entries(schema.collections[parentCollection]!.fields).map(([name]) => name); - - let allowedFields: string[] | null = fieldsInCollection; - - if (permissions) { - const permittedFields = permissions.find((permission) => parentCollection === permission.collection)?.fields; - if (permittedFields !== undefined) allowedFields = permittedFields; - } - - if (!allowedFields || allowedFields.length === 0) return []; - - // In case of full read permissions - if (allowedFields[0] === '*') allowedFields = fieldsInCollection; - - for (let index = 0; index < fields.length; index++) { - const fieldKey = fields[index]!; - - if (fieldKey.includes('*') === false) continue; - - if (fieldKey === '*') { - const aliases = Object.keys(query.alias ?? {}); - - // Set to all fields in collection - if (allowedFields.includes('*')) { - fields.splice(index, 1, ...fieldsInCollection, ...aliases); - } else { - // Set to all allowed fields - const allowedAliases = aliases.filter((fieldKey) => { - const name = query.alias![fieldKey]!; - return allowedFields!.includes(name); - }); - - fields.splice(index, 1, ...allowedFields, ...allowedAliases); - } - } - - // Swap *.* case for *,.*,.* - if (fieldKey.includes('.') && fieldKey.split('.')[0] === '*') { - const parts = fieldKey.split('.'); - - const relationalFields = allowedFields.includes('*') - ? schema.relations - .filter( - (relation) => - relation.collection === parentCollection || relation.related_collection === parentCollection, - ) - .map((relation) => { - const isMany = relation.collection === parentCollection; - return isMany ? relation.field : relation.meta?.one_field; - }) - : allowedFields.filter((fieldKey) => !!getRelation(parentCollection, fieldKey)); - - const nonRelationalFields = allowedFields.filter((fieldKey) => relationalFields.includes(fieldKey) === false); - - const aliasFields = Object.keys(query.alias ?? 
{}).map((fieldKey) => { - const name = query.alias![fieldKey]; - - if (relationalFields.includes(name)) { - return `${fieldKey}.${parts.slice(1).join('.')}`; - } - - return fieldKey; - }); - - fields.splice( - index, - 1, - ...[ - ...relationalFields.map((relationalField) => { - return `${relationalField}.${parts.slice(1).join('.')}`; - }), - ...nonRelationalFields, - ...aliasFields, - ], - ); - } - } - - return fields; - } - - function getRelation(collection: string, field: string) { - const relation = schema.relations.find((relation) => { - return ( - (relation.collection === collection && relation.field === field) || - (relation.related_collection === collection && relation.meta?.one_field === field) - ); - }); - - return relation; - } - - function getRelatedCollection(collection: string, field: string): string | null { - const relation = getRelation(collection, field); - - if (!relation) return null; - - if (relation.collection === collection && relation.field === field) { - return relation.related_collection || null; - } - - if (relation.related_collection === collection && relation.meta?.one_field === field) { - return relation.collection || null; - } - - return null; - } -} - -function getDeepQuery(query: Record) { - return mapKeys( - omitBy(query, (_value, key) => key.startsWith('_') === false), - (_value, key) => key.substring(1), - ); -} diff --git a/api/src/utils/get-cache-key.test.ts b/api/src/utils/get-cache-key.test.ts index 9846387ab8..b27e64affb 100644 --- a/api/src/utils/get-cache-key.test.ts +++ b/api/src/utils/get-cache-key.test.ts @@ -1,12 +1,27 @@ import { useEnv } from '@directus/env'; import type { Request } from 'express'; +import type { Knex } from 'knex'; import { afterEach, beforeAll, beforeEach, describe, expect, test, vi, type MockInstance } from 'vitest'; +import { fetchPoliciesIpAccess } from '../permissions/modules/fetch-policies-ip-access/fetch-policies-ip-access.js'; +import { getDatabase } from '../database/index.js'; import { 
getCacheKey } from './get-cache-key.js'; import * as getGraphqlQueryUtil from './get-graphql-query-and-variables.js'; +vi.mock('../database/index.js'); + +vi.mock('../permissions/modules/fetch-policies-ip-access/fetch-policies-ip-access.js'); + vi.mock('directus/version', () => ({ version: '1.2.3' })); -vi.mock('@directus/env'); +vi.mock('@directus/env', () => ({ + useEnv: vi.fn().mockReturnValue({ + REDIS_ENABLED: false, + }), +})); + +beforeEach(() => { + vi.mocked(getDatabase).mockReturnValue({} as Knex); +}); const baseUrl = 'http://localhost'; const restUrl = `${baseUrl}/items/example`; @@ -67,8 +82,8 @@ afterEach(() => { vi.clearAllMocks(); }); -describe('get cache key', () => { - describe('isGraphQl', () => { +describe('get cache key', async () => { + describe('isGraphQl', async () => { let getGraphqlQuerySpy: MockInstance; beforeAll(() => { @@ -77,30 +92,30 @@ describe('get cache key', () => { test.each(['/items/test', '/items/graphql', '/collections/test', '/collections/graphql'])( 'path "%s" should not be interpreted as a graphql query', - (path) => { - getCacheKey({ originalUrl: `${baseUrl}${path}` } as Request); + async (path) => { + await getCacheKey({ originalUrl: `${baseUrl}${path}` } as Request); expect(getGraphqlQuerySpy).not.toHaveBeenCalled(); }, ); - test.each(['/graphql', '/graphql/system'])('path "%s" should be interpreted as a graphql query', (path) => { - getCacheKey({ originalUrl: `${baseUrl}${path}` } as Request); + test.each(['/graphql', '/graphql/system'])('path "%s" should be interpreted as a graphql query', async (path) => { + await getCacheKey({ originalUrl: `${baseUrl}${path}` } as Request); expect(getGraphqlQuerySpy).toHaveBeenCalledOnce(); }); }); - test.each(cases)('should create a cache key for %s', (_, params, key) => { - expect(getCacheKey(params as unknown as Request)).toEqual(key); + test.each(cases)('should create a cache key for %s', async (_, params, key) => { + expect(await getCacheKey(params as unknown as 
Request)).toEqual(key); }); - test('should create a unique key for each request', () => { - const keys = cases.map(([, params]) => getCacheKey(params as unknown as Request)); + test('should create a unique key for each request', async () => { + const keys = cases.map(async ([, params]) => await getCacheKey(params as unknown as Request)); const hasDuplicate = keys.some((key) => keys.indexOf(key) !== keys.lastIndexOf(key)); expect(hasDuplicate).toBeFalsy(); }); - test('should create a unique key for GraphQL requests with different variables', () => { + test('should create a unique key for GraphQL requests with different variables', async () => { const query = 'query Test ($name: String) { test (filter: { name: { _eq: $name } }) { id } }'; const operationName = 'test'; const variables1 = JSON.stringify({ name: 'test 1' }); @@ -110,9 +125,30 @@ describe('get cache key', () => { const postReq1: any = { method: 'POST', originalUrl: req1.originalUrl, body: req1.query }; const postReq2: any = { method: 'POST', originalUrl: req2.originalUrl, body: req2.query }; - expect(getCacheKey(req1)).not.toEqual(getCacheKey(req2)); - expect(getCacheKey(postReq1)).not.toEqual(getCacheKey(postReq2)); - expect(getCacheKey(req1)).toEqual(getCacheKey(postReq1)); - expect(getCacheKey(req2)).toEqual(getCacheKey(postReq2)); + expect(await getCacheKey(req1)).not.toEqual(await getCacheKey(req2)); + expect(await getCacheKey(postReq1)).not.toEqual(await getCacheKey(postReq2)); + expect(await getCacheKey(req1)).toEqual(await getCacheKey(postReq1)); + expect(await getCacheKey(req2)).toEqual(await getCacheKey(postReq2)); + }); + + test('it should create a unique key for requests which match a policy ip_access filter', async () => { + const reqWithMatchingIp: any = { + method, + originalUrl: restUrl, + accountability: { ...accountability, ip: '127.0.0.1' }, + }; + + const reqWithNotMatchingIp: any = { + method, + originalUrl: restUrl, + accountability: { ...accountability, ip: '127.0.0.2' }, + }; + + 
const reqWithoutIp: any = { method, originalUrl: restUrl, accountability: { ...accountability } }; + + vi.mocked(fetchPoliciesIpAccess).mockResolvedValue([['127.0.0.1']]); + + expect(await getCacheKey(reqWithMatchingIp)).not.toEqual(await getCacheKey(reqWithoutIp)); + expect(await getCacheKey(reqWithNotMatchingIp)).toEqual(await getCacheKey(reqWithoutIp)); }); }); diff --git a/api/src/utils/get-cache-key.ts b/api/src/utils/get-cache-key.ts index a090b260f5..a3a17b77f7 100644 --- a/api/src/utils/get-cache-key.ts +++ b/api/src/utils/get-cache-key.ts @@ -1,18 +1,31 @@ import type { Request } from 'express'; import hash from 'object-hash'; import url from 'url'; +import getDatabase from '../database/index.js'; +import { fetchPoliciesIpAccess } from '../permissions/modules/fetch-policies-ip-access/fetch-policies-ip-access.js'; import { getGraphqlQueryAndVariables } from './get-graphql-query-and-variables.js'; import { version } from 'directus/version'; +import { ipInNetworks } from './ip-in-networks.js'; -export function getCacheKey(req: Request): string { +export async function getCacheKey(req: Request) { const path = url.parse(req.originalUrl).pathname; const isGraphQl = path?.startsWith('/graphql'); + let includeIp = false; + + if (req.accountability && req.accountability.ip) { + // Check if the IP influences the result of the request, that can be the case if some policies have an ip_access + // filter and the request IP matches any of those filters + const ipFilters = await fetchPoliciesIpAccess(req.accountability, getDatabase()); + includeIp = ipFilters.length > 0 && ipFilters.some((networks) => ipInNetworks(req.accountability!.ip!, networks)); + } + const info = { version, user: req.accountability?.user || null, path, query: isGraphQl ? 
getGraphqlQueryAndVariables(req) : req.sanitizedQuery, + ...(includeIp && { ip: req.accountability!.ip }), }; const key = hash(info); diff --git a/api/src/utils/get-column.ts b/api/src/utils/get-column.ts index e669c97619..ac8f7f5a7a 100644 --- a/api/src/utils/get-column.ts +++ b/api/src/utils/get-column.ts @@ -1,5 +1,5 @@ import { REGEX_BETWEEN_PARENS } from '@directus/constants'; -import type { FieldFunction, Query, SchemaOverview } from '@directus/types'; +import type { FieldFunction, Filter, Query, SchemaOverview } from '@directus/types'; import { getFunctionsForType } from '@directus/utils'; import type { Knex } from 'knex'; import { getFunctions } from '../database/helpers/index.js'; @@ -8,6 +8,7 @@ import { applyFunctionToColumnName } from './apply-function-to-column-name.js'; type GetColumnOptions = { query?: Query | undefined; + cases?: Filter[]; originalCollectionName?: string | undefined; }; @@ -49,6 +50,7 @@ export function getColumn( const result = fn[functionName as keyof typeof fn](table, columnName!, { type, query: options?.query, + cases: options?.cases, originalCollectionName: options?.originalCollectionName, }) as Knex.Raw; diff --git a/api/src/utils/get-permissions.ts b/api/src/utils/get-permissions.ts deleted file mode 100644 index 643ee8b55f..0000000000 --- a/api/src/utils/get-permissions.ts +++ /dev/null @@ -1,208 +0,0 @@ -import { useEnv } from '@directus/env'; -import type { Accountability, Permission, SchemaOverview } from '@directus/types'; -import { deepMap, parseFilter, parseJSON, parsePreset } from '@directus/utils'; -import { cloneDeep } from 'lodash-es'; -import hash from 'object-hash'; -import { getCache, getCacheValue, getSystemCache, setCacheValue, setSystemCache } from '../cache.js'; -import getDatabase from '../database/index.js'; -import { appAccessMinimalPermissions } from '@directus/system-data'; -import { useLogger } from '../logger/index.js'; -import { RolesService } from '../services/roles.js'; -import { UsersService } 
from '../services/users.js'; -import { mergePermissionsForShare } from './merge-permissions-for-share.js'; -import { mergePermissions } from './merge-permissions.js'; - -export async function getPermissions(accountability: Accountability, schema: SchemaOverview) { - const database = getDatabase(); - const { cache } = getCache(); - const env = useEnv(); - const logger = useLogger(); - - let permissions: Permission[] = []; - - const { user, role, app, admin, share_scope } = accountability; - const cacheKey = `permissions-${hash({ user, role, app, admin, share_scope })}`; - - if (cache && env['CACHE_PERMISSIONS'] !== false) { - let cachedPermissions; - - try { - cachedPermissions = await getSystemCache(cacheKey); - } catch (err: any) { - logger.warn(err, `[cache] Couldn't read key ${cacheKey}. ${err.message}`); - } - - if (cachedPermissions) { - if (!cachedPermissions['containDynamicData']) { - return processPermissions(accountability, cachedPermissions['permissions'], {}); - } - - const cachedFilterContext = await getCacheValue( - cache, - `filterContext-${hash({ user, role, permissions: cachedPermissions['permissions'] })}`, - ); - - if (cachedFilterContext) { - return processPermissions(accountability, cachedPermissions['permissions'], cachedFilterContext); - } else { - const { - permissions: parsedPermissions, - requiredPermissionData, - containDynamicData, - } = parsePermissions(cachedPermissions['permissions']); - - permissions = parsedPermissions; - - const filterContext = containDynamicData - ? 
await getFilterContext(schema, accountability, requiredPermissionData) - : {}; - - if (containDynamicData && env['CACHE_ENABLED'] !== false) { - await setCacheValue(cache, `filterContext-${hash({ user, role, permissions })}`, filterContext); - } - - return processPermissions(accountability, permissions, filterContext); - } - } - } - - if (accountability.admin !== true) { - const query = database.select('*').from('directus_permissions'); - - if (accountability.role) { - query.where({ role: accountability.role }); - } else { - query.whereNull('role'); - } - - const permissionsForRole = await query; - - const { - permissions: parsedPermissions, - requiredPermissionData, - containDynamicData, - } = parsePermissions(permissionsForRole); - - permissions = parsedPermissions; - - if (accountability.app === true) { - permissions = mergePermissions( - 'or', - permissions, - appAccessMinimalPermissions.map((perm) => ({ ...perm, role: accountability.role })), - ); - } - - if (accountability.share_scope) { - permissions = mergePermissionsForShare(permissions, accountability, schema); - } - - const filterContext = containDynamicData - ? 
await getFilterContext(schema, accountability, requiredPermissionData) - : {}; - - if (cache && env['CACHE_PERMISSIONS'] !== false) { - await setSystemCache(cacheKey, { permissions, containDynamicData }); - - if (containDynamicData && env['CACHE_ENABLED'] !== false) { - await setCacheValue(cache, `filterContext-${hash({ user, role, permissions })}`, filterContext); - } - } - - return processPermissions(accountability, permissions, filterContext); - } - - return permissions; -} - -function parsePermissions(permissions: any[]) { - const requiredPermissionData = { - $CURRENT_USER: [] as string[], - $CURRENT_ROLE: [] as string[], - }; - - let containDynamicData = false; - - permissions = permissions.map((permissionRaw) => { - const permission = cloneDeep(permissionRaw); - - if (permission.permissions && typeof permission.permissions === 'string') { - permission.permissions = parseJSON(permission.permissions); - } - - if (permission.validation && typeof permission.validation === 'string') { - permission.validation = parseJSON(permission.validation); - } else if (permission.validation === null) { - permission.validation = {}; - } - - if (permission.presets && typeof permission.presets === 'string') { - permission.presets = parseJSON(permission.presets); - } else if (permission.presets === null) { - permission.presets = {}; - } - - if (permission.fields && typeof permission.fields === 'string') { - permission.fields = permission.fields.split(','); - } else if (permission.fields === null) { - permission.fields = []; - } - - const extractPermissionData = (val: any) => { - if (typeof val === 'string' && val.startsWith('$CURRENT_USER.')) { - requiredPermissionData.$CURRENT_USER.push(val.replace('$CURRENT_USER.', '')); - containDynamicData = true; - } - - if (typeof val === 'string' && val.startsWith('$CURRENT_ROLE.')) { - requiredPermissionData.$CURRENT_ROLE.push(val.replace('$CURRENT_ROLE.', '')); - containDynamicData = true; - } - - return val; - }; - - 
deepMap(permission.permissions, extractPermissionData); - deepMap(permission.validation, extractPermissionData); - deepMap(permission.presets, extractPermissionData); - - return permission; - }); - - return { permissions, requiredPermissionData, containDynamicData }; -} - -async function getFilterContext(schema: SchemaOverview, accountability: Accountability, requiredPermissionData: any) { - const usersService = new UsersService({ schema }); - const rolesService = new RolesService({ schema }); - - const filterContext: Record = {}; - - if (accountability.user && requiredPermissionData.$CURRENT_USER.length > 0) { - filterContext['$CURRENT_USER'] = await usersService.readOne(accountability.user, { - fields: requiredPermissionData.$CURRENT_USER, - }); - } - - if (accountability.role && requiredPermissionData.$CURRENT_ROLE.length > 0) { - filterContext['$CURRENT_ROLE'] = await rolesService.readOne(accountability.role, { - fields: requiredPermissionData.$CURRENT_ROLE, - }); - } - - return filterContext; -} - -function processPermissions( - accountability: Accountability, - permissions: Permission[], - filterContext: Record, -) { - return permissions.map((permission) => { - permission.permissions = parseFilter(permission.permissions, accountability!, filterContext); - permission.validation = parseFilter(permission.validation, accountability!, filterContext); - permission.presets = parsePreset(permission.presets, accountability!, filterContext); - - return permission; - }); -} diff --git a/api/src/utils/get-service.ts b/api/src/utils/get-service.ts index 9c5ed67e49..033a9e9138 100644 --- a/api/src/utils/get-service.ts +++ b/api/src/utils/get-service.ts @@ -1,5 +1,6 @@ import { ForbiddenError } from '@directus/errors'; import { + AccessService, ActivityService, DashboardsService, FilesService, @@ -10,6 +11,7 @@ import { OperationsService, PanelsService, PermissionsService, + PoliciesService, PresetsService, RevisionsService, RolesService, @@ -28,6 +30,8 @@ import type { 
AbstractServiceOptions } from '../types/services.js'; */ export function getService(collection: string, opts: AbstractServiceOptions): ItemsService { switch (collection) { + case 'directus_access': + return new AccessService(opts); case 'directus_activity': return new ActivityService(opts); case 'directus_dashboards': @@ -48,6 +52,8 @@ export function getService(collection: string, opts: AbstractServiceOptions): It return new PermissionsService(opts); case 'directus_presets': return new PresetsService(opts); + case 'directus_policies': + return new PoliciesService(opts); case 'directus_revisions': return new RevisionsService(opts); case 'directus_roles': diff --git a/api/src/utils/merge-permissions-for-share.ts b/api/src/utils/merge-permissions-for-share.ts deleted file mode 100644 index de4d3c4a02..0000000000 --- a/api/src/utils/merge-permissions-for-share.ts +++ /dev/null @@ -1,181 +0,0 @@ -import type { Accountability, Filter, Permission, SchemaOverview } from '@directus/types'; -import { assign, set, uniq } from 'lodash-es'; -import { schemaPermissions } from '@directus/system-data'; -import { mergePermissions } from './merge-permissions.js'; -import { reduceSchema } from './reduce-schema.js'; - -export function mergePermissionsForShare( - currentPermissions: Permission[], - accountability: Accountability, - schema: SchemaOverview, -): Permission[] { - const defaults: Permission = { - action: 'read', - role: accountability.role, - collection: '', - permissions: {}, - validation: null, - presets: null, - fields: null, - }; - - const { collection, item } = accountability.share_scope!; - - const parentPrimaryKeyField = schema.collections[collection]!.primary; - - const reducedSchema = reduceSchema(schema, currentPermissions, ['read']); - - const relationalPermissions = traverse(reducedSchema, parentPrimaryKeyField, item, collection); - - const parentCollectionPermission: Permission = assign({}, defaults, { - collection, - permissions: { - [parentPrimaryKeyField]: 
{ - _eq: item, - }, - }, - }); - - // All permissions that will be merged into the original permissions set - const allGeneratedPermissions = [ - parentCollectionPermission, - ...relationalPermissions.map((generated) => assign({}, defaults, generated)), - ...schemaPermissions, - ]; - - // All the collections that are touched through the relational tree from the current root collection, and the schema collections - const allowedCollections = uniq(allGeneratedPermissions.map(({ collection }) => collection)); - - const generatedPermissions: Permission[] = []; - - // Merge all the permissions that relate to the same collection with an _or (this allows you to properly retrieve) - // the items of a collection if you entered that collection from multiple angles - for (const collection of allowedCollections) { - const permissionsForCollection = allGeneratedPermissions.filter( - (permission) => permission.collection === collection, - ); - - if (permissionsForCollection.length > 0) { - generatedPermissions.push(...mergePermissions('or', permissionsForCollection)); - } else { - generatedPermissions.push(...permissionsForCollection); - } - } - - // Explicitly filter out permissions to collections unrelated to the root parent item. - const limitedPermissions = currentPermissions.filter( - ({ action, collection }) => allowedCollections.includes(collection) && action === 'read', - ); - - return mergePermissions('and', limitedPermissions, generatedPermissions); -} - -export function traverse( - schema: SchemaOverview, - rootItemPrimaryKeyField: string, - rootItemPrimaryKey: string, - currentCollection: string, - parentCollections: string[] = [], - path: string[] = [], -): Partial[] { - const permissions: Partial[] = []; - - // If there's already a permissions rule for the collection we're currently checking, we'll shortcircuit. 
- // This prevents infinite loop in recursive relationships, like articles->related_articles->articles, or - // articles.author->users.avatar->files.created_by->users.avatar->files.created_by->🔁 - if (parentCollections.includes(currentCollection)) { - return permissions; - } - - const relationsInCollection = schema.relations.filter((relation) => { - return relation.collection === currentCollection || relation.related_collection === currentCollection; - }); - - for (const relation of relationsInCollection) { - let type; - - if (relation.related_collection === currentCollection) { - type = 'o2m'; - } else if (!relation.related_collection) { - type = 'a2o'; - } else { - type = 'm2o'; - } - - if (type === 'o2m') { - permissions.push({ - collection: relation.collection, - permissions: getFilterForPath(type, [...path, relation.field], rootItemPrimaryKeyField, rootItemPrimaryKey), - }); - - permissions.push( - ...traverse( - schema, - rootItemPrimaryKeyField, - rootItemPrimaryKey, - relation.collection, - [...parentCollections, currentCollection], - [...path, relation.field], - ), - ); - } - - if (type === 'a2o' && relation.meta?.one_allowed_collections) { - for (const collection of relation.meta.one_allowed_collections) { - permissions.push({ - collection, - permissions: getFilterForPath( - type, - [...path, `$FOLLOW(${relation.collection},${relation.field},${relation.meta.one_collection_field})`], - rootItemPrimaryKeyField, - rootItemPrimaryKey, - ), - }); - } - } - - if (type === 'm2o') { - permissions.push({ - collection: relation.related_collection!, - permissions: getFilterForPath( - type, - [...path, `$FOLLOW(${relation.collection},${relation.field})`], - rootItemPrimaryKeyField, - rootItemPrimaryKey, - ), - }); - - if (relation.meta?.one_field) { - permissions.push( - ...traverse( - schema, - rootItemPrimaryKeyField, - rootItemPrimaryKey, - relation.related_collection!, - [...parentCollections, currentCollection], - [...path, relation.meta?.one_field], - ), - ); - 
} - } - } - - return permissions; -} - -export function getFilterForPath( - type: 'o2m' | 'm2o' | 'a2o', - path: string[], - rootPrimaryKeyField: string, - rootPrimaryKey: string, -): Filter { - const filter: Filter = {}; - - if (type === 'm2o' || type === 'a2o') { - set(filter, path.reverse(), { [rootPrimaryKeyField]: { _eq: rootPrimaryKey } }); - } else { - set(filter, path.reverse(), { _eq: rootPrimaryKey }); - } - - return filter; -} diff --git a/api/src/utils/merge-permissions.test.ts b/api/src/utils/merge-permissions.test.ts deleted file mode 100644 index 91aeee34db..0000000000 --- a/api/src/utils/merge-permissions.test.ts +++ /dev/null @@ -1,132 +0,0 @@ -import type { Filter, Permission } from '@directus/types'; -import { describe, expect, test } from 'vitest'; -import { mergePermission } from './merge-permissions.js'; - -const fullFilter = {} as Filter; -const conditionalFilter = { user: { id: { _eq: '$CURRENT_USER' } } } as Filter; -const conditionalFilter2 = { count: { _gt: 42 } } as Filter; - -const permissionTemplate = { - role: null, - collection: 'directus_users', - permissions: null, - validation: null, - presets: null, - fields: null, -} as Permission; - -describe('merging permissions', () => { - test('processes _or permissions', () => { - const mergedPermission = mergePermission( - 'or', - { ...permissionTemplate, permissions: conditionalFilter }, - { ...permissionTemplate, permissions: conditionalFilter2 }, - ); - - expect(mergedPermission).toStrictEqual({ - ...permissionTemplate, - permissions: { - _or: [conditionalFilter, conditionalFilter2], - }, - }); - }); - - test('processes _or validations', () => { - const mergedPermission = mergePermission( - 'or', - { ...permissionTemplate, validation: conditionalFilter }, - { ...permissionTemplate, validation: conditionalFilter2 }, - ); - - expect(mergedPermission).toStrictEqual({ - ...permissionTemplate, - validation: { - _or: [conditionalFilter, conditionalFilter2], - }, - }); - }); - - 
test('processes _and permissions', () => { - const mergedPermission = mergePermission( - 'and', - { ...permissionTemplate, permissions: conditionalFilter }, - { ...permissionTemplate, permissions: conditionalFilter2 }, - ); - - expect(mergedPermission).toStrictEqual({ - ...permissionTemplate, - permissions: { - _and: [conditionalFilter, conditionalFilter2], - }, - }); - }); - - test('processes _and validations', () => { - const mergedPermission = mergePermission( - 'and', - { ...permissionTemplate, validation: conditionalFilter }, - { ...permissionTemplate, validation: conditionalFilter2 }, - ); - - expect(mergedPermission).toStrictEqual({ - ...permissionTemplate, - validation: { - _and: [conditionalFilter, conditionalFilter2], - }, - }); - }); - - test('{} supersedes conditional permissions in _or', () => { - const mergedPermission = mergePermission( - 'or', - { ...permissionTemplate, permissions: fullFilter }, - { ...permissionTemplate, permissions: conditionalFilter }, - ); - - expect(mergedPermission).toStrictEqual({ ...permissionTemplate, permissions: fullFilter }); - }); - - test('{} supersedes conditional validations in _or', () => { - const mergedPermission = mergePermission( - 'or', - { ...permissionTemplate, validation: fullFilter }, - { ...permissionTemplate, validation: conditionalFilter }, - ); - - expect(mergedPermission).toStrictEqual({ ...permissionTemplate, validation: fullFilter }); - }); - - test('{} does not supersede conditional permissions in _and', () => { - const mergedPermission = mergePermission( - 'and', - { ...permissionTemplate, permissions: fullFilter }, - { ...permissionTemplate, permissions: conditionalFilter }, - ); - - const expectedPermission = { - ...permissionTemplate, - permissions: { - _and: [fullFilter, conditionalFilter], - }, - }; - - expect(mergedPermission).toStrictEqual(expectedPermission); - }); - - test('{} does not supersede conditional validations in _and', () => { - const mergedPermission = mergePermission( - 'and', 
- { ...permissionTemplate, validation: fullFilter }, - { ...permissionTemplate, validation: conditionalFilter }, - ); - - const expectedPermission = { - ...permissionTemplate, - validation: { - _and: [fullFilter, conditionalFilter], - }, - }; - - expect(mergedPermission).toStrictEqual(expectedPermission); - }); -}); diff --git a/api/src/utils/merge-permissions.ts b/api/src/utils/merge-permissions.ts deleted file mode 100644 index 4a8f6cf094..0000000000 --- a/api/src/utils/merge-permissions.ts +++ /dev/null @@ -1,105 +0,0 @@ -import type { LogicalFilterAND, LogicalFilterOR, Permission } from '@directus/types'; -import { flatten, intersection, isEqual, merge, omit } from 'lodash-es'; - -export function mergePermissions(strategy: 'and' | 'or', ...permissions: Permission[][]): Permission[] { - const allPermissions = flatten(permissions); - - const mergedPermissions = allPermissions - .reduce((acc, val) => { - const key = `${val.collection}__${val.action}__${val.role || '$PUBLIC'}`; - const current = acc.get(key); - acc.set(key, current ? 
mergePermission(strategy, current, val) : val); - return acc; - }, new Map()) - .values(); - - return Array.from(mergedPermissions); -} - -export function mergePermission( - strategy: 'and' | 'or', - currentPerm: Permission, - newPerm: Permission, -): Omit { - const logicalKey = `_${strategy}` as keyof LogicalFilterOR | keyof LogicalFilterAND; - - let permissions = currentPerm.permissions; - let validation = currentPerm.validation; - let fields = currentPerm.fields; - let presets = currentPerm.presets; - - if (newPerm.permissions) { - if (currentPerm.permissions && Object.keys(currentPerm.permissions)[0] === logicalKey) { - permissions = { - [logicalKey]: [ - ...(currentPerm.permissions as LogicalFilterOR & LogicalFilterAND)[logicalKey], - newPerm.permissions, - ], - } as LogicalFilterAND | LogicalFilterOR; - } else if (currentPerm.permissions) { - // Empty {} supersedes other permissions in _OR merge - if (strategy === 'or' && (isEqual(currentPerm.permissions, {}) || isEqual(newPerm.permissions, {}))) { - permissions = {}; - } else { - permissions = { - [logicalKey]: [currentPerm.permissions, newPerm.permissions], - } as LogicalFilterAND | LogicalFilterOR; - } - } else { - permissions = { - [logicalKey]: [newPerm.permissions], - } as LogicalFilterAND | LogicalFilterOR; - } - } - - if (newPerm.validation) { - if (currentPerm.validation && Object.keys(currentPerm.validation)[0] === logicalKey) { - validation = { - [logicalKey]: [ - ...(currentPerm.validation as LogicalFilterOR & LogicalFilterAND)[logicalKey], - newPerm.validation, - ], - } as LogicalFilterAND | LogicalFilterOR; - } else if (currentPerm.validation) { - // Empty {} supersedes other validations in _OR merge - if (strategy === 'or' && (isEqual(currentPerm.validation, {}) || isEqual(newPerm.validation, {}))) { - validation = {}; - } else { - validation = { - [logicalKey]: [currentPerm.validation, newPerm.validation], - } as LogicalFilterAND | LogicalFilterOR; - } - } else { - validation = { - 
[logicalKey]: [newPerm.validation], - } as LogicalFilterAND | LogicalFilterOR; - } - } - - if (newPerm.fields) { - if (Array.isArray(currentPerm.fields) && strategy === 'or') { - fields = [...new Set([...currentPerm.fields, ...newPerm.fields])]; - } else if (Array.isArray(currentPerm.fields) && strategy === 'and') { - fields = intersection(currentPerm.fields, newPerm.fields); - } else { - fields = newPerm.fields; - } - - if (fields.includes('*')) fields = ['*']; - } - - if (newPerm.presets) { - presets = merge({}, presets, newPerm.presets); - } - - return omit( - { - ...currentPerm, - permissions, - validation, - fields, - presets, - }, - ['id', 'system'], - ); -} diff --git a/api/src/utils/reduce-schema.ts b/api/src/utils/reduce-schema.ts index b81b00f6b7..2e9612b6f3 100644 --- a/api/src/utils/reduce-schema.ts +++ b/api/src/utils/reduce-schema.ts @@ -1,57 +1,26 @@ -import type { Permission, PermissionsAction, SchemaOverview } from '@directus/types'; -import { uniq } from 'lodash-es'; +import type { SchemaOverview } from '@directus/types'; +import type { FieldMap } from '../permissions/modules/fetch-allowed-field-map/fetch-allowed-field-map.js'; /** * Reduces the schema based on the included permissions. The resulting object is the schema structure, but with only - * the allowed collections/fields/relations included based on the permissions. - * @param schema The full project schema - * @param actions Array of permissions actions (crud) - * @returns Reduced schema + * the allowed collections/fields/relations included based on the passed field map. 
*/ -export function reduceSchema( - schema: SchemaOverview, - permissions: Permission[] | null, - actions: PermissionsAction[] = ['create', 'read', 'update', 'delete'], -): SchemaOverview { +export function reduceSchema(schema: SchemaOverview, fieldMap: FieldMap): SchemaOverview { const reduced: SchemaOverview = { collections: {}, relations: [], }; - const allowedFieldsInCollection = - permissions - ?.filter((permission) => actions.includes(permission.action)) - .reduce( - (acc, permission) => { - if (!acc[permission.collection]) { - acc[permission.collection] = []; - } - - if (permission.fields) { - acc[permission.collection] = uniq([...acc[permission.collection]!, ...permission.fields]); - } - - return acc; - }, - {} as { [collection: string]: string[] }, - ) ?? {}; - for (const [collectionName, collection] of Object.entries(schema.collections)) { - if ( - !permissions?.some( - (permission) => permission.collection === collectionName && actions.includes(permission.action), - ) - ) { + if (!fieldMap[collectionName]) { + // Collection is not allowed at all continue; } const fields: SchemaOverview['collections'][string]['fields'] = {}; for (const [fieldName, field] of Object.entries(schema.collections[collectionName]!.fields)) { - if ( - !allowedFieldsInCollection[collectionName]?.includes('*') && - !allowedFieldsInCollection[collectionName]?.includes(fieldName) - ) { + if (!fieldMap[collectionName]?.includes('*') && !fieldMap[collectionName]?.includes(fieldName)) { continue; } @@ -59,12 +28,7 @@ export function reduceSchema( (relation) => relation.related_collection === collectionName && relation.meta?.one_field === fieldName, ); - if ( - o2mRelation && - !permissions?.some( - (permission) => permission.collection === o2mRelation.collection && actions.includes(permission.action), - ) - ) { + if (o2mRelation && !fieldMap[collectionName]) { continue; } @@ -81,32 +45,30 @@ export function reduceSchema( let collectionsAllowed = true; let fieldsAllowed = true; - if 
(Object.keys(allowedFieldsInCollection).includes(relation.collection) === false) { + if (Object.keys(fieldMap).includes(relation.collection) === false) { collectionsAllowed = false; } if ( relation.related_collection && - (Object.keys(allowedFieldsInCollection).includes(relation.related_collection) === false || + (Object.keys(fieldMap).includes(relation.related_collection) === false || // Ignore legacy permissions with an empty fields array - allowedFieldsInCollection[relation.related_collection]?.length === 0) + fieldMap[relation.related_collection]?.length === 0) ) { collectionsAllowed = false; } if ( relation.meta?.one_allowed_collections && - relation.meta.one_allowed_collections.every((collection) => - Object.keys(allowedFieldsInCollection).includes(collection), - ) === false + relation.meta.one_allowed_collections.every((collection) => Object.keys(fieldMap).includes(collection)) === false ) { collectionsAllowed = false; } if ( - !allowedFieldsInCollection[relation.collection] || - (allowedFieldsInCollection[relation.collection]?.includes('*') === false && - allowedFieldsInCollection[relation.collection]?.includes(relation.field) === false) + !fieldMap[relation.collection] || + (fieldMap[relation.collection]?.includes('*') === false && + fieldMap[relation.collection]?.includes(relation.field) === false) ) { fieldsAllowed = false; } @@ -114,9 +76,9 @@ export function reduceSchema( if ( relation.related_collection && relation.meta?.one_field && - (!allowedFieldsInCollection[relation.related_collection] || - (allowedFieldsInCollection[relation.related_collection]?.includes('*') === false && - allowedFieldsInCollection[relation.related_collection]?.includes(relation.meta?.one_field) === false)) + (!fieldMap[relation.related_collection] || + (fieldMap[relation.related_collection]?.includes('*') === false && + fieldMap[relation.related_collection]?.includes(relation.meta?.one_field) === false)) ) { fieldsAllowed = false; } diff --git 
a/api/src/utils/validate-user-count-integrity.test.ts b/api/src/utils/validate-user-count-integrity.test.ts new file mode 100644 index 0000000000..81ba4b2985 --- /dev/null +++ b/api/src/utils/validate-user-count-integrity.test.ts @@ -0,0 +1,3 @@ +import { test } from 'vitest'; + +test.todo('unimplemented test'); diff --git a/api/src/utils/validate-user-count-integrity.ts b/api/src/utils/validate-user-count-integrity.ts new file mode 100644 index 0000000000..9599040045 --- /dev/null +++ b/api/src/utils/validate-user-count-integrity.ts @@ -0,0 +1,39 @@ +import { validateRemainingAdminCount } from '../permissions/modules/validate-remaining-admin/validate-remaining-admin-count.js'; +import { checkUserLimits } from '../telemetry/utils/check-user-limits.js'; +import { shouldCheckUserLimits } from '../telemetry/utils/should-check-user-limits.js'; +import { fetchUserCount, type FetchUserCountOptions } from './fetch-user-count/fetch-user-count.js'; + +export enum UserIntegrityCheckFlag { + None = 0, + /** Check if the number of remaining admin users is greater than 0 */ + RemainingAdmins = 1 << 0, + /** Check if the number of users is within the limits */ + UserLimits = 1 << 1, + All = ~(~0 << 2), +} + +export interface ValidateUserCountIntegrityOptions extends Omit { + flags: UserIntegrityCheckFlag; +} + +export async function validateUserCountIntegrity(options: ValidateUserCountIntegrityOptions) { + const validateUserLimits = (options.flags & UserIntegrityCheckFlag.UserLimits) !== 0; + const validateRemainingAdminUsers = (options.flags & UserIntegrityCheckFlag.RemainingAdmins) !== 0; + + const limitCheck = validateUserLimits && shouldCheckUserLimits(); + + if (!validateRemainingAdminUsers && !limitCheck) { + return; + } + + const adminOnly = validateRemainingAdminUsers && !limitCheck; + const userCounts = await fetchUserCount({ ...options, adminOnly }); + + if (limitCheck) { + await checkUserLimits(userCounts); + } + + if (validateRemainingAdminUsers) { + 
validateRemainingAdminCount(userCounts.admin); + } +} diff --git a/api/src/websocket/authenticate.test.ts b/api/src/websocket/authenticate.test.ts index 52ea2737f4..869946042c 100644 --- a/api/src/websocket/authenticate.test.ts +++ b/api/src/websocket/authenticate.test.ts @@ -1,10 +1,9 @@ +import { InvalidCredentialsError } from '@directus/errors'; import type { Accountability } from '@directus/types'; import type { Mock } from 'vitest'; import { describe, expect, test, vi } from 'vitest'; -import { InvalidCredentialsError } from '@directus/errors'; import { getAccountabilityForToken } from '../utils/get-accountability-for-token.js'; -import { getPermissions } from '../utils/get-permissions.js'; -import { authenticateConnection, authenticationSuccess, refreshAccountability } from './authenticate.js'; +import { authenticateConnection, authenticationSuccess } from './authenticate.js'; import type { WebSocketAuthMessage } from './messages.js'; import { getExpiresAtForToken } from './utils/get-expires-at-for-token.js'; @@ -106,23 +105,6 @@ describe('authenticateConnection', () => { }); }); -describe('refreshAccountability', () => { - test('update permissions', async () => { - (getPermissions as Mock).mockReturnValue([]); - - const result = await refreshAccountability({ - role: '123-456-789', - user: 'abc-def-ghi', - }); - - expect(result).toStrictEqual({ - role: '123-456-789', - user: 'abc-def-ghi', - permissions: [], - }); - }); -}); - describe('authenticationSuccess', () => { test('without uid', async () => { const result = authenticationSuccess(); diff --git a/api/src/websocket/authenticate.ts b/api/src/websocket/authenticate.ts index aea7120058..218d32a5ee 100644 --- a/api/src/websocket/authenticate.ts +++ b/api/src/websocket/authenticate.ts @@ -1,8 +1,6 @@ -import type { Accountability } from '@directus/types'; import { DEFAULT_AUTH_PROVIDER } from '../constants.js'; import { AuthenticationService } from '../services/index.js'; import { getAccountabilityForToken } 
from '../utils/get-accountability-for-token.js'; -import { getPermissions } from '../utils/get-permissions.js'; import { getSchema } from '../utils/get-schema.js'; import { WebSocketError } from './errors.js'; import type { BasicAuthMessage, WebSocketResponse } from './messages.js'; @@ -42,22 +40,6 @@ export async function authenticateConnection( } } -export async function refreshAccountability( - accountability: Accountability | null | undefined, -): Promise { - accountability = accountability ?? { - role: null, - user: null, - admin: false, - app: false, - }; - - const schema = await getSchema(); - const permissions = await getPermissions(accountability, schema); - - return { ...accountability, permissions }; -} - export function authenticationSuccess(uid?: string | number, refresh_token?: string): string { const message: WebSocketResponse = { type: 'auth', diff --git a/api/src/websocket/controllers/graphql.ts b/api/src/websocket/controllers/graphql.ts index 3f820fb8f0..3543900497 100644 --- a/api/src/websocket/controllers/graphql.ts +++ b/api/src/websocket/controllers/graphql.ts @@ -7,7 +7,7 @@ import { useLogger } from '../../logger/index.js'; import { bindPubSub } from '../../services/graphql/subscription.js'; import { GraphQLService } from '../../services/index.js'; import { getSchema } from '../../utils/get-schema.js'; -import { authenticateConnection, refreshAccountability } from '../authenticate.js'; +import { authenticateConnection } from '../authenticate.js'; import { handleWebSocketError } from '../errors.js'; import { ConnectionParams, WebSocketMessage } from '../messages.js'; import type { AuthenticationState, GraphQLSocket, UpgradeContext, WebSocketClient } from '../types.js'; @@ -78,8 +78,6 @@ export class GraphQLSubscriptionController extends SocketController { // the first message should authenticate successfully in this mode client.close(CloseCode.Forbidden, 'Forbidden'); return; - } else { - client.accountability = await 
refreshAccountability(client.accountability); } await cb(JSON.stringify(message)); diff --git a/api/src/websocket/controllers/hooks.ts b/api/src/websocket/controllers/hooks.ts index bbb5af3ba1..e31469f00f 100644 --- a/api/src/websocket/controllers/hooks.ts +++ b/api/src/websocket/controllers/hooks.ts @@ -10,19 +10,23 @@ export function registerWebSocketEvents() { registerActionHooks([ 'items', + 'access', 'activity', 'collections', 'dashboards', + 'flows', 'folders', 'notifications', 'operations', 'panels', 'permissions', + 'policies', 'presets', 'revisions', 'roles', 'settings', 'shares', + 'translations', 'users', 'versions', 'webhooks', diff --git a/api/src/websocket/controllers/rest.ts b/api/src/websocket/controllers/rest.ts index 8782498109..d48b9db792 100644 --- a/api/src/websocket/controllers/rest.ts +++ b/api/src/websocket/controllers/rest.ts @@ -4,7 +4,6 @@ import type { Server as httpServer } from 'http'; import type WebSocket from 'ws'; import emitter from '../../emitter.js'; import { useLogger } from '../../logger/index.js'; -import { refreshAccountability } from '../authenticate.js'; import { WebSocketError, handleWebSocketError } from '../errors.js'; import { WebSocketMessage } from '../messages.js'; import type { AuthenticationState, WebSocketClient } from '../types.js'; @@ -29,7 +28,6 @@ export class WebSocketController extends SocketController { client.on('parsed-message', async (message: WebSocketMessage) => { try { message = WebSocketMessage.parse(await emitter.emitFilter('websocket.message', message, { client })); - client.accountability = await refreshAccountability(client.accountability); emitter.emitAction('websocket.message', { message, client }); } catch (error) { handleWebSocketError(client, error, 'server'); diff --git a/api/src/websocket/handlers/subscribe.ts b/api/src/websocket/handlers/subscribe.ts index 1dd4287187..524d61607f 100644 --- a/api/src/websocket/handlers/subscribe.ts +++ b/api/src/websocket/handlers/subscribe.ts @@ -4,7 
+4,6 @@ import { useBus } from '../../bus/index.js'; import emitter from '../../emitter.js'; import { getSchema } from '../../utils/get-schema.js'; import { sanitizeQuery } from '../../utils/sanitize-query.js'; -import { refreshAccountability } from '../authenticate.js'; import { WebSocketError, handleWebSocketError } from '../errors.js'; import type { WebSocketEvent } from '../messages.js'; import { WebSocketSubscribeMessage } from '../messages.js'; @@ -124,8 +123,6 @@ export class SubscribeHandler { } try { - client.accountability = await refreshAccountability(client.accountability); - const result = await getPayload(subscription, client.accountability, schema, event); if (Array.isArray(result?.['data']) && result?.['data']?.length === 0) continue; diff --git a/app/src/modules/settings/routes/roles/app-permissions.ts b/app/src/app-permissions.ts similarity index 98% rename from app/src/modules/settings/routes/roles/app-permissions.ts rename to app/src/app-permissions.ts index a4e7f22036..227bfad59b 100644 --- a/app/src/modules/settings/routes/roles/app-permissions.ts +++ b/app/src/app-permissions.ts @@ -140,6 +140,7 @@ export const appRecommendedPermissions: Partial[] = [ permissions: { _or: [ { + // TODO should this be _in $CURRENT_ROLES? 
role: { _eq: '$CURRENT_ROLE', }, diff --git a/app/src/components/v-chip.vue b/app/src/components/v-chip.vue index 541dae2ac5..ab2fda6081 100644 --- a/app/src/components/v-chip.vue +++ b/app/src/components/v-chip.vue @@ -1,6 +1,6 @@ + + + + diff --git a/app/src/interfaces/_system/system-permissions/detail/components/actions.vue b/app/src/interfaces/_system/system-permissions/detail/components/actions.vue new file mode 100644 index 0000000000..8ff978a519 --- /dev/null +++ b/app/src/interfaces/_system/system-permissions/detail/components/actions.vue @@ -0,0 +1,33 @@ + + + + + diff --git a/app/src/modules/settings/routes/roles/permissions-detail/components/app-minimal.vue b/app/src/interfaces/_system/system-permissions/detail/components/app-minimal.vue similarity index 100% rename from app/src/modules/settings/routes/roles/permissions-detail/components/app-minimal.vue rename to app/src/interfaces/_system/system-permissions/detail/components/app-minimal.vue diff --git a/app/src/modules/settings/routes/roles/permissions-detail/components/fields.vue b/app/src/interfaces/_system/system-permissions/detail/components/fields.vue similarity index 93% rename from app/src/modules/settings/routes/roles/permissions-detail/components/fields.vue rename to app/src/interfaces/_system/system-permissions/detail/components/fields.vue index b1c3cf0794..7988a04c80 100644 --- a/app/src/modules/settings/routes/roles/permissions-detail/components/fields.vue +++ b/app/src/interfaces/_system/system-permissions/detail/components/fields.vue @@ -1,14 +1,14 @@ -
+
- +
diff --git a/app/src/interfaces/_system/system-permissions/index.ts b/app/src/interfaces/_system/system-permissions/index.ts new file mode 100644 index 0000000000..d4496f399f --- /dev/null +++ b/app/src/interfaces/_system/system-permissions/index.ts @@ -0,0 +1,14 @@ +import { defineInterface } from '@directus/extensions'; +import SystemPermissionsInterface from './system-permissions.vue'; + +export default defineInterface({ + id: 'system-permissions', + name: '$t:permissions', + component: SystemPermissionsInterface, + icon: 'verified', + types: ['alias'], + localTypes: ['o2m'], + relational: true, + options: [], + system: true, +}); diff --git a/app/src/interfaces/_system/system-permissions/permissions-header.vue b/app/src/interfaces/_system/system-permissions/permissions-header.vue new file mode 100644 index 0000000000..cda7f24869 --- /dev/null +++ b/app/src/interfaces/_system/system-permissions/permissions-header.vue @@ -0,0 +1,28 @@ + + + + + diff --git a/app/src/interfaces/_system/system-permissions/permissions-row.vue b/app/src/interfaces/_system/system-permissions/permissions-row.vue new file mode 100644 index 0000000000..97cac94339 --- /dev/null +++ b/app/src/interfaces/_system/system-permissions/permissions-row.vue @@ -0,0 +1,140 @@ + + + + + diff --git a/app/src/interfaces/_system/system-permissions/permissions-toggle.vue b/app/src/interfaces/_system/system-permissions/permissions-toggle.vue new file mode 100644 index 0000000000..ecd7d405eb --- /dev/null +++ b/app/src/interfaces/_system/system-permissions/permissions-toggle.vue @@ -0,0 +1,164 @@ + + + + + diff --git a/app/src/interfaces/_system/system-permissions/system-permissions.vue b/app/src/interfaces/_system/system-permissions/system-permissions.vue new file mode 100644 index 0000000000..1d1ddc1745 --- /dev/null +++ b/app/src/interfaces/_system/system-permissions/system-permissions.vue @@ -0,0 +1,765 @@ + + + + + diff --git a/app/src/interfaces/list-m2m/list-m2m.vue 
b/app/src/interfaces/list-m2m/list-m2m.vue index ccde39cd63..8396971acf 100644 --- a/app/src/interfaces/list-m2m/list-m2m.vue +++ b/app/src/interfaces/list-m2m/list-m2m.vue @@ -24,7 +24,7 @@ import Draggable from 'vuedraggable'; const props = withDefaults( defineProps<{ value?: (number | string | Record)[] | Record; - primaryKey: string | number; + primaryKey: string | number | null; collection: string; field: string; width: string; @@ -41,6 +41,7 @@ const props = withDefaults( limit?: number; allowDuplicates?: boolean; junctionFieldLocation?: string; + junctionFilter?: Filter | null; }>(), { value: () => [], @@ -123,6 +124,7 @@ const page = ref(1); const search = ref(''); const searchFilter = ref(); const sort = ref(); +const junctionFilter = ref(props.junctionFilter ?? null); const query = computed(() => { const q: RelationQueryMultiple = { @@ -139,6 +141,14 @@ const query = computed(() => { q.filter = searchFilter.value; } + if (junctionFilter.value) { + if (q.filter) { + q.filter = { _and: [q.filter, junctionFilter.value] }; + } else { + q.filter = junctionFilter.value; + } + } + if (search.value) { q.search = search.value; } @@ -221,7 +231,9 @@ watch( }) .filter((key) => key !== null); }, - { immediate: true }, + { + immediate: true, + }, ); const spacings = { diff --git a/app/src/lang/translations/en-US.yaml b/app/src/lang/translations/en-US.yaml index 54a34ac525..ac2714da4d 100644 --- a/app/src/lang/translations/en-US.yaml +++ b/app/src/lang/translations/en-US.yaml @@ -115,6 +115,7 @@ field_name_translations: Field Name Translations enter_password_to_enable_tfa: Enter your password to enable Two-Factor Authentication add_field: Add Field role_name: Role Name +policy_name: Policy Name branch: Branch leaf: Leaf indeterminate: Indeterminate @@ -132,6 +133,7 @@ requires_value: Requires value create_preset: Create Preset create_panel: Create Panel create_role: Create Role +create_policy: Create Policy create_user: Create User message: Message delete_panel: Delete 
Panel @@ -195,7 +197,12 @@ logoutReason: SESSION_EXPIRED: Session expired public_label: Public public_description: Controls what API data is available without authenticating. +public_role_info: >- + The public role controls what API data is available to unauthenticated users or users without a role. Requests with + the public role can not have app or admin access. If app or admin access are given to a policy assigned to the public + role they will be ignored. admin_description: Initial administrative role with unrestricted App/API access. +admin_policy_description: Initial administrative policy with unrestricted App/API access. no_description: No description... reached_maximum_number_of_extensions: You've reached the maximum number of extensions for this project ({n}). Contact your system administrator for more @@ -268,9 +275,13 @@ field_permissions: Field Permissions field_validation: Field Validation field_presets: Field Presets permissions_for_role: 'Items the {role} Role can {action}.' +permissions_for_policy: 'Items the {policy} Policy can {action}.' fields_for_role: 'Fields the {role} Role can {action}.' +fields_for_policy: 'Fields the {policy} Policy can {action}.' validation_for_role: 'Field {action} rules the {role} Role must obey.' +validation_for_policy: 'Field {action} rules the {policy} Policy must obey.' presets_for_role: 'Field value defaults for the {role} Role.' +presets_for_policy: 'Field value defaults for the {policy} Policy.' presets_field_warning: 'Relational presets for field "{field}" should be configured with the "detailed" syntax.' presentation_and_aliases: Presentation & Aliases revision_post_create: Here is what this item looked like when it was created. @@ -377,6 +388,7 @@ no_collections_found: No collections found. new_data_alert: 'The following will be created within your Data Model:' search_collection: Search Collection... search_role: Search Role... +search_policy: Search Policy... search_field: Search Field... 
new_field: 'New Field' new_collection: 'New Collection' @@ -527,6 +539,11 @@ hours: Hours month: Month year: Year select_all: Select All +permissionsLevel: + all: Full {action} Access + partial: Partial {action} Access + custom: Partial {action} Access + none: No {action} Access months: january: January february: February @@ -665,6 +682,7 @@ copy_to: Copy To... no_other_dashboards_copy: You don't have any other Dashboards yet. inactive: Inactive users: Users +roles: Roles activity: Activity activity_item: Activity Item action: Action @@ -1106,7 +1124,8 @@ editing_unit: 'Editing {unit}' editing_in_batch: 'Batch Editing {count} Items' no_options_available: No options available settings_data_model: Data Model -settings_permissions: Access Control +settings_roles: User Roles +settings_permissions: Access Policies settings_project: Settings settings_appearance: Appearance settings_webhooks: Webhooks @@ -1160,7 +1179,9 @@ page_help_settings_datamodel_collections: >- as well as unmanaged database tables that can be added. page_help_settings_datamodel_fields: >- **Data Model: Collection** — A form for managing this collection and its fields. -page_help_settings_roles_collection: '**Browse Roles** — Lists the Admin, Public and custom User Roles.' +page_help_settings_policies_collection: '**Browse Policies** — Lists all policies within the project.' +page_help_settings_policies_item: "**Policy Detail** — Manage a policy's permissions and other settings." +page_help_settings_roles_collection: '**Browse Roles** — Lists all user roles within the project.' page_help_settings_roles_item: "**Role Detail** — Manage a role's permissions and other settings."
page_help_settings_presets_collection: >- **Browse Presets** — Lists all presets in the project, including: user, role, and global bookmarks, as well as default @@ -1211,6 +1232,7 @@ singleton: Singleton singleton_label: Treat as single object system_fields_locked: System fields are locked and can't be edited directus_collection: + directus_access: Policy attachments directus_activity: Accountability logs for all events directus_collections: Additional collection configuration and metadata directus_dashboards: Dashboards within the Insights module @@ -1222,7 +1244,8 @@ directus_collection: directus_notifications: Notifications sent to users directus_operations: Operations that run in Flows directus_panels: Individual panels within Insights dashboards - directus_permissions: Access permissions for each role + directus_permissions: Access permissions for each policy + directus_policies: Access control policies directus_presets: Presets for collection defaults and bookmarks directus_relations: Relationship configuration and metadata directus_revisions: Data snapshots for all activity @@ -1402,12 +1425,16 @@ fields: name: Role Name icon: Role Icon description: Description + users: Users in Role + parent: Parent Role + children: Child Roles + directus_policies: + name: Policy Name + description: Description app_access: App Access admin_access: Admin Access ip_access: IP Access enforce_tfa: Require 2FA - users: Users in Role - module_list: Module Navigation directus_webhooks: name: Name method: Method @@ -1491,16 +1518,23 @@ field_options: focal_point_divider: Focal Point filename_disk: Name on disk storage... filename_download: Name when downloading... + directus_policies: + name: A unique name for this policy... + description: A description of this policy... + ip_access: Add allowed IP addresses, IP ranges and CIDR blocks. Leave empty to allow all... 
+ enforce_tfa: Enforce Two-Factor Authentication + assigned_to: Assigned To directus_roles: name: The unique name for this role... description: A description of this role... - ip_access: Add allowed IP addresses, IP ranges and CIDR blocks. Leave empty to allow all... fields: icon_name: Icon name_name: Name name_placeholder: Enter a title... link_name: Link link_placeholder: Relative or absolute URL... + parent_note: Optional parent role that this role inherits permissions from + children_note: Nested child roles that inherit this role's permissions collections_name: Collections collections_addLabel: Add Collection... directus_users: @@ -1588,6 +1622,9 @@ continue_label: Continue continue_as: >- {name} is currently authenticated. If you recognize this account, press continue. editing_role: '{role} Role' +editing_policy: '{policy} Policy' +no_permissions: No Permissions +permission_add_collection: Add Collection creating_webhook: Creating Webhook default_label: Default delete_label: Delete diff --git a/app/src/layouts/tabular/index.ts b/app/src/layouts/tabular/index.ts index b05eab72d2..c59b60803e 100644 --- a/app/src/layouts/tabular/index.ts +++ b/app/src/layouts/tabular/index.ts @@ -11,7 +11,7 @@ import { syncRefProperty } from '@/utils/sync-ref-property'; import { useCollection, useItems, useSync } from '@directus/composables'; import { defineLayout } from '@directus/extensions'; import { Field } from '@directus/types'; -import { debounce } from 'lodash'; +import { debounce, flatten } from 'lodash'; import { computed, ref, toRefs, unref, watch } from 'vue'; import { useRouter } from 'vue-router'; import TabularActions from './actions.vue'; @@ -47,11 +47,9 @@ export default defineLayout({ const { aliasedFields, aliasQuery, aliasedKeys } = useAliasFields(fields, collection); - const fieldsWithRelationalAliased = computed(() => { - return Object.values(aliasedFields.value).reduce((acc, value) => { - return [...acc, ...value.fields]; - }, []); - }); + const
fieldsWithRelationalAliased = computed(() => + flatten(Object.values(aliasedFields.value).map(({ fields }) => fields)), + ); const { items, @@ -166,9 +164,9 @@ export default defineLayout({ const fieldsDefaultValue = computed(() => { return fieldsInCollection.value - .filter((field: Field) => !field.meta?.hidden) + .filter((field) => !field.meta?.hidden && !field.meta?.special?.includes('no-data')) .slice(0, 4) - .map(({ field }: Field) => field) + .map(({ field }) => field) .sort(); }); diff --git a/app/src/modules/files/index.ts b/app/src/modules/files/index.ts index 862b0a0433..e96c120557 100644 --- a/app/src/modules/files/index.ts +++ b/app/src/modules/files/index.ts @@ -70,13 +70,10 @@ export default defineModule({ }, ], preRegisterCheck(user, permissions) { - const admin = user.role.admin_access; + const admin = user.admin_access; if (admin) return true; - const permission = permissions.find( - (permission) => permission.collection === 'directus_files' && permission.action === 'read', - ); - - return !!permission; + const access = permissions['directus_files']?.['read']?.access; + return access === 'partial' || access === 'full'; }, }); diff --git a/app/src/modules/insights/index.ts b/app/src/modules/insights/index.ts index 1e509d74da..349eedd68a 100644 --- a/app/src/modules/insights/index.ts +++ b/app/src/modules/insights/index.ts @@ -37,14 +37,11 @@ export default defineModule({ }, ], preRegisterCheck(user, permissions) { - const admin = user.role.admin_access; + const admin = user.admin_access; if (admin) return true; - const permission = permissions.find( - (permission) => permission.collection === 'directus_dashboards' && permission.action === 'read', - ); - - return !!permission; + const access = permissions['directus_dashboards']?.['read']?.access; + return access === 'partial' || access === 'full'; }, }); diff --git a/app/src/modules/settings/components/navigation.vue b/app/src/modules/settings/components/navigation.vue index 99d39207fa..a6df81bf66 
100644 --- a/app/src/modules/settings/components/navigation.vue +++ b/app/src/modules/settings/components/navigation.vue @@ -25,17 +25,24 @@ const links = computed(() => [ name: t('settings_data_model'), to: `/settings/data-model`, }, - { - icon: 'admin_panel_settings', - name: t('settings_permissions'), - to: `/settings/roles`, - }, { icon: 'bolt', name: t('settings_flows'), to: `/settings/flows`, }, ], + [ + { + icon: 'group', + name: t('settings_roles'), + to: `/settings/roles`, + }, + { + icon: 'admin_panel_settings', + name: t('settings_permissions'), + to: `/settings/policies`, + }, + ], [ { icon: 'tune', diff --git a/app/src/modules/settings/index.ts b/app/src/modules/settings/index.ts index 1910b9c487..f4fe20eb8d 100644 --- a/app/src/modules/settings/index.ts +++ b/app/src/modules/settings/index.ts @@ -17,13 +17,15 @@ import MarketplaceAccount from './routes/marketplace/routes/account/account.vue' import MarketplaceExtension from './routes/marketplace/routes/extension/extension.vue'; import MarketplaceRegistry from './routes/marketplace/routes/registry/registry.vue'; import NotFound from './routes/not-found.vue'; +import PoliciesCollection from './routes/policies/collection.vue'; +import PoliciesItem from './routes/policies/item.vue'; +import NewPolicy from './routes/policies/add-new.vue'; import PresetsCollection from './routes/presets/collection/collection.vue'; import PresetsItem from './routes/presets/item.vue'; import Project from './routes/project/project.vue'; import NewRole from './routes/roles/add-new.vue'; import RolesCollection from './routes/roles/collection.vue'; -import RolesItem from './routes/roles/item/item.vue'; -import RolesPermissionsDetail from './routes/roles/permissions-detail/permissions-detail.vue'; +import RolesItem from './routes/roles/item.vue'; import RolesPublicItem from './routes/roles/public-item.vue'; import TranslationsCollection from './routes/translations/collection.vue'; import TranslationsItem from 
'./routes/translations/item.vue'; @@ -111,6 +113,32 @@ export default defineModule({ }, ], }, + { + path: 'policies', + component: RouterPass, + children: [ + { + name: 'settings-policies-collection', + path: '', + component: PoliciesCollection, + children: [ + { + path: '+', + name: 'settings-add-new-policy', + components: { + add: NewPolicy, + }, + }, + ], + }, + { + name: 'settings-policies-item', + path: ':primaryKey', + component: PoliciesItem, + props: true, + }, + ], + }, { path: 'roles', component: RouterPass, @@ -130,31 +158,15 @@ export default defineModule({ ], }, { + name: 'settings-roles-public-item', path: 'public', component: RolesPublicItem, - props: true, - children: [ - { - path: ':permissionKey', - components: { - permissionsDetail: RolesPermissionsDetail, - }, - }, - ], }, { name: 'settings-roles-item', path: ':primaryKey', component: RolesItem, props: true, - children: [ - { - path: ':permissionKey', - components: { - permissionsDetail: RolesPermissionsDetail, - }, - }, - ], }, ], }, @@ -280,6 +292,6 @@ export default defineModule({ }, ], preRegisterCheck: (user) => { - return user.role.admin_access === true; + return user.admin_access === true; }, }); diff --git a/app/src/modules/settings/routes/policies/add-new.vue b/app/src/modules/settings/routes/policies/add-new.vue new file mode 100644 index 0000000000..658b7f2840 --- /dev/null +++ b/app/src/modules/settings/routes/policies/add-new.vue @@ -0,0 +1,67 @@ + + + + + diff --git a/app/src/modules/settings/routes/policies/collection.vue b/app/src/modules/settings/routes/policies/collection.vue new file mode 100644 index 0000000000..c13d37050b --- /dev/null +++ b/app/src/modules/settings/routes/policies/collection.vue @@ -0,0 +1,237 @@ + + + + + diff --git a/app/src/modules/settings/routes/policies/item.vue b/app/src/modules/settings/routes/policies/item.vue new file mode 100644 index 0000000000..6502fbe010 --- /dev/null +++ b/app/src/modules/settings/routes/policies/item.vue @@ -0,0 +1,195 @@ + 
+ + + + diff --git a/app/src/modules/settings/routes/policies/use-save.ts b/app/src/modules/settings/routes/policies/use-save.ts new file mode 100644 index 0000000000..2bc8897fca --- /dev/null +++ b/app/src/modules/settings/routes/policies/use-save.ts @@ -0,0 +1,48 @@ +import api from '@/api'; +import { appRecommendedPermissions } from '@/app-permissions.js'; +import { unexpectedError } from '@/utils/unexpected-error'; +import type { Ref } from 'vue'; +import { ref } from 'vue'; +import { useRouter } from 'vue-router'; + +export interface UseSaveOptions { + name: Ref; + adminAccess: Ref; + appAccess: Ref; +} + +export function useSave({ name, adminAccess, appAccess }: UseSaveOptions) { + const router = useRouter(); + + const saving = ref(false); + + return { saving, save }; + + async function save() { + saving.value = true; + + try { + const policyResponse = await api.post('/policies', { + name: name.value, + admin_access: adminAccess.value, + app_access: appAccess.value, + }); + + if (appAccess.value === true && adminAccess.value === false) { + await api.post( + '/permissions', + appRecommendedPermissions.map((permission) => ({ + ...permission, + policy: policyResponse.data.data.id, + })), + ); + } + + router.push(`/settings/policies/${policyResponse.data.data.id}`); + } catch (error) { + unexpectedError(error); + } finally { + saving.value = false; + } + } +} diff --git a/app/src/modules/settings/routes/roles/add-new.vue b/app/src/modules/settings/routes/roles/add-new.vue index af6d28c14e..6fd24dc353 100644 --- a/app/src/modules/settings/routes/roles/add-new.vue +++ b/app/src/modules/settings/routes/roles/add-new.vue @@ -1,11 +1,9 @@