Merge branch 'main' into Kinzi/main

This commit is contained in:
rijkvanzanten
2020-12-16 16:54:54 -05:00
534 changed files with 26084 additions and 48664 deletions

View File

@@ -5,7 +5,6 @@ end_of_line = lf
insert_final_newline = true
charset = utf-8
indent_style = tab
indent_size = 4
trim_trailing_whitespace = true
[{package.json,*.yml,*.yaml}]
@@ -13,9 +12,7 @@ indent_style = space
indent_size = 2
[Dockerfile]
indent_size = 2
indent_style = tab
[Makefile]
indent_size = 2
indent_style = tab

31
.eslintrc.js Normal file
View File

@@ -0,0 +1,31 @@
module.exports = {
root: true,
env: {
node: true,
},
extends: ['plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended'],
plugins: ['@typescript-eslint', 'prettier'],
rules: {
'no-console': process.env.NODE_ENV === 'production' ? 'error' : 'off',
'no-debugger': process.env.NODE_ENV === 'production' ? 'error' : 'off',
'@typescript-eslint/camelcase': 0,
'@typescript-eslint/no-use-before-define': 0,
'@typescript-eslint/ban-ts-ignore': 0,
'@typescript-eslint/no-explicit-any': 0,
'@typescript-eslint/no-var-requires': 0,
'prettier/prettier': ['error', { singleQuote: true }],
'comma-dangle': [
'error',
{
arrays: 'always-multiline',
exports: 'always-multiline',
functions: 'never',
imports: 'always-multiline',
objects: 'always-multiline',
},
],
},
parserOptions: {
parser: '@typescript-eslint/parser',
},
};

8
.github/FUNDING.yml vendored Normal file
View File

@@ -0,0 +1,8 @@
# These are supported funding model platforms
github: [directus, benhaynes, rijkvanzanten]
patreon: directus # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
custom: # Replace with a single custom sponsorship URL

30
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
View File

@@ -0,0 +1,30 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
---
<!--
Hi, thank you for taking the time to create an issue.
Can you please provide:
1) The issue and what you expected to happen
The _ _ does _ _ when _ _ while it should _ _
2) Exact steps to reproduce this issue
Click this, tap that, see error _ _
3) Your environment:
Which DBMS are you using (MySQL 8, Postgres 12, ...).
Which deployment are you using (npx, Docker, ...).
What browser are you using (Chrome 87, Safari 14, ...).
4) Any other relevant information we might need to reproduce this issue
A SQL dump of the setup.
What third party services you rely on (S3, managed database, ...).
-->

8
.github/ISSUE_TEMPLATE/config.yml vendored Normal file
View File

@@ -0,0 +1,8 @@
blank_issues_enabled: false
contact_links:
- name: Feature Request
url: https://github.com/directus/directus/discussions/new
about: Share your ideas on how to make Directus better.
- name: Directus Community Support
url: https://directus.chat/
about: Please ask and answer questions here.

View File

@@ -5,7 +5,7 @@ tag=$(version)
cmd=
user=directus
registry=ghcr.io
repository=directus/next
repository=directus/directus
.PHONY: build

View File

@@ -14,7 +14,7 @@ services:
context: "../"
args:
VERSION: "v9.0.0-rc.5"
REPOSITORY: "directus/next"
REPOSITORY: "directus/directus"
ports:
- 8055:8055
networks:

View File

@@ -2,45 +2,9 @@
set -e
function seed() {
# TODO: move users to a separate check, outside database installation
local show=false
local email=${DIRECTUS_ADMIN_EMAIL:-"admin@example.com"}
local password=${DIRECTUS_ADMIN_PASSWORD:-""}
if [ "${password}" == "" ] ; then
password=$(node -e 'console.log(require("nanoid").nanoid(12))')
show=true
fi
print --level=info "Creating administrator role"
local role=$(npx directus roles create --name Administrator --admin)
print --level=info "Creating administrator user"
local user=$(npx directus users create --email "${email}" --password "${password}" --role "${role}")
if [ "${show}" == "true" ] ; then
print --level=info --stdin <<MSG
>
> Email: $email
> Password: $password
>
MSG
else
print --level=info --stdin <<MSG
>
> Email: $email
> Password: <env>
>
MSG
fi
}
function bootstrap() {
local warn=false
print --level=info "Initializing..."
if [ "${KEY}" == "" ] ; then
export KEY=$(uuidgen)
warn=true
@@ -54,20 +18,20 @@ function bootstrap() {
if [ "${warn}" == "true" ] ; then
print --level=warn --stdin <<WARN
>
> WARNING!
> WARNING!
>
> The KEY and SECRET environment variables are not set.
> Some temporary variables were generated to fill the gap,
> but in production this is going to cause problems.
> The KEY and SECRET environment variables are not set. Some
> temporary variables were generated to fill the gap, but in
> production this is going to cause problems.
>
> Reference:
> https://docs.directus.io/reference/environment-variables.html
>
> Please refer to the docs at https://docs.directus.io/
> on how and why to configure them properly
>
WARN
fi
# Install database if using sqlite and file doesn't exist
# Create folder if using sqlite and file doesn't exist
if [ "${DB_CLIENT}" == "sqlite3" ] ; then
if [ "${DB_FILENAME}" == "" ] ; then
print --level=error "Missing DB_FILENAME environment variable"
@@ -77,28 +41,9 @@ WARN
if [ ! -f "${DB_FILENAME}" ] ; then
mkdir -p $(dirname ${DB_FILENAME})
fi
else
print --level=info "Checking database connection"
timeout ${DB_TIMEOUT:-"30"} bash -c 'until nc -z -w 1 "$0" $1; do sleep 1; done' "${DB_HOST}" ${DB_PORT}
#while ! nc -z -w 1 "${DB_HOST}" ${DB_PORT}; do
# print --level=warn "Cannot connect to the database, waiting for the server."
# sleep 1
#done
fi
should_seed=false
set +e
npx directus database install &>/dev/null
if [ "$?" == "0" ] ; then
print --level=info "Database installed"
should_seed=true
fi
set -e
if [ "${should_seed}" == "true" ] ; then
seed
fi
npx directus bootstrap
}
command=""

View File

@@ -69,7 +69,7 @@ function main() {
registry=$(argument registry "")
registry=$(echo "${registry}" | tr '[:upper:]' '[:lower:]')
repository=$(argument repository "directus/next")
repository=$(argument repository "directus/directus")
repository=$(echo "${repository}" | tr '[:upper:]' '[:lower:]')
version=$(argument version "")

View File

@@ -8,6 +8,11 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Sleep for 30 seconds
uses: jakejarvis/wait-action@master
with:
time: '30s'
- name: Checkout
uses: actions/checkout@v2

View File

@@ -0,0 +1,21 @@
name: Deploy Website / Docs
on:
schedule:
- cron: '59 23 * * *'
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: satak/webrequest-action@master
with:
url: ${{ secrets.BUILD_HOOK_WEBSITE }}
method: POST
- uses: satak/webrequest-action@master
with:
url: ${{ secrets.BUILD_HOOK_DOCS }}
method: POST

3
.gitignore vendored
View File

@@ -1,6 +1,6 @@
.DS_Store
node_modules
.vs_code
.vscode
.env
.secrets
npm-debug.log
@@ -11,3 +11,4 @@ dist
*.sublime-settings
*.db
.nyc_output
/.idea/

View File

@@ -1,5 +0,0 @@
{
"printWidth": 100,
"singleQuote": true,
"useTabs": true
}

7
.prettierrc.js Normal file
View File

@@ -0,0 +1,7 @@
module.exports = {
htmlWhitespaceSensitivity: 'ignore',
printWidth: 120,
singleQuote: true,
useTabs: true,
proseWrap: 'always',
};

View File

@@ -5,7 +5,6 @@ end_of_line = lf
insert_final_newline = true
charset = utf-8
indent_style = tab
indent_size = 4
trim_trailing_whitespace = true
[{package.json,*.yml,*.yaml}]

5
api/.eslintrc.js Normal file
View File

@@ -0,0 +1,5 @@
const parentConfig = require('../.eslintrc.js');
module.exports = {
...parentConfig,
};

5
api/.prettierrc.js Normal file
View File

@@ -0,0 +1,5 @@
const parentConfig = require('../.prettierrc.js');
module.exports = {
...parentConfig,
};

View File

@@ -1,31 +1,56 @@
<img width="250" alt="Logo" src="https://user-images.githubusercontent.com/9141017/88821768-0dc99800-d191-11ea-8c66-09c55ab451a2.png">
<p>&nbsp;</p>
## 🐰 Introduction
<a href="https://directus.io" target="_blank" rel="noopener noreferrer"><img width="250" alt="Logo" src="https://user-images.githubusercontent.com/9141017/88821768-0dc99800-d191-11ea-8c66-09c55ab451a2.png"></a>
Welcome to the preview release of the next major version of Directus.
<p>&nbsp;</p>
**NOTE:** This is pre-release software and should be treated as such. DO NOT use this in production.
Migrations between versions aren't provided, and breaking changes might happen at any release.
## Introduction
## ⚙️ Installation
**Directus is a free and open-source data platform for headless content management**. It can be installed on top of any
new or existing SQL database, instantly providing a dynamic API (REST+GraphQL) and accompanying App for managing
content. Built entirely in TypeScript (in Node and Vue), Directus is completely modular and end-to-end extensible...
with absolutely no paywalls or artificial limitations.
_Directus requires NodeJS 10+_
Modern and intuitive, the Directus App enables no-code data discovery, allowing for even the most non-technical users to
view, author, and manage your raw database content. Our performant and flexible API is able to adapt to any relational
schema, and includes rule-based permissions, event/web hooks, custom endpoints, numerous auth options, configurable
storage adapters, and much more.
We've created a little CLI tool you can use to quickly start up a Directus project. You can use it by running:
Current database support includes: PostgreSQL, MySQL, SQLite, MS-SQL Server, OracleDB, MariaDB, and variants such as AWS
Aurora/Redshift or Google Cloud Platform SQL.
Learn more at...
- [Website](https://directus.io/)
- [GitHub](https://github.com/directus/directus)
- [Community](https://directus.chat/)
- [Twitter](https://twitter.com/directus)
- [Docs](https://docs.directus.io/)
- [Marketplace](https://directus.market/)
- [Cloud](http://directus.cloud/)
<p>&nbsp;</p>
## Installing
Directus requires NodeJS 10+. Create a new project with our simple CLI tool:
```
npx create-directus-project my-project
```
or using yarn:
Or using yarn:
```
yarn create directus-project my-project
```
on the command line. This will create the given directory, setup the configuration, and install the database.
The above command will create a directory with your project name, then walk you through the database configuration and
creation of your first admin user.
## ✨ Updating
<p>&nbsp;</p>
## Updating
To update an existing Directus project, navigate to your project directory and run:
@@ -33,19 +58,31 @@ To update an existing Directus project, navigate to your project directory and r
npm update
```
## 🔧 Contributing
<p>&nbsp;</p>
Please report any and all quirks / issues you come across as [an issue](https://github.com/directus/next/issues/new).
## Contributing
Pull requests are more than welcome and always appreciated. Seeing this is in active development, please make sure to reach out to a member of the core team in an issue or [on Discord](http://discord.gg/directus) before you start working on a feature or bug to ensure you don't work on the same thing as somebody else :)
Please report any and all issues [on our GitHub](https://github.com/directus/directus/issues/new).
## ❤️ Supporting Directus
Pull-requests are more than welcome, and always appreciated. Please read our
[Contributors Guide](https://docs.directus.io/getting-started/contributing.html) before starting work on a new feature
or bug, or reach out to a member of the Core Team via [GitHub](https://github.com/directus/directus/discussions) or
[Discord](https://directus.chat) with any questions.
Directus is a GPLv3-licensed open source project with development made possible by support from our core team, contributors, and sponsors. It's not easy building premium open-source software; if you would like to help ensure Directus stays free, please consider becoming a sponsor.
<p>&nbsp;</p>
- [Support us through GitHub Sponsors](https://github.com/sponsors/directus)
- [One-time donation through PayPal](https://www.paypal.me/supportdirectus)
## Supporting
## 📄 License
Directus is a free and open-source project with development made possible by support from our passionate core team,
amazing contributors, and generous sponsors. It's not easy building premium open-source software; if you would like to
help ensure Directus stays free, please consider becoming a sponsor.
Directus is released under [the GPLv3 license](./license). Monospace Inc. owns all Directus trademarks and logos on behalf of our project's community. Copyright © 2006-2020, Monospace Inc.
- [Support us through GitHub Sponsors](https://github.com/sponsors/directus)
- [One-time donation through PayPal](https://www.paypal.me/supportdirectus)
<p>&nbsp;</p>
## License
Directus is released under the [GPLv3 license](./license). Monospace Inc owns all Directus trademarks, logos, and
intellectual property on behalf of our project's community. Copyright © 2004-2020, Monospace Inc.

View File

@@ -53,6 +53,9 @@ CACHE_ENABLED=true
CACHE_TTL="30m"
CACHE_NAMESPACE="directus-cache"
CACHE_STORE=memory # memory | redis | memcache
CACHE_AUTO_PURGE=true
ASSETS_CACHE_TTL="30m"
# CACHE_REDIS="redis://:authpassword@127.0.0.1:6380/4"
# --OR--

View File

@@ -1,8 +1,8 @@
{
"name": "directus",
"version": "9.0.0-rc.14",
"version": "9.0.0-rc.23",
"license": "GPL-3.0-only",
"homepage": "https://github.com/directus/next#readme",
"homepage": "https://github.com/directus/directus#readme",
"description": "Directus is a real-time API and App dashboard for managing SQL database content.",
"keywords": [
"directus",
@@ -24,10 +24,10 @@
],
"repository": {
"type": "git",
"url": "git+https://github.com/directus/next.git"
"url": "git+https://github.com/directus/directus.git"
},
"bugs": {
"url": "https://github.com/directus/next/issues"
"url": "https://github.com/directus/directus/issues"
},
"author": {
"name": "Monospace Inc",
@@ -52,10 +52,12 @@
},
"scripts": {
"start": "npx directus start",
"build": "rm -rf dist && tsc --build && copyfiles \"src/**/*.*\" -e \"src/**/*.ts\" -u 1 dist",
"dev": "cross-env NODE_ENV=development LOG_LEVEL=trace ts-node-dev --files src/start.ts --respawn --watch \"src/**/*.ts\" --watch \".env\" --transpile-only",
"build": "rimraf dist && tsc --build && copyfiles \"src/**/*.*\" -e \"src/**/*.ts\" -u 1 dist",
"dev": "cross-env NODE_ENV=development ts-node-dev --files src/start.ts --respawn --watch \"src/**/*.ts\" --watch \".env\" --transpile-only",
"cli": "cross-env NODE_ENV=development ts-node --script-mode --transpile-only src/cli/index.ts",
"prepublishOnly": "npm run build"
"lint": "eslint \"src/**/*.ts\" cli.js index.js",
"prepublishOnly": "npm run build",
"prettier": "prettier --write \"src/**/*.ts\" cli.js index.js"
},
"files": [
"dist",
@@ -138,8 +140,14 @@
},
"gitHead": "4476da28dbbc2824e680137aa28b2b91b5afabec",
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^4.9.1",
"@typescript-eslint/parser": "^4.9.1",
"copyfiles": "^2.4.0",
"cross-env": "^7.0.2",
"eslint": "^7.15.0",
"eslint-config-prettier": "^7.0.0",
"eslint-plugin-prettier": "^3.2.0",
"prettier": "^2.2.1",
"ts-node-dev": "^1.0.0",
"typescript": "^4.0.5"
}

View File

@@ -9,13 +9,13 @@ let cache: Keyv | null = null;
if (env.CACHE_ENABLED === true) {
validateEnv(['CACHE_NAMESPACE', 'CACHE_TTL', 'CACHE_STORE']);
cache = getKevyInstance();
cache = getKeyvInstance();
cache.on('error', (err) => logger.error(err));
}
export default cache;
function getKevyInstance() {
function getKeyvInstance() {
switch (env.CACHE_STORE) {
case 'redis':
return new Keyv(getConfig('redis'));

View File

@@ -0,0 +1,74 @@
import env from '../../../env';
import logger from '../../../logger';
import installDatabase from '../../../database/seeds/run';
import runMigrations from '../../../database/migrations/run';
import { nanoid } from 'nanoid';
export default async function bootstrap() {
logger.info('Initializing bootstrap...');
if ((await isDatabaseAvailable()) === false) {
logger.error(`Can't connect to the database`);
process.exit(1);
}
const { isInstalled, default: database, schemaInspector } = require('../../../database');
const { RolesService } = require('../../../services/roles');
const { UsersService } = require('../../../services/users');
if ((await isInstalled()) === false) {
logger.info('Installing Directus system tables...');
await installDatabase(database);
const schema = await schemaInspector.overview();
logger.info('Setting up first admin role...');
const rolesService = new RolesService({ schema });
const role = await rolesService.create({ name: 'Admin', admin_access: true });
logger.info('Adding first admin user...');
const usersService = new UsersService({ schema });
let adminEmail = env.ADMIN_EMAIL;
if (!adminEmail) {
logger.info('No admin email provided. Defaulting to "admin@example.com"');
adminEmail = 'admin@example.com';
}
let adminPassword = env.ADMIN_PASSWORD;
if (!adminPassword) {
adminPassword = nanoid(12);
logger.info(`No admin password provided. Defaulting to "${adminPassword}"`);
}
await usersService.create({ email: adminEmail, password: adminPassword, role });
} else {
logger.info('Database already initialized, skipping install');
}
logger.info('Running migrations...');
await runMigrations(database, 'latest');
logger.info('Done');
process.exit(0);
}
async function isDatabaseAvailable() {
const { hasDatabaseConnection } = require('../../../database');
const tries = 5;
const secondsBetweenTries = 5;
for (var i = 0; i < tries; i++) {
if (await hasDatabaseConnection()) {
return true;
}
await new Promise((resolve) => setTimeout(resolve, secondsBetweenTries * 1000));
}
return false;
}

View File

@@ -11,9 +11,11 @@ export default async function count(collection: string) {
const count = Number(records[0].count);
console.log(count);
database.destroy();
process.exit(0);
} catch (err) {
console.error(err);
} finally {
database.destroy();
process.exit(1);
}
}

View File

@@ -8,10 +8,11 @@ export default async function start() {
try {
await installSeeds(database);
await runMigrations(database, 'latest');
database.destroy();
process.exit(0);
} catch (err) {
console.log(err);
process.exit(1);
} finally {
database.destroy();
process.exit(1);
}
}

View File

@@ -15,10 +15,11 @@ export default async function migrate(direction: 'latest' | 'up' | 'down') {
} else {
console.log('✨ Database up to date');
}
database.destroy();
process.exit();
} catch (err) {
console.log(err);
process.exit(1);
} finally {
database.destroy();
process.exit(1);
}
}

View File

@@ -53,7 +53,7 @@ export default async function init(options: Record<string, any>) {
console.log();
console.log('Something went wrong while seeding the database:');
console.log();
console.log(`${err.code && chalk.red(`[${err.code}]`)} ${err.message}`);
console.log(`${chalk.red(`[${err.code || 'Error'}]`)} ${err.message}`);
console.log();
console.log('Please try again');
console.log();
@@ -115,7 +115,7 @@ export default async function init(options: Record<string, any>) {
role: roleID,
});
db.destroy();
await db.destroy();
console.log(`
Your project has been created at ${chalk.green(rootPath)}.
@@ -126,4 +126,6 @@ Start Directus by running:
${chalk.blue('cd')} ${rootPath}
${chalk.blue('npx directus')} start
`);
process.exit(0);
}

View File

@@ -13,9 +13,10 @@ export default async function rolesCreate({ name, admin }: any) {
const id = await service.create({ name, admin_access: admin });
console.log(id);
database.destroy();
process.exit(0);
} catch (err) {
console.error(err);
} finally {
database.destroy();
process.exit(1);
}
}

View File

@@ -13,9 +13,10 @@ export default async function usersCreate({ email, password, role }: any) {
const id = await service.create({ email, password, role, status: 'active' });
console.log(id);
database.destroy();
process.exit(0);
} catch (err) {
console.error(err);
} finally {
database.destroy();
process.exit(1);
}
}

View File

@@ -11,6 +11,7 @@ import dbMigrate from './commands/database/migrate';
import usersCreate from './commands/users/create';
import rolesCreate from './commands/roles/create';
import count from './commands/count';
import bootstrap from './commands/bootstrap';
program.name('directus').usage('[command] [options]');
program.version(pkg.version, '-v, --version');
@@ -52,10 +53,9 @@ rolesCommand
.option('--admin', `whether or not the role has admin access`)
.action(rolesCreate);
program
.command('count <collection>')
.description('Count the amount of items in a given collection')
.action(count);
program.command('count <collection>').description('Count the amount of items in a given collection').action(count);
program.command('bootstrap').description('Initialize or update the database').action(bootstrap);
program.parseAsync(process.argv).catch((err) => {
console.error(err);

View File

@@ -21,11 +21,7 @@ const defaults = {
},
};
export default async function createEnv(
client: keyof typeof drivers,
credentials: Credentials,
directory: string
) {
export default async function createEnv(client: keyof typeof drivers, credentials: Credentials, directory: string) {
const config: Record<string, any> = {
...defaults,
database: {

View File

@@ -9,6 +9,8 @@ import { Transformation } from '../types/assets';
import storage from '../storage';
import { PayloadService, AssetsService } from '../services';
import useCollection from '../middleware/use-collection';
import env from '../env';
import ms from 'ms';
const router = Router();
@@ -30,11 +32,7 @@ router.get(
const isValidUUID = validate(id, 4);
if (isValidUUID === false) throw new ForbiddenException();
const file = await database
.select('id', 'storage', 'filename_disk')
.from('directus_files')
.where({ id })
.first();
const file = await database.select('id', 'storage', 'filename_disk').from('directus_files').where({ id }).first();
if (!file) throw new ForbiddenException();
@@ -64,24 +62,17 @@ router.get(
const transformation = pick(req.query, ASSET_TRANSFORM_QUERY_KEYS);
if (transformation.hasOwnProperty('key') && Object.keys(transformation).length > 1) {
throw new InvalidQueryException(
`You can't combine the "key" query parameter with any other transformation.`
);
throw new InvalidQueryException(`You can't combine the "key" query parameter with any other transformation.`);
}
const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key);
const allKeys: string[] = [
...systemKeys,
...(assetSettings.storage_asset_presets || []).map(
(transformation: Transformation) => transformation.key
),
...(assetSettings.storage_asset_presets || []).map((transformation: Transformation) => transformation.key),
];
// For use in the next request handler
res.locals.shortcuts = [
...SYSTEM_ASSET_ALLOW_LIST,
...(assetSettings.storage_asset_presets || []),
];
res.locals.shortcuts = [...SYSTEM_ASSET_ALLOW_LIST, ...(assetSettings.storage_asset_presets || [])];
res.locals.transformation = transformation;
if (Object.keys(transformation).length === 0) {
@@ -93,15 +84,10 @@ router.get(
return next();
} else if (assetSettings.storage_asset_transform === 'shortcut') {
if (allKeys.includes(transformation.key as string)) return next();
throw new InvalidQueryException(
`Only configured shortcuts can be used in asset generation.`
);
throw new InvalidQueryException(`Only configured shortcuts can be used in asset generation.`);
} else {
if (transformation.key && systemKeys.includes(transformation.key as string))
return next();
throw new InvalidQueryException(
`Dynamic asset generation has been disabled for this project.`
);
if (transformation.key && systemKeys.includes(transformation.key as string)) return next();
throw new InvalidQueryException(`Dynamic asset generation has been disabled for this project.`);
}
}),
@@ -114,8 +100,7 @@ router.get(
const transformation: Transformation = res.locals.transformation.key
? res.locals.shortcuts.find(
(transformation: Transformation) =>
transformation.key === res.locals.transformation.key
(transformation: Transformation) => transformation.key === res.locals.transformation.key
)
: res.locals.transformation;
@@ -128,6 +113,8 @@ router.get(
res.removeHeader('Content-Disposition');
}
const access = !!req.accountability?.role ? 'private' : 'public';
res.setHeader('Cache-Control', `${access}, max-age="${ms(env.ASSETS_CACHE_TTL as string)}"`);
stream.pipe(res);
})
);

View File

@@ -68,8 +68,7 @@ router.post(
httpOnly: true,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
secure: env.REFRESH_TOKEN_COOKIE_SECURE === 'true' ? true : false,
sameSite:
(env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
sameSite: (env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
});
}
@@ -97,16 +96,12 @@ router.post(
const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;
if (!currentRefreshToken) {
throw new InvalidPayloadException(
`"refresh_token" is required in either the JSON payload or Cookie`
);
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
}
const mode: 'json' | 'cookie' = req.body.mode || req.body.refresh_token ? 'json' : 'cookie';
const { accessToken, refreshToken, expires } = await authenticationService.refresh(
currentRefreshToken
);
const { accessToken, refreshToken, expires } = await authenticationService.refresh(currentRefreshToken);
const payload = {
data: { access_token: accessToken, expires },
@@ -121,8 +116,7 @@ router.post(
httpOnly: true,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
secure: env.REFRESH_TOKEN_COOKIE_SECURE === 'true' ? true : false,
sameSite:
(env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
sameSite: (env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
});
}
@@ -150,9 +144,7 @@ router.post(
const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;
if (!currentRefreshToken) {
throw new InvalidPayloadException(
`"refresh_token" is required in either the JSON payload or Cookie`
);
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
}
await authenticationService.logout(currentRefreshToken);
@@ -222,10 +214,7 @@ router.get(
respond
);
router.use(
'/oauth',
session({ secret: env.SECRET as string, saveUninitialized: false, resave: false })
);
router.use('/oauth', session({ secret: env.SECRET as string, saveUninitialized: false, resave: false }));
router.get(
'/oauth/:provider',
@@ -279,8 +268,7 @@ router.get(
httpOnly: true,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
secure: env.REFRESH_TOKEN_COOKIE_SECURE === 'true' ? true : false,
sameSite:
(env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
sameSite: (env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
});
return res.redirect(redirect);

View File

@@ -52,8 +52,7 @@ router.get(
schema: req.schema,
});
if (req.params.field in req.schema[req.params.collection].columns === false)
throw new ForbiddenException();
if (req.params.field in req.schema[req.params.collection].columns === false) throw new ForbiddenException();
const field = await service.readOne(req.params.collection, req.params.field);
@@ -80,8 +79,7 @@ router.post(
'/:collection',
validateCollection,
asyncHandler(async (req, res, next) => {
if (!req.body.schema && !req.body.meta)
throw new InvalidPayloadException(`"schema" or "meta" is required`);
if (!req.body.schema && !req.body.meta) throw new InvalidPayloadException(`"schema" or "meta" is required`);
const service = new FieldsService({
accountability: req.accountability,

View File

@@ -68,11 +68,7 @@ const multipartHandler = asyncHandler(async (req, res, next) => {
};
try {
const primaryKey = await service.upload(
fileStream,
payloadWithRequiredFields,
existingPrimaryKey
);
const primaryKey = await service.upload(fileStream, payloadWithRequiredFields, existingPrimaryKey);
savedFiles.push(primaryKey);
tryDone();
} catch (error) {

View File

@@ -2,11 +2,7 @@ import express from 'express';
import asyncHandler from 'express-async-handler';
import collectionExists from '../middleware/collection-exists';
import { ItemsService, MetaService } from '../services';
import {
RouteNotFoundException,
ForbiddenException,
FailedValidationException,
} from '../exceptions';
import { RouteNotFoundException, ForbiddenException, FailedValidationException } from '../exceptions';
import { respond } from '../middleware/respond';
import { InvalidPayloadException } from '../exceptions';
import { PrimaryKey } from '../types';
@@ -52,6 +48,7 @@ router.get(
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
@@ -67,6 +64,7 @@ router.get(
meta: meta,
data: records || null,
};
return next();
}),
respond

View File

@@ -2,11 +2,7 @@ import express from 'express';
import asyncHandler from 'express-async-handler';
import { PermissionsService, MetaService } from '../services';
import { clone } from 'lodash';
import {
InvalidCredentialsException,
ForbiddenException,
InvalidPayloadException,
} from '../exceptions';
import { InvalidCredentialsException, ForbiddenException, InvalidPayloadException } from '../exceptions';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';
import { PrimaryKey } from '../types';

View File

@@ -1,11 +1,7 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import Joi from 'joi';
import {
InvalidPayloadException,
InvalidCredentialsException,
ForbiddenException,
} from '../exceptions';
import { InvalidPayloadException, InvalidCredentialsException, ForbiddenException } from '../exceptions';
import { UsersService, MetaService, AuthenticationService } from '../services';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';
@@ -205,10 +201,7 @@ router.delete(
);
const inviteSchema = Joi.object({
email: Joi.alternatives(
Joi.string().email(),
Joi.array().items(Joi.string().email())
).required(),
email: Joi.alternatives(Joi.string().email(), Joi.array().items(Joi.string().email())).required(),
role: Joi.string().uuid({ version: 'uuidv4' }).required(),
});

View File

@@ -7,34 +7,22 @@ import env from '../env';
import { performance } from 'perf_hooks';
import SchemaInspector from '@directus/schema';
import { getConfigFromEnv } from '../utils/get-config-from-env';
dotenv.config({ path: path.resolve(__dirname, '../../', '.env') });
const connectionConfig: Record<string, any> = {};
for (let [key, value] of Object.entries(env)) {
key = key.toLowerCase();
if (key.startsWith('db') === false) continue;
if (key === 'db_client') continue;
if (key === 'db_search_path') continue;
if (key === 'db_connection_string') continue;
key = key.slice(3); // remove `DB_`
connectionConfig[camelCase(key)] = value;
}
const connectionConfig: Record<string, any> = getConfigFromEnv('DB_', [
'DB_CLIENT',
'DB_SEARCH_PATH',
'DB_CONNECTION_STRING',
]);
const knexConfig: Config = {
client: env.DB_CLIENT,
searchPath: env.DB_SEARCH_PATH,
connection: env.DB_CONNECTION_STRING || connectionConfig,
log: {
warn: (msg) => {
/** @note this is wild */
if (msg === '.returning() is not supported by mysql and will not have any effect.')
return;
logger.warn(msg);
},
warn: (msg) => logger.warn(msg),
error: (msg) => logger.error(msg),
deprecate: (msg) => logger.info(msg),
debug: (msg) => logger.debug(msg),
@@ -58,9 +46,18 @@ database
logger.trace(`[${delta.toFixed(3)}ms] ${queryInfo.sql} [${queryInfo.bindings.join(', ')}]`);
});
export async function hasDatabaseConnection() {
try {
await database.raw('select 1 + 1 as result');
return true;
} catch {
return false;
}
}
export async function validateDBConnection() {
try {
await database.raw('select 1+1 as result');
await hasDatabaseConnection();
} catch (error) {
logger.fatal(`Can't connect to the database.`);
logger.fatal(error);

View File

@@ -1206,8 +1206,7 @@ const systemFields = [
text: 'Weak Minimum 8 Characters',
},
{
value:
"/(?=^.{8,}$)(?=.*\\d)(?=.*[a-z])(?=.*[A-Z])(?=.*[!@#$%^&*()_+}{';'?>.<,])(?!.*\\s).*$/",
value: "/(?=^.{8,}$)(?=.*\\d)(?=.*[a-z])(?=.*[A-Z])(?=.*[!@#$%^&*()_+}{';'?>.<,])(?!.*\\s).*$/",
text: 'Strong Upper / Lowercase / Numbers / Special',
},
],
@@ -1643,10 +1642,7 @@ const systemFields = [
export async function up(knex: Knex) {
const fieldKeys = uniq(systemFields.map((field: any) => field.field));
await knex('directus_fields')
.delete()
.where('collection', 'like', 'directus_%')
.whereIn('field', fieldKeys);
await knex('directus_fields').delete().where('collection', 'like', 'directus_%').whereIn('field', fieldKeys);
}
export async function down(knex: Knex) {

View File

@@ -145,11 +145,7 @@ export async function down(knex: Knex) {
for (const constraint of update.constraints) {
table.dropForeign([constraint.column]);
table
.foreign(constraint.column)
.references(constraint.references)
.onUpdate('NO ACTION')
.onDelete('NO ACTION');
table.foreign(constraint.column).references(constraint.references).onUpdate('NO ACTION').onDelete('NO ACTION');
}
});
}

View File

@@ -2,6 +2,7 @@ import fse from 'fs-extra';
import Knex from 'knex';
import path from 'path';
import formatTitle from '@directus/format-title';
import env from '../../env';
type Migration = {
version: string;
@@ -12,27 +13,33 @@ type Migration = {
export default async function run(database: Knex, direction: 'up' | 'down' | 'latest') {
let migrationFiles = await fse.readdir(__dirname);
const customMigrationsPath = path.resolve(env.EXTENSIONS_PATH, 'migrations');
const customMigrationFiles =
((await fse.pathExists(customMigrationsPath)) && (await fse.readdir(customMigrationsPath))) || [];
migrationFiles = migrationFiles.filter(
(file: string) => file.startsWith('run') === false && file.endsWith('.d.ts') === false
);
const completedMigrations = await database
.select<Migration[]>('*')
.from('directus_migrations')
.orderBy('version');
const completedMigrations = await database.select<Migration[]>('*').from('directus_migrations').orderBy('version');
const migrations = migrationFiles.map((migrationFile) => {
const version = migrationFile.split('-')[0];
const name = formatTitle(migrationFile.split('-').slice(1).join('_').split('.')[0]);
const migrations = [
...migrationFiles.map((path) => parseFilePath(path)),
...customMigrationFiles.map((path) => parseFilePath(path, true)),
];
function parseFilePath(filePath: string, custom: boolean = false) {
const version = filePath.split('-')[0];
const name = formatTitle(filePath.split('-').slice(1).join('_').split('.')[0]);
const completed = !!completedMigrations.find((migration) => migration.version === version);
return {
file: migrationFile,
file: custom ? path.join(customMigrationsPath, filePath) : path.join(__dirname, filePath),
version,
name,
completed,
};
});
}
if (direction === 'up') await up();
if (direction === 'down') await down();
@@ -55,11 +62,9 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
throw Error('Nothing to upgrade');
}
const { up } = require(path.join(__dirname, nextVersion.file));
const { up } = require(nextVersion.file);
await up(database);
await database
.insert({ version: nextVersion.version, name: nextVersion.name })
.into('directus_migrations');
await database.insert({ version: nextVersion.version, name: nextVersion.name }).into('directus_migrations');
}
async function down() {
@@ -69,15 +74,13 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
throw Error('Nothing to downgrade');
}
const migration = migrations.find(
(migration) => migration.version === currentVersion.version
);
const migration = migrations.find((migration) => migration.version === currentVersion.version);
if (!migration) {
throw new Error('Couldnt find migration');
}
const { down } = require(path.join(__dirname, migration.file));
const { down } = require(migration.file);
await down(database);
await database('directus_migrations').delete().where({ version: migration.version });
}
@@ -85,11 +88,9 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
async function latest() {
for (const migration of migrations) {
if (migration.completed === false) {
const { up } = require(path.join(__dirname, migration.file));
const { up } = require(migration.file);
await up(database);
await database
.insert({ version: migration.version, name: migration.name })
.into('directus_migrations');
await database.insert({ version: migration.version, name: migration.name }).into('directus_migrations');
}
}
}

View File

@@ -26,11 +26,7 @@ export default async function runAST(
const results: { [collection: string]: null | Item | Item[] } = {};
for (const collection of ast.names) {
results[collection] = await run(
collection,
ast.children[collection],
ast.query[collection]
);
results[collection] = await run(collection, ast.children[collection], ast.query[collection]);
}
return results;
@@ -38,11 +34,7 @@ export default async function runAST(
return await run(ast.name, ast.children, options?.query || ast.query);
}
async function run(
collection: string,
children: (NestedCollectionNode | FieldNode)[],
query: Query
) {
async function run(collection: string, children: (NestedCollectionNode | FieldNode)[], query: Query) {
// Retrieve the database columns to select in the current AST
const { columnsToSelect, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel(
collection,
@@ -51,14 +43,7 @@ export default async function runAST(
);
// The actual knex query builder instance. This is a promise that resolves with the raw items from the db
const dbQuery = await getDBQuery(
knex,
collection,
columnsToSelect,
query,
primaryKeyField,
schema
);
const dbQuery = await getDBQuery(knex, collection, columnsToSelect, query, primaryKeyField, schema);
const rawItems: Item | Item[] = await dbQuery;
@@ -80,8 +65,8 @@ export default async function runAST(
// all nested items for all parent items at once. Because of this, we can't limit that query
// to the "standard" item limit. Instead of _n_ nested items per parent item, it would mean
// that there's _n_ items, which are then divided on the parent items. (no good)
if (nestedNode.type === 'o2m' && typeof nestedNode.query.limit === 'number') {
tempLimit = nestedNode.query.limit;
if (nestedNode.type === 'o2m') {
tempLimit = nestedNode.query.limit || 100;
nestedNode.query.limit = -1;
}
@@ -173,10 +158,7 @@ async function getDBQuery(
return dbQuery;
}
function applyParentFilters(
nestedCollectionNodes: NestedCollectionNode[],
parentItem: Item | Item[]
) {
function applyParentFilters(nestedCollectionNodes: NestedCollectionNode[], parentItem: Item | Item[]) {
const parentItems = toArray(parentItem);
for (const nestedNode of nestedCollectionNodes) {
@@ -188,9 +170,7 @@ function applyParentFilters(
filter: {
...(nestedNode.query.filter || {}),
[nestedNode.relation.one_primary!]: {
_in: uniq(
parentItems.map((res) => res[nestedNode.relation.many_field])
).filter((id) => id),
_in: uniq(parentItems.map((res) => res[nestedNode.relation.many_field])).filter((id) => id),
},
},
};
@@ -208,9 +188,7 @@ function applyParentFilters(
filter: {
...(nestedNode.query.filter || {}),
[nestedNode.relation.many_field]: {
_in: uniq(parentItems.map((res) => res[nestedNode.parentKey])).filter(
(id) => id
),
_in: uniq(parentItems.map((res) => res[nestedNode.parentKey])).filter((id) => id),
},
},
};
@@ -256,9 +234,7 @@ function mergeWithParentItems(
if (nestedNode.type === 'm2o') {
for (const parentItem of parentItems) {
const itemChild = nestedItems.find((nestedItem) => {
return (
nestedItem[nestedNode.relation.one_primary!] === parentItem[nestedNode.fieldKey]
);
return nestedItem[nestedNode.relation.one_primary!] == parentItem[nestedNode.fieldKey];
});
parentItem[nestedNode.fieldKey] = itemChild || null;
@@ -270,11 +246,9 @@ function mergeWithParentItems(
if (Array.isArray(nestedItem[nestedNode.relation.many_field])) return true;
return (
nestedItem[nestedNode.relation.many_field] ===
parentItem[nestedNode.relation.one_primary!] ||
nestedItem[nestedNode.relation.many_field]?.[
nestedNode.relation.one_primary!
] === parentItem[nestedNode.relation.one_primary!]
nestedItem[nestedNode.relation.many_field] == parentItem[nestedNode.relation.one_primary!] ||
nestedItem[nestedNode.relation.many_field]?.[nestedNode.relation.one_primary!] ==
parentItem[nestedNode.relation.one_primary!]
);
});
@@ -290,14 +264,9 @@ function mergeWithParentItems(
for (const parentItem of parentItems) {
const relatedCollection = parentItem[nestedNode.relation.one_collection_field!];
const itemChild = (nestedItem as Record<string, any[]>)[relatedCollection].find(
(nestedItem) => {
return (
nestedItem[nestedNode.relatedKey[relatedCollection]] ===
parentItem[nestedNode.fieldKey]
);
}
);
const itemChild = (nestedItem as Record<string, any[]>)[relatedCollection].find((nestedItem) => {
return nestedItem[nestedNode.relatedKey[relatedCollection]] == parentItem[nestedNode.fieldKey];
});
parentItem[nestedNode.fieldKey] = itemChild || null;
}
@@ -321,8 +290,7 @@ function removeTemporaryFields(
for (const relatedCollection of ast.names) {
if (!fields[relatedCollection]) fields[relatedCollection] = [];
if (!nestedCollectionNodes[relatedCollection])
nestedCollectionNodes[relatedCollection] = [];
if (!nestedCollectionNodes[relatedCollection]) nestedCollectionNodes[relatedCollection] = [];
for (const child of ast.children[relatedCollection]) {
if (child.type === 'field') {
@@ -350,10 +318,7 @@ function removeTemporaryFields(
);
}
item =
fields[relatedCollection].length > 0
? pick(rawItem, fields[relatedCollection])
: rawItem[primaryKeyField];
item = fields[relatedCollection].length > 0 ? pick(rawItem, fields[relatedCollection]) : rawItem[primaryKeyField];
items.push(item);
}
@@ -379,9 +344,7 @@ function removeTemporaryFields(
item[nestedNode.fieldKey] = removeTemporaryFields(
item[nestedNode.fieldKey],
nestedNode,
nestedNode.type === 'm2o'
? nestedNode.relation.one_primary!
: nestedNode.relation.many_primary,
nestedNode.type === 'm2o' ? nestedNode.relation.one_primary! : nestedNode.relation.many_primary,
item
);
}

View File

@@ -86,9 +86,7 @@ export default async function runSeed(database: Knex) {
}
if (columnInfo.references) {
column
.references(columnInfo.references.column)
.inTable(columnInfo.references.table);
column.references(columnInfo.references.column).inTable(columnInfo.references.table);
}
}
});

View File

@@ -21,8 +21,10 @@ data:
- collection: directus_files
icon: folder
note: Metadata for all managed file assets
display_template: "{{ title }}"
- collection: directus_folders
note: Provides virtual directories for files
display_template: "{{ name }}"
- collection: directus_migrations
note: What version of the database you're using
- collection: directus_permissions
@@ -50,5 +52,6 @@ data:
unarchive_value: draft
icon: people_alt
note: System users for the platform
display_template: "{{ first_name }} {{ last_name }}"
- collection: directus_webhooks
note: Configuration for event-based HTTP requests

View File

@@ -4,8 +4,6 @@ import { CollectionMeta } from '../../../types';
const systemData = requireYAML(require.resolve('./collections.yaml'));
export const systemCollectionRows: CollectionMeta[] = systemData.data.map(
(row: Record<string, any>) => {
return merge({ system: true }, systemData.defaults, row);
}
);
export const systemCollectionRows: CollectionMeta[] = systemData.data.map((row: Record<string, any>) => {
return merge({ system: true }, systemData.defaults, row);
});

View File

@@ -30,7 +30,7 @@ fields:
translations:
language: en-US
translations: Brand Color
width: half
width: full
- field: project_logo
interface: file
@@ -136,7 +136,6 @@ fields:
text: Fit inside
- value: outside
text: Fit outside
required: true
width: half
- field: width
name: Width
@@ -166,7 +165,6 @@ fields:
max: 100
min: 0
step: 1
required: true
width: half
- field: withoutEnlargement
type: boolean

View File

@@ -64,71 +64,7 @@ fields:
width: full
- field: language
interface: dropdown
options:
choices:
- text: Afrikaans (South Africa)
value: af-ZA
- text: Arabic (Saudi Arabia)
value: ar-SA
- text: Catalan (Spain)
value: ca-ES
- text: Chinese (Simplified)
value: zh-CN
- text: Czech (Czech Republic)
value: cs-CZ
- text: Danish (Denmark)
value: da-DK
- text: Dutch (Netherlands)
value: nl-NL
- text: English (United States)
value: en-US
- text: Finnish (Finland)
value: fi-FI
- text: French (France)
value: fr-FR
- text: German (Germany)
value: de-DE
- text: Greek (Greece)
value: el-GR
- text: Hebrew (Israel)
value: he-IL
- text: Hungarian (Hungary)
value: hu-HU
- text: Icelandic (Iceland)
value: is-IS
- text: Indonesian (Indonesia)
value: id-ID
- text: Italian (Italy)
value: it-IT
- text: Japanese (Japan)
value: ja-JP
- text: Korean (Korea)
value: ko-KR
- text: Malay (Malaysia)
value: ms-MY
- text: Norwegian (Norway)
value: no-NO
- text: Polish (Poland)
value: pl-PL
- text: Portuguese (Brazil)
value: pt-BR
- text: Portuguese (Portugal)
value: pt-PT
- text: Russian (Russian Federation)
value: ru-RU
- text: Spanish (Spain)
value: es-ES
- text: Spanish (Latin America)
value: es-419
- text: Taiwanese Mandarin (Taiwan)
value: zh-TW
- text: Turkish (Turkey)
value: tr-TR
- text: Ukrainian (Ukraine)
value: uk-UA
- text: Vietnamese (Vietnam)
value: vi-VN
interface: system-language
width: half
- field: theme

View File

@@ -3,6 +3,6 @@ import { EventEmitter2 } from 'eventemitter2';
const emitter = new EventEmitter2({ wildcard: true, verboseMemoryLeak: true, delimiter: '.' });
// No-op function to ensure we never end up with no data
emitter.on('*.*.before', input => input);
emitter.on('*.*.before', (input) => input);
export default emitter;

View File

@@ -33,6 +33,8 @@ const defaults: Record<string, any> = {
CACHE_STORE: 'memory',
CACHE_TTL: '30m',
CACHE_NAMESPACE: 'system-cache',
CACHE_AUTO_PURGE: false,
ASSETS_CACHE_TTL: '30m',
OAUTH_PROVIDERS: '',
@@ -62,7 +64,7 @@ function processValues(env: Record<string, any>) {
if (value === 'true') env[key] = true;
if (value === 'false') env[key] = false;
if (value === 'null') env[key] = null;
if (isNaN(value) === false && value.length > 0) env[key] = Number(value);
if (String(value).startsWith('0') === false && isNaN(value) === false && value.length > 0) env[key] = Number(value);
}
return env;

View File

@@ -33,12 +33,9 @@ export async function listExtensions(type: string) {
return await listFolders(location);
} catch (err) {
if (err.code === 'ENOENT') {
throw new ServiceUnavailableException(
`Extension folder "extensions/${type}" couldn't be opened`,
{
service: 'extensions',
}
);
throw new ServiceUnavailableException(`Extension folder "extensions/${type}" couldn't be opened`, {
service: 'extensions',
});
}
throw err;
}
@@ -78,9 +75,7 @@ function registerHooks(hooks: string[]) {
function registerHook(hook: string) {
const hookPath = path.resolve(extensionsPath, 'hooks', hook, 'index.js');
const hookInstance:
| HookRegisterFunction
| { default?: HookRegisterFunction } = require(hookPath);
const hookInstance: HookRegisterFunction | { default?: HookRegisterFunction } = require(hookPath);
let register: HookRegisterFunction = hookInstance as HookRegisterFunction;
if (typeof hookInstance !== 'function') {
@@ -110,9 +105,7 @@ function registerEndpoints(endpoints: string[], router: Router) {
function registerEndpoint(endpoint: string) {
const endpointPath = path.resolve(extensionsPath, 'endpoints', endpoint, 'index.js');
const endpointInstance:
| EndpointRegisterFunction
| { default?: EndpointRegisterFunction } = require(endpointPath);
const endpointInstance: EndpointRegisterFunction | { default?: EndpointRegisterFunction } = require(endpointPath);
let register: EndpointRegisterFunction = endpointInstance as EndpointRegisterFunction;
if (typeof endpointInstance !== 'function') {

View File

@@ -14,7 +14,7 @@ const liquidEngine = new Liquid({
extname: '.liquid',
});
let transporter: Transporter;
let transporter: Transporter | null = null;
if (env.EMAIL_TRANSPORT === 'sendmail') {
transporter = nodemailer.createTransport({
@@ -24,15 +24,28 @@ if (env.EMAIL_TRANSPORT === 'sendmail') {
});
} else if (env.EMAIL_TRANSPORT.toLowerCase() === 'smtp') {
transporter = nodemailer.createTransport({
pool: env.EMAIL_SMTP_POOL === 'true',
pool: env.EMAIL_SMTP_POOL,
host: env.EMAIL_SMTP_HOST,
port: Number(env.EMAIL_SMTP_PORT),
secure: env.EMAIL_SMTP_SECURE === 'true',
port: env.EMAIL_SMTP_PORT,
secure: env.EMAIL_SMTP_SECURE,
auth: {
user: env.EMAIL_SMTP_USER,
pass: env.EMAIL_SMTP_PASSWORD,
},
} as any);
} else {
logger.warn('Illegal transport given for email. Check the EMAIL_TRANSPORT env var.');
}
if (transporter) {
transporter.verify((error) => {
if (error) {
logger.warn(`Couldn't connect to email server.`);
logger.warn(`Email verification error: ${error}`);
} else {
logger.info(`Email connection established`);
}
});
}
export type EmailOptions = {
@@ -72,6 +85,8 @@ async function getDefaultTemplateOptions() {
}
export default async function sendMail(options: EmailOptions) {
if (!transporter) return;
const templateString = await readFile(path.join(__dirname, 'templates/base.liquid'), 'utf8');
const html = await liquidEngine.parseAndRender(templateString, { html: options.html });
@@ -86,6 +101,8 @@ export default async function sendMail(options: EmailOptions) {
}
export async function sendInviteMail(email: string, url: string) {
if (!transporter) return;
const defaultOptions = await getDefaultTemplateOptions();
const html = await liquidEngine.renderFile('user-invitation', {
@@ -103,6 +120,8 @@ export async function sendInviteMail(email: string, url: string) {
}
export async function sendPasswordResetMail(email: string, url: string) {
if (!transporter) return;
const defaultOptions = await getDefaultTemplateOptions();
const html = await liquidEngine.renderFile('password-reset', {

View File

@@ -74,9 +74,7 @@ const authenticate: RequestHandler = asyncHandler(async (req, res, next) => {
}
if (req.accountability?.user) {
await database('directus_users')
.update({ last_access: new Date() })
.where({ id: req.accountability.user });
await database('directus_users').update({ last_access: new Date() }).where({ id: req.accountability.user });
}
return next();

View File

@@ -14,6 +14,14 @@ const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next)
const cachedData = await cache.get(key);
if (cachedData) {
// Set cache-control header
if (env.CACHE_AUTO_PURGE !== true) {
const expiresAt = await cache.get(`${key}__expires_at`);
const maxAge = `max-age="${expiresAt - Date.now()}"`;
const access = !!req.accountability?.role === false ? 'public' : 'private';
res.setHeader('Cache-Control', `${access}, ${maxAge}`);
}
return res.json(cachedData);
} else {
return next();

View File

@@ -12,7 +12,6 @@ export const checkIP: RequestHandler = asyncHandler(async (req, res, next) => {
const ipAllowlist = (role?.ip_access || '').split(',').filter((ip: string) => ip);
if (ipAllowlist.length > 0 && ipAllowlist.includes(req.accountability!.ip) === false)
throw new InvalidIPException();
if (ipAllowlist.length > 0 && ipAllowlist.includes(req.accountability!.ip) === false) throw new InvalidIPException();
return next();
});

View File

@@ -27,13 +27,10 @@ if (env.RATE_LIMITER_ENABLED === true) {
if (rateLimiterRes instanceof Error) throw rateLimiterRes;
res.set('Retry-After', String(rateLimiterRes.msBeforeNext / 1000));
throw new HitRateLimitException(
`Too many requests, retry after ${ms(rateLimiterRes.msBeforeNext)}.`,
{
limit: +env.RATE_LIMITER_POINTS,
reset: new Date(Date.now() + rateLimiterRes.msBeforeNext),
}
);
throw new HitRateLimitException(`Too many requests, retry after ${ms(rateLimiterRes.msBeforeNext)}.`, {
limit: +env.RATE_LIMITER_POINTS,
reset: new Date(Date.now() + rateLimiterRes.msBeforeNext),
});
}
next();
@@ -56,25 +53,18 @@ function getRateLimiter() {
function getConfig(store?: 'memory'): IRateLimiterOptions;
function getConfig(store: 'redis' | 'memcache'): IRateLimiterStoreOptions;
function getConfig(
store: 'memory' | 'redis' | 'memcache' = 'memory'
): IRateLimiterOptions | IRateLimiterStoreOptions {
function getConfig(store: 'memory' | 'redis' | 'memcache' = 'memory'): IRateLimiterOptions | IRateLimiterStoreOptions {
const config: any = getConfigFromEnv('RATE_LIMITER_', `RATE_LIMITER_${store}_`);
if (store === 'redis') {
const Redis = require('ioredis');
delete config.redis;
config.storeClient = new Redis(
env.RATE_LIMITER_REDIS || getConfigFromEnv('RATE_LIMITER_REDIS_')
);
config.storeClient = new Redis(env.RATE_LIMITER_REDIS || getConfigFromEnv('RATE_LIMITER_REDIS_'));
}
if (store === 'memcache') {
const Memcached = require('memcached');
config.storeClient = new Memcached(
env.RATE_LIMITER_MEMCACHE,
getConfigFromEnv('RATE_LIMITER_MEMCACHE_')
);
config.storeClient = new Memcached(env.RATE_LIMITER_MEMCACHE, getConfigFromEnv('RATE_LIMITER_MEMCACHE_'));
}
delete config.enabled;

View File

@@ -5,16 +5,20 @@ import { getCacheKey } from '../utils/get-cache-key';
import cache from '../cache';
import { Transform, transforms } from 'json2csv';
import { PassThrough } from 'stream';
import ms from 'ms';
export const respond: RequestHandler = asyncHandler(async (req, res) => {
if (
req.method.toLowerCase() === 'get' &&
env.CACHE_ENABLED === true &&
cache &&
!req.sanitizedQuery.export
) {
if (req.method.toLowerCase() === 'get' && env.CACHE_ENABLED === true && cache && !req.sanitizedQuery.export) {
const key = getCacheKey(req);
await cache.set(key, res.locals.payload);
await cache.set(key, res.locals.payload, ms(env.CACHE_TTL as string));
await cache.set(`${key}__expires_at`, Date.now() + ms(env.CACHE_TTL as string));
// Set cache-control header
if (env.CACHE_AUTO_PURGE !== true) {
const maxAge = `max-age="${ms(env.CACHE_TTL as string)}"`;
const access = !!req.accountability?.role === false ? 'public' : 'private';
res.setHeader('Cache-Control', `${access}, ${maxAge}`);
}
}
if (req.sanitizedQuery.export) {

View File

@@ -1,9 +1,18 @@
import { RequestHandler } from 'express';
import asyncHandler from 'express-async-handler';
import { schemaInspector } from '../database';
import logger from '../logger';
const getSchema: RequestHandler = asyncHandler(async (req, res, next) => {
const schemaOverview = await schemaInspector.overview();
for (const [collection, info] of Object.entries(schemaOverview)) {
if (!info.primary) {
logger.warn(`Collection "${collection}" doesn't have a primary key column and will be ignored`);
delete schemaOverview[collection];
}
}
req.schema = schemaOverview;
return next();

View File

@@ -37,10 +37,7 @@ export default async function createServer() {
// Compatibility when supporting serving with certificates
const protocol = server instanceof https.Server ? 'https' : 'http';
const url = new URL(
(req.originalUrl || req.url) as string,
`${protocol}://${req.headers.host}`
);
const url = new URL((req.originalUrl || req.url) as string, `${protocol}://${req.headers.host}`);
const query = url.search.startsWith('?') ? url.search.substr(1) : url.search;
const info = {
@@ -62,10 +59,7 @@ export default async function createServer() {
size: metrics.out,
headers: res.getHeaders(),
},
ip:
req.headers['x-forwarded-for'] ||
req.connection?.remoteAddress ||
req.socket?.remoteAddress,
ip: req.headers['x-forwarded-for'] || req.connection?.remoteAddress || req.socket?.remoteAddress,
duration: elapsedMilliseconds.toFixed(),
};

View File

@@ -3,11 +3,7 @@ import jwt from 'jsonwebtoken';
import argon2 from 'argon2';
import { nanoid } from 'nanoid';
import ms from 'ms';
import {
InvalidCredentialsException,
InvalidPayloadException,
InvalidOTPException,
} from '../exceptions';
import { InvalidCredentialsException, InvalidPayloadException, InvalidOTPException } from '../exceptions';
import { Session, Accountability, AbstractServiceOptions, Action } from '../types';
import Knex from 'knex';
import { ActivityService } from '../services/activity';
@@ -158,21 +154,13 @@ export class AuthenticationService {
}
async generateOTPAuthURL(pk: string, secret: string) {
const user = await this.knex
.select('first_name', 'last_name')
.from('directus_users')
.where({ id: pk })
.first();
const user = await this.knex.select('first_name', 'last_name').from('directus_users').where({ id: pk }).first();
const name = `${user.first_name} ${user.last_name}`;
return authenticator.keyuri(name, 'Directus', secret);
}
async verifyOTP(pk: string, otp: string): Promise<boolean> {
const user = await this.knex
.select('tfa_secret')
.from('directus_users')
.where({ id: pk })
.first();
const user = await this.knex.select('tfa_secret').from('directus_users').where({ id: pk }).first();
if (!user.tfa_secret) {
throw new InvalidPayloadException(`User "${pk}" doesn't have TFA enabled.`);
@@ -183,11 +171,7 @@ export class AuthenticationService {
}
async verifyPassword(pk: string, password: string) {
const userRecord = await this.knex
.select('password')
.from('directus_users')
.where({ id: pk })
.first();
const userRecord = await this.knex.select('password').from('directus_users').where({ id: pk }).first();
if (!userRecord || !userRecord.password) {
throw new InvalidCredentialsException();

View File

@@ -56,27 +56,19 @@ export class AuthorizationService {
)) as Permission[];
// If the permissions don't match the collections, you don't have permission to read all of them
const uniqueCollectionsRequestedCount = uniq(
collectionsRequested.map(({ collection }) => collection)
).length;
const uniqueCollectionsRequestedCount = uniq(collectionsRequested.map(({ collection }) => collection)).length;
if (uniqueCollectionsRequestedCount !== permissionsForCollections.length) {
// Find the first collection that doesn't have permissions configured
const { collection, field } = collectionsRequested.find(
({ collection }) =>
permissionsForCollections.find(
(permission) => permission.collection === collection
) === undefined
permissionsForCollections.find((permission) => permission.collection === collection) === undefined
)!;
if (field) {
throw new ForbiddenException(
`You don't have permission to access the "${field}" field.`
);
throw new ForbiddenException(`You don't have permission to access the "${field}" field.`);
} else {
throw new ForbiddenException(
`You don't have permission to access the "${collection}" collection.`
);
throw new ForbiddenException(`You don't have permission to access the "${collection}" collection.`);
}
}
@@ -88,15 +80,11 @@ export class AuthorizationService {
/**
* Traverses the AST and returns an array of all collections that are being fetched
*/
function getCollectionsFromAST(
ast: AST | NestedCollectionNode
): { collection: string; field: string }[] {
function getCollectionsFromAST(ast: AST | NestedCollectionNode): { collection: string; field: string }[] {
const collections = [];
if (ast.type === 'm2a') {
collections.push(
...ast.names.map((name) => ({ collection: name, field: ast.fieldKey }))
);
collections.push(...ast.names.map((name) => ({ collection: name, field: ast.fieldKey })));
/** @TODO add nestedNode */
} else {
@@ -121,9 +109,7 @@ export class AuthorizationService {
const collection = ast.name;
// We check the availability of the permissions in the step before this is run
const permissions = permissionsForCollections.find(
(permission) => permission.collection === collection
)!;
const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!;
const allowedFields = permissions.fields || [];
@@ -138,9 +124,7 @@ export class AuthorizationService {
const fieldKey = childNode.name;
if (allowedFields.includes(fieldKey) === false) {
throw new ForbiddenException(
`You don't have permission to access the "${fieldKey}" field.`
);
throw new ForbiddenException(`You don't have permission to access the "${fieldKey}" field.`);
}
}
}
@@ -155,9 +139,7 @@ export class AuthorizationService {
const collection = ast.name;
// We check the availability of the permissions in the step before this is run
const permissions = permissionsForCollections.find(
(permission) => permission.collection === collection
)!;
const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!;
const parsedPermissions = parseFilter(permissions.permissions, accountability);
@@ -174,9 +156,7 @@ export class AuthorizationService {
if (ast.query.filter._and.length === 0) delete ast.query.filter._and;
if (permissions.limit && ast.query.limit && ast.query.limit > permissions.limit) {
throw new ForbiddenException(
`You can't read more than ${permissions.limit} items at a time.`
);
throw new ForbiddenException(`You can't read more than ${permissions.limit} items at a time.`);
}
// Default to the permissions limit if limit hasn't been set
@@ -197,16 +177,8 @@ export class AuthorizationService {
/**
* Checks if the provided payload matches the configured permissions, and adds the presets to the payload.
*/
validatePayload(
action: PermissionsAction,
collection: string,
payloads: Partial<Item>[]
): Promise<Partial<Item>[]>;
validatePayload(
action: PermissionsAction,
collection: string,
payload: Partial<Item>
): Promise<Partial<Item>>;
validatePayload(action: PermissionsAction, collection: string, payloads: Partial<Item>[]): Promise<Partial<Item>[]>;
validatePayload(action: PermissionsAction, collection: string, payload: Partial<Item>): Promise<Partial<Item>>;
async validatePayload(
action: PermissionsAction,
collection: string,
@@ -239,10 +211,7 @@ export class AuthorizationService {
if (!permission) throw new ForbiddenException();
permission = (await this.payloadService.processValues(
'read',
permission as Item
)) as Permission;
permission = (await this.payloadService.processValues('read', permission as Item)) as Permission;
// Check if you have permission to access the fields you're trying to acces
@@ -251,9 +220,7 @@ export class AuthorizationService {
if (allowedFields.includes('*') === false) {
for (const payload of payloads) {
const keysInData = Object.keys(payload);
const invalidKeys = keysInData.filter(
(fieldKey) => allowedFields.includes(fieldKey) === false
);
const invalidKeys = keysInData.filter((fieldKey) => allowedFields.includes(fieldKey) === false);
if (invalidKeys.length > 0) {
throw new ForbiddenException(
@@ -280,24 +247,16 @@ export class AuthorizationService {
.where({ collection, field: column.column_name })
.first()) ||
systemFieldRows.find(
(fieldMeta) =>
fieldMeta.field === column.column_name &&
fieldMeta.collection === collection
(fieldMeta) => fieldMeta.field === column.column_name && fieldMeta.collection === collection
);
const specials = field?.special ? toArray(field.special) : [];
const hasGenerateSpecial = [
'uuid',
'date-created',
'role-created',
'user-created',
].some((name) => specials.includes(name));
const hasGenerateSpecial = ['uuid', 'date-created', 'role-created', 'user-created'].some((name) =>
specials.includes(name)
);
const isRequired =
column.is_nullable === false &&
column.default_value === null &&
hasGenerateSpecial === false;
const isRequired = column.is_nullable === false && column.default_value === null && hasGenerateSpecial === false;
if (isRequired) {
requiredColumns.push(column.column_name);
@@ -350,9 +309,7 @@ export class AuthorizationService {
if (Object.keys(validation)[0] === '_and') {
const subValidation = Object.values(validation)[0];
const nestedErrors = flatten<FailedValidationException>(
subValidation.map((subObj: Record<string, any>) =>
this.validateJoi(subObj, payloads)
)
subValidation.map((subObj: Record<string, any>) => this.validateJoi(subObj, payloads))
).filter((err?: FailedValidationException) => err);
errors.push(...nestedErrors);
}
@@ -360,9 +317,7 @@ export class AuthorizationService {
if (Object.keys(validation)[0] === '_or') {
const subValidation = Object.values(validation)[0];
const nestedErrors = flatten<FailedValidationException>(
subValidation.map((subObj: Record<string, any>) =>
this.validateJoi(subObj, payloads)
)
subValidation.map((subObj: Record<string, any>) => this.validateJoi(subObj, payloads))
);
const allErrored = nestedErrors.every((err?: FailedValidationException) => err);
@@ -377,20 +332,14 @@ export class AuthorizationService {
const { error } = schema.validate(payload, { abortEarly: false });
if (error) {
errors.push(
...error.details.map((details) => new FailedValidationException(details))
);
errors.push(...error.details.map((details) => new FailedValidationException(details)));
}
}
return errors;
}
async checkAccess(
action: PermissionsAction,
collection: string,
pk: PrimaryKey | PrimaryKey[]
) {
async checkAccess(action: PermissionsAction, collection: string, pk: PrimaryKey | PrimaryKey[]) {
if (this.accountability?.admin === true) return;
const itemsService = new ItemsService(collection, {
@@ -409,14 +358,11 @@ export class AuthorizationService {
if (!result) throw '';
if (Array.isArray(pk) && pk.length > 1 && result.length !== pk.length) throw '';
} catch {
throw new ForbiddenException(
`You're not allowed to ${action} item "${pk}" in collection "${collection}".`,
{
collection,
item: pk,
action,
}
);
throw new ForbiddenException(`You're not allowed to ${action} item "${pk}" in collection "${collection}".`, {
collection,
item: pk,
action,
});
}
}
}

View File

@@ -1,12 +1,5 @@
import database, { schemaInspector } from '../database';
import {
AbstractServiceOptions,
Accountability,
Collection,
CollectionMeta,
Relation,
SchemaOverview,
} from '../types';
import { AbstractServiceOptions, Accountability, Collection, CollectionMeta, Relation, SchemaOverview } from '../types';
import Knex from 'knex';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { FieldsService } from '../services/fields';
@@ -14,6 +7,7 @@ import { ItemsService } from '../services/items';
import cache from '../cache';
import { toArray } from '../utils/to-array';
import { systemCollectionRows } from '../database/system-data/collections';
import env from '../env';
export class CollectionsService {
knex: Knex;
@@ -78,9 +72,7 @@ export class CollectionsService {
}
if (payload.collection in this.schema) {
throw new InvalidPayloadException(
`Collection "${payload.collection}" already exists.`
);
throw new InvalidPayloadException(`Collection "${payload.collection}" already exists.`);
}
await trx.schema.createTable(payload.collection, (table) => {
@@ -94,9 +86,7 @@ export class CollectionsService {
collection: payload.collection,
});
const fieldPayloads = payload
.fields!.filter((field) => field.meta)
.map((field) => field.meta);
const fieldPayloads = payload.fields!.filter((field) => field.meta).map((field) => field.meta);
await fieldItemsService.create(fieldPayloads);
@@ -104,7 +94,7 @@ export class CollectionsService {
}
});
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}
@@ -131,15 +121,11 @@ export class CollectionsService {
.whereIn('collection', collectionKeys);
if (collectionKeys.length !== permissions.length) {
const collectionsYouHavePermissionToRead = permissions.map(
({ collection }) => collection
);
const collectionsYouHavePermissionToRead = permissions.map(({ collection }) => collection);
for (const collectionKey of collectionKeys) {
if (collectionsYouHavePermissionToRead.includes(collectionKey) === false) {
throw new ForbiddenException(
`You don't have access to the "${collectionKey}" collection.`
);
throw new ForbiddenException(`You don't have access to the "${collectionKey}" collection.`);
}
}
}
@@ -218,10 +204,7 @@ export class CollectionsService {
update(data: Partial<Collection>, keys: string[]): Promise<string[]>;
update(data: Partial<Collection>, key: string): Promise<string>;
update(data: Partial<Collection>[]): Promise<string[]>;
async update(
data: Partial<Collection> | Partial<Collection>[],
key?: string | string[]
): Promise<string | string[]> {
async update(data: Partial<Collection> | Partial<Collection>[], key?: string | string[]): Promise<string | string[]> {
const collectionItemsService = new ItemsService('directus_collections', {
knex: this.knex,
accountability: this.accountability,
@@ -239,11 +222,8 @@ export class CollectionsService {
for (const key of keys) {
const exists =
(await this.knex
.select('collection')
.from('directus_collections')
.where({ collection: key })
.first()) !== undefined;
(await this.knex.select('collection').from('directus_collections').where({ collection: key }).first()) !==
undefined;
if (exists) {
await collectionItemsService.update(payload.meta, key);
@@ -266,7 +246,7 @@ export class CollectionsService {
await collectionItemsService.update(collectionUpdates);
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}
@@ -311,15 +291,13 @@ export class CollectionsService {
for (const relation of relations) {
const isM2O = relation.many_collection === collection;
/** @TODO M2A — Handle m2a case here */
if (isM2O) {
await this.knex('directus_relations')
.delete()
.where({ many_collection: collection, many_field: relation.many_field });
await fieldsService.deleteField(relation.one_collection!, relation.one_field!);
} else {
} else if (!!relation.one_collection) {
await this.knex('directus_relations')
.update({ one_field: null })
.where({ one_collection: collection, one_field: relation.one_field });
@@ -339,7 +317,7 @@ export class CollectionsService {
await this.knex.schema.dropTable(collectionKey);
}
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}

View File

@@ -1,12 +1,6 @@
import database, { schemaInspector } from '../database';
import { Field } from '../types/field';
import {
Accountability,
AbstractServiceOptions,
FieldMeta,
Relation,
SchemaOverview,
} from '../types';
import { Accountability, AbstractServiceOptions, FieldMeta, Relation, SchemaOverview } from '../types';
import { ItemsService } from '../services/items';
import { ColumnBuilder } from 'knex';
import getLocalType from '../utils/get-local-type';
@@ -18,6 +12,7 @@ import getDefaultValue from '../utils/get-default-value';
import cache from '../cache';
import SchemaInspector from '@directus/schema';
import { toArray } from '../utils/to-array';
import env from '../env';
import { systemFieldRows } from '../database/system-data/fields/';
@@ -53,9 +48,7 @@ export class FieldsService {
limit: -1,
})) as FieldMeta[];
fields.push(
...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === collection)
);
fields.push(...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === collection));
} else {
fields = (await nonAuthorizedItemsService.readByQuery({ limit: -1 })) as FieldMeta[];
fields.push(...systemFieldRows);
@@ -92,19 +85,15 @@ export class FieldsService {
aliasQuery.andWhere('collection', collection);
}
let aliasFields = [
...((await this.payloadService.processValues('read', await aliasQuery)) as FieldMeta[]),
];
let aliasFields = [...((await this.payloadService.processValues('read', await aliasQuery)) as FieldMeta[])];
if (collection) {
aliasFields.push(
...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === collection)
);
aliasFields.push(...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === collection));
} else {
aliasFields.push(...systemFieldRows);
}
const aliasTypes = ['alias', 'o2m', 'm2m', 'files', 'files', 'translations'];
const aliasTypes = ['alias', 'o2m', 'm2m', 'm2a', 'files', 'files', 'translations'];
aliasFields = aliasFields.filter((field) => {
const specials = toArray(field.special);
@@ -139,9 +128,7 @@ export class FieldsService {
const allowedFieldsInCollection: Record<string, string[]> = {};
permissions.forEach((permission) => {
allowedFieldsInCollection[permission.collection] = (permission.fields || '').split(
','
);
allowedFieldsInCollection[permission.collection] = (permission.fields || '').split(',');
});
if (collection && allowedFieldsInCollection.hasOwnProperty(collection) === false) {
@@ -149,8 +136,7 @@ export class FieldsService {
}
return result.filter((field) => {
if (allowedFieldsInCollection.hasOwnProperty(field.collection) === false)
return false;
if (allowedFieldsInCollection.hasOwnProperty(field.collection) === false) return false;
const allowedFields = allowedFieldsInCollection[field.collection];
if (allowedFields[0] === '*') return true;
return allowedFields.includes(field.field);
@@ -180,11 +166,7 @@ export class FieldsService {
}
let column;
let fieldInfo = await this.knex
.select('*')
.from('directus_fields')
.where({ collection, field })
.first();
let fieldInfo = await this.knex.select('*').from('directus_fields').where({ collection, field }).first();
if (fieldInfo) {
fieldInfo = (await this.payloadService.processValues('read', fieldInfo)) as FieldMeta[];
@@ -192,9 +174,7 @@ export class FieldsService {
fieldInfo =
fieldInfo ||
systemFieldRows.find(
(fieldMeta) => fieldMeta.collection === collection && fieldMeta.field === field
);
systemFieldRows.find((fieldMeta) => fieldMeta.collection === collection && fieldMeta.field === field);
try {
column = await this.schemaInspector.columnInfo(collection, field);
@@ -223,19 +203,11 @@ export class FieldsService {
// Check if field already exists, either as a column, or as a row in directus_fields
if (field.field in this.schema[collection].columns) {
throw new InvalidPayloadException(
`Field "${field.field}" already exists in collection "${collection}"`
);
throw new InvalidPayloadException(`Field "${field.field}" already exists in collection "${collection}"`);
} else if (
!!(await this.knex
.select('id')
.from('directus_fields')
.where({ collection, field: field.field })
.first())
!!(await this.knex.select('id').from('directus_fields').where({ collection, field: field.field }).first())
) {
throw new InvalidPayloadException(
`Field "${field.field}" already exists in collection "${collection}"`
);
throw new InvalidPayloadException(`Field "${field.field}" already exists in collection "${collection}"`);
}
if (field.schema) {
@@ -256,13 +228,11 @@ export class FieldsService {
});
}
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}
}
/** @todo research how to make this happen in SQLite / Redshift */
async updateField(collection: string, field: RawField) {
if (this.accountability && this.accountability.admin !== true) {
throw new ForbiddenException('Only admins can perform this action');
@@ -270,46 +240,8 @@ export class FieldsService {
if (field.schema) {
await this.knex.schema.alterTable(collection, (table) => {
let column: ColumnBuilder;
if (!field.schema) return;
if (field.type === 'string') {
column = table.string(
field.field,
field.schema.max_length !== null ? field.schema.max_length : undefined
);
} else if (['float', 'decimal'].includes(field.type)) {
const type = field.type as 'float' | 'decimal';
column = table[type](
field.field,
field.schema?.numeric_precision || 10,
field.schema?.numeric_scale || 5
);
} else if (field.type === 'csv') {
column = table.string(field.field);
} else {
column = table[field.type](field.field);
}
if (field.schema.default_value !== undefined) {
if (
typeof field.schema.default_value === 'string' &&
field.schema.default_value.toLowerCase() === 'now()'
) {
column.defaultTo(this.knex.fn.now());
} else {
column.defaultTo(field.schema.default_value);
}
}
if (field.schema.is_nullable !== undefined && field.schema.is_nullable === false) {
column.notNullable();
} else {
column.nullable();
}
column.alter();
this.addColumnToTable(table, field, true);
});
}
@@ -338,7 +270,7 @@ export class FieldsService {
}
}
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}
@@ -371,9 +303,7 @@ export class FieldsService {
/** @TODO M2A — Handle m2a case here */
if (isM2O) {
await this.knex('directus_relations')
.delete()
.where({ many_collection: collection, many_field: field });
await this.knex('directus_relations').delete().where({ many_collection: collection, many_field: field });
await this.deleteField(relation.one_collection!, relation.one_field!);
} else {
await this.knex('directus_relations')
@@ -382,35 +312,38 @@ export class FieldsService {
}
}
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}
}
public addColumnToTable(table: CreateTableBuilder, field: Field) {
public addColumnToTable(table: CreateTableBuilder, field: RawField | Field, alter: boolean = false) {
if (!field.schema) return;
let column: ColumnBuilder;
if (field.schema?.has_auto_increment) {
column = table.increments(field.field);
} else if (field.type === 'string') {
column = table.string(field.field, field.schema?.max_length || undefined);
column = table.string(field.field, field.schema.max_length !== null ? field.schema.max_length : undefined);
} else if (['float', 'decimal'].includes(field.type)) {
const type = field.type as 'float' | 'decimal';
/** @todo add precision and scale support */
column = table[type](field.field /* precision, scale */);
column = table[type](field.field, field.schema?.numeric_precision || 10, field.schema?.numeric_scale || 5);
} else if (field.type === 'csv') {
column = table.string(field.field);
} else if (field.type === 'dateTime') {
column = table.dateTime(field.field, { useTz: false });
} else {
column = table[field.type](field.field);
}
if (field.schema?.default_value) {
column.defaultTo(field.schema.default_value);
if (field.schema.default_value !== undefined) {
if (typeof field.schema.default_value === 'string' && field.schema.default_value.toLowerCase() === 'now()') {
column.defaultTo(this.knex.fn.now());
} else {
column.defaultTo(field.schema.default_value);
}
}
if (field.schema?.is_nullable !== undefined && field.schema.is_nullable === false) {
if (field.schema.is_nullable !== undefined && field.schema.is_nullable === false) {
column.notNullable();
} else {
column.nullable();
@@ -419,5 +352,9 @@ export class FieldsService {
if (field.schema?.is_primary_key) {
column.primary();
}
if (alter) {
column.alter();
}
}
}

View File

@@ -11,6 +11,7 @@ import { ForbiddenException } from '../exceptions';
import { toArray } from '../utils/to-array';
import { extension } from 'mime-types';
import path from 'path';
import env from '../env';
export class FilesService extends ItemsService {
constructor(options: AbstractServiceOptions) {
@@ -38,8 +39,7 @@ export class FilesService extends ItemsService {
primaryKey = await this.create(payload);
}
const fileExtension =
(payload.type && extension(payload.type)) || path.extname(payload.filename_download);
const fileExtension = (payload.type && extension(payload.type)) || path.extname(payload.filename_download);
payload.filename_disk = primaryKey + '.' + fileExtension;
@@ -87,7 +87,7 @@ export class FilesService extends ItemsService {
});
await sudoService.update(payload, primaryKey);
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}
@@ -117,7 +117,7 @@ export class FilesService extends ItemsService {
await super.delete(keys);
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}

View File

@@ -1,14 +1,6 @@
import Knex from 'knex';
import database from '../database';
import {
AbstractServiceOptions,
Accountability,
Collection,
Field,
Relation,
Query,
SchemaOverview,
} from '../types';
import { AbstractServiceOptions, Accountability, Collection, Field, Relation, Query, SchemaOverview } from '../types';
import {
GraphQLString,
GraphQLSchema,
@@ -91,11 +83,7 @@ export class GraphQLService {
const fieldsInSystem = await this.fieldsService.readAll();
const relationsInSystem = (await this.relationsService.readByQuery({})) as Relation[];
const schema = this.getGraphQLSchema(
collectionsInSystem,
fieldsInSystem,
relationsInSystem
);
const schema = this.getGraphQLSchema(collectionsInSystem, fieldsInSystem, relationsInSystem);
return schema;
}
@@ -113,17 +101,13 @@ export class GraphQLService {
description: collection.meta?.note,
fields: () => {
const fieldsObject: GraphQLFieldConfigMap<any, any> = {};
const fieldsInCollection = fields.filter(
(field) => field.collection === collection.collection
);
const fieldsInCollection = fields.filter((field) => field.collection === collection.collection);
for (const field of fieldsInCollection) {
const relationForField = relations.find((relation) => {
return (
(relation.many_collection === collection.collection &&
relation.many_field === field.field) ||
(relation.one_collection === collection.collection &&
relation.one_field === field.field)
(relation.many_collection === collection.collection && relation.many_field === field.field) ||
(relation.one_collection === collection.collection && relation.one_field === field.field)
);
});
@@ -135,9 +119,7 @@ export class GraphQLService {
});
if (relationType === 'm2o') {
const relatedIsSystem = relationForField.one_collection!.startsWith(
'directus_'
);
const relatedIsSystem = relationForField.one_collection!.startsWith('directus_');
const relatedType = relatedIsSystem
? schema[relationForField.one_collection!.substring(9)].type
@@ -147,9 +129,7 @@ export class GraphQLService {
type: relatedType,
};
} else if (relationType === 'o2m') {
const relatedIsSystem = relationForField.many_collection.startsWith(
'directus_'
);
const relatedIsSystem = relationForField.many_collection.startsWith('directus_');
const relatedType = relatedIsSystem
? schema[relationForField.many_collection.substring(9)].type
@@ -170,9 +150,7 @@ export class GraphQLService {
const types: any = [];
for (const relatedCollection of relatedCollections) {
const relatedType = relatedCollection.startsWith(
'directus_'
)
const relatedType = relatedCollection.startsWith('directus_')
? schema[relatedCollection.substring(9)].type
: schema.items[relatedCollection].type;
@@ -195,9 +173,7 @@ export class GraphQLService {
}
} else {
fieldsObject[field.field] = {
type: field.schema?.is_primary_key
? GraphQLID
: getGraphQLType(field.type),
type: field.schema?.is_primary_key ? GraphQLID : getGraphQLType(field.type),
};
}
@@ -293,17 +269,13 @@ export class GraphQLService {
},
};
const fieldsInCollection = fields.filter(
(field) => field.collection === collection.collection
);
const fieldsInCollection = fields.filter((field) => field.collection === collection.collection);
for (const field of fieldsInCollection) {
const relationForField = relations.find((relation) => {
return (
(relation.many_collection === collection.collection &&
relation.many_field === field.field) ||
(relation.one_collection === collection.collection &&
relation.one_field === field.field)
(relation.many_collection === collection.collection && relation.many_field === field.field) ||
(relation.one_collection === collection.collection && relation.one_field === field.field)
);
});
@@ -332,9 +304,7 @@ export class GraphQLService {
* Figure out how to setup filter fields for a union type output
*/
} else {
const fieldType = field.schema?.is_primary_key
? GraphQLID
: getGraphQLType(field.type);
const fieldType = field.schema?.is_primary_key ? GraphQLID : getGraphQLType(field.type);
filterFields[field.field] = {
type: new GraphQLInputObjectType({
@@ -402,18 +372,13 @@ export class GraphQLService {
const collection = systemField ? `directus_${info.fieldName}` : info.fieldName;
const selections = info.fieldNodes[0]?.selectionSet?.selections?.filter(
(node) => node.kind === 'Field'
) as FieldNode[] | undefined;
const selections = info.fieldNodes[0]?.selectionSet?.selections?.filter((node) => node.kind === 'Field') as
| FieldNode[]
| undefined;
if (!selections) return null;
return await this.getData(
collection,
selections,
info.fieldNodes[0].arguments || [],
info.variableValues
);
return await this.getData(collection, selections, info.fieldNodes[0].arguments || [], info.variableValues);
}
async getData(
@@ -436,9 +401,7 @@ export class GraphQLService {
fields.push(current);
} else {
const children = parseFields(
selection.selectionSet.selections.filter(
(selection) => selection.kind === 'Field'
) as FieldNode[],
selection.selectionSet.selections.filter((selection) => selection.kind === 'Field') as FieldNode[],
current
);
fields.push(...children);
@@ -447,10 +410,7 @@ export class GraphQLService {
if (selection.arguments && selection.arguments.length > 0) {
if (!query.deep) query.deep = {};
const args: Record<string, any> = this.parseArgs(
selection.arguments,
variableValues
);
const args: Record<string, any> = this.parseArgs(selection.arguments, variableValues);
query.deep[current] = sanitizeQuery(args, this.accountability);
}
}
@@ -458,9 +418,7 @@ export class GraphQLService {
return fields;
};
query.fields = parseFields(
selections.filter((selection) => selection.kind === 'Field') as FieldNode[]
);
query.fields = parseFields(selections.filter((selection) => selection.kind === 'Field') as FieldNode[]);
let service: ItemsService;
@@ -550,18 +508,10 @@ export class GraphQLService {
}
const collectionInfo =
(await this.knex
.select('singleton')
.from('directus_collections')
.where({ collection: collection })
.first()) ||
systemCollectionRows.find(
(collectionMeta) => collectionMeta?.collection === collection
);
(await this.knex.select('singleton').from('directus_collections').where({ collection: collection }).first()) ||
systemCollectionRows.find((collectionMeta) => collectionMeta?.collection === collection);
const result = collectionInfo?.singleton
? await service.readSingleton(query)
: await service.readByQuery(query);
const result = collectionInfo?.singleton ? await service.readSingleton(query) : await service.readByQuery(query);
return result;
}
@@ -596,10 +546,7 @@ export class GraphQLService {
argsObject[argument.name.value] = values;
} else {
argsObject[argument.name.value] = (argument.value as
| IntValueNode
| StringValueNode
| BooleanValueNode).value;
argsObject[argument.name.value] = (argument.value as IntValueNode | StringValueNode | BooleanValueNode).value;
}
}

View File

@@ -17,6 +17,7 @@ import cache from '../cache';
import emitter from '../emitter';
import logger from '../logger';
import { toArray } from '../utils/to-array';
import env from '../env';
import { PayloadService } from './payload';
import { AuthorizationService } from './authorization';
@@ -37,9 +38,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
this.collection = collection;
this.knex = options.knex || database;
this.accountability = options.accountability || null;
this.eventScope = this.collection.startsWith('directus_')
? this.collection.substring(9)
: 'items';
this.eventScope = this.collection.startsWith('directus_') ? this.collection.substring(9) : 'items';
this.schema = options.schema;
return this;
@@ -60,19 +59,15 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
schema: this.schema,
});
const customProcessed = await emitter.emitAsync(
`${this.eventScope}.create.before`,
payloads,
{
event: `${this.eventScope}.create.before`,
accountability: this.accountability,
collection: this.collection,
item: null,
action: 'create',
payload: payloads,
schema: this.schema,
}
);
const customProcessed = await emitter.emitAsync(`${this.eventScope}.create.before`, payloads, {
event: `${this.eventScope}.create.before`,
accountability: this.accountability,
collection: this.collection,
item: null,
action: 'create',
payload: payloads,
schema: this.schema,
});
if (customProcessed) {
payloads = customProcessed[customProcessed.length - 1];
@@ -85,21 +80,15 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
schema: this.schema,
});
payloads = await authorizationService.validatePayload(
'create',
this.collection,
payloads
);
payloads = await authorizationService.validatePayload('create', this.collection, payloads);
}
payloads = await payloadService.processM2O(payloads);
payloads = await payloadService.processA2O(payloads);
let payloadsWithoutAliases = payloads.map((payload) => pick(payload, columns));
payloadsWithoutAliases = await payloadService.processValues(
'create',
payloadsWithoutAliases
);
payloadsWithoutAliases = await payloadService.processValues('create', payloadsWithoutAliases);
const primaryKeys: PrimaryKey[] = [];
@@ -148,11 +137,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
let primaryKey;
const result = await trx
.select('id')
.from('directus_activity')
.orderBy('id', 'desc')
.first();
const result = await trx.select('id').from('directus_activity').orderBy('id', 'desc').first();
primaryKey = result.id;
@@ -170,7 +155,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
await trx.insert(revisionRecords).into('directus_revisions');
}
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}
@@ -212,16 +197,8 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
return records as Partial<Item> | Partial<Item>[] | null;
}
readByKey(
keys: PrimaryKey[],
query?: Query,
action?: PermissionsAction
): Promise<null | Partial<Item>[]>;
readByKey(
key: PrimaryKey,
query?: Query,
action?: PermissionsAction
): Promise<null | Partial<Item>>;
readByKey(keys: PrimaryKey[], query?: Query, action?: PermissionsAction): Promise<null | Partial<Item>[]>;
readByKey(key: PrimaryKey, query?: Query, action?: PermissionsAction): Promise<null | Partial<Item>>;
async readByKey(
key: PrimaryKey | PrimaryKey[],
query: Query = {},
@@ -284,19 +261,15 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
let payload: Partial<AnyItem> | Partial<AnyItem>[] = clone(data);
const customProcessed = await emitter.emitAsync(
`${this.eventScope}.update.before`,
const customProcessed = await emitter.emitAsync(`${this.eventScope}.update.before`, payload, {
event: `${this.eventScope}.update.before`,
accountability: this.accountability,
collection: this.collection,
item: key,
action: 'update',
payload,
{
event: `${this.eventScope}.update.before`,
accountability: this.accountability,
collection: this.collection,
item: key,
action: 'update',
payload,
schema: this.schema,
}
);
schema: this.schema,
});
if (customProcessed) {
payload = customProcessed[customProcessed.length - 1];
@@ -311,11 +284,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
await authorizationService.checkAccess('update', this.collection, keys);
payload = await authorizationService.validatePayload(
'update',
this.collection,
payload
);
payload = await authorizationService.validatePayload('update', this.collection, payload);
}
await this.knex.transaction(async (trx) => {
@@ -326,18 +295,14 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
});
payload = await payloadService.processM2O(payload);
payload = await payloadService.processA2O(payload);
let payloadWithoutAliases = pick(payload, columns);
payloadWithoutAliases = await payloadService.processValues(
'update',
payloadWithoutAliases
);
payloadWithoutAliases = await payloadService.processValues('update', payloadWithoutAliases);
if (Object.keys(payloadWithoutAliases).length > 0) {
await trx(this.collection)
.update(payloadWithoutAliases)
.whereIn(primaryKeyField, keys);
await trx(this.collection).update(payloadWithoutAliases).whereIn(primaryKeyField, keys);
}
for (const key of keys) {
@@ -360,11 +325,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
await trx.insert(activityRecord).into('directus_activity');
let primaryKey;
const result = await trx
.select('id')
.from('directus_activity')
.orderBy('id', 'desc')
.first();
const result = await trx.select('id').from('directus_activity').orderBy('id', 'desc').first();
primaryKey = result.id;
activityPrimaryKeys.push(primaryKey);
@@ -381,9 +342,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
collection: this.collection,
item: keys[index],
data:
snapshots && Array.isArray(snapshots)
? JSON.stringify(snapshots?.[index])
: JSON.stringify(snapshots),
snapshots && Array.isArray(snapshots) ? JSON.stringify(snapshots?.[index]) : JSON.stringify(snapshots),
delta: JSON.stringify(payloadWithoutAliases),
}));
@@ -391,7 +350,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
}
});
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}
@@ -452,9 +411,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
let itemsToUpdate = await itemsService.readByQuery(readQuery);
itemsToUpdate = toArray(itemsToUpdate);
const keys: PrimaryKey[] = itemsToUpdate.map(
(item: Partial<Item>) => item[primaryKeyField]
);
const keys: PrimaryKey[] = itemsToUpdate.map((item: Partial<Item>) => item[primaryKeyField]);
return await this.update(data, keys);
}
@@ -530,7 +487,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
}
});
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}
@@ -563,9 +520,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
let itemsToDelete = await itemsService.readByQuery(readQuery);
itemsToDelete = toArray(itemsToDelete);
const keys: PrimaryKey[] = itemsToDelete.map(
(item: Partial<Item>) => item[primaryKeyField]
);
const keys: PrimaryKey[] = itemsToDelete.map((item: Partial<Item>) => item[primaryKeyField]);
return await this.delete(keys);
}
@@ -598,11 +553,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
async upsertSingleton(data: Partial<Item>) {
const primaryKeyField = this.schema[this.collection].primary;
const record = await this.knex
.select(primaryKeyField)
.from(this.collection)
.limit(1)
.first();
const record = await this.knex.select(primaryKeyField).from(this.collection).limit(1).first();
if (record) {
return await this.update(data, record.id);

View File

@@ -7,14 +7,7 @@ import argon2 from 'argon2';
import { v4 as uuidv4 } from 'uuid';
import database from '../database';
import { clone, isObject, cloneDeep } from 'lodash';
import {
Relation,
Item,
AbstractServiceOptions,
Accountability,
PrimaryKey,
SchemaOverview,
} from '../types';
import { Relation, Item, AbstractServiceOptions, Accountability, PrimaryKey, SchemaOverview } from '../types';
import { ItemsService } from './items';
import { URL } from 'url';
import Knex from 'knex';
@@ -26,6 +19,8 @@ import { toArray } from '../utils/to-array';
import { FieldMeta } from '../types';
import { systemFieldRows } from '../database/system-data/fields';
import { systemRelationRows } from '../database/system-data/relations';
import { InvalidPayloadException } from '../exceptions';
import { isPlainObject } from 'lodash';
type Action = 'create' | 'read' | 'update';
@@ -165,9 +160,7 @@ export class PayloadService {
.where({ collection: this.collection })
.whereNotNull('special');
specialFieldsInCollection.push(
...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === this.collection)
);
specialFieldsInCollection.push(...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === this.collection));
if (action === 'read') {
specialFieldsInCollection = specialFieldsInCollection.filter((fieldMeta) => {
@@ -179,12 +172,7 @@ export class PayloadService {
processedPayload.map(async (record: any) => {
await Promise.all(
specialFieldsInCollection.map(async (field) => {
const newValue = await this.processField(
field,
record,
action,
this.accountability
);
const newValue = await this.processField(field, record, action, this.accountability);
if (newValue !== undefined) record[field.field] = newValue;
})
);
@@ -198,12 +186,7 @@ export class PayloadService {
if (['create', 'update'].includes(action)) {
processedPayload.forEach((record) => {
for (const [key, value] of Object.entries(record)) {
if (
Array.isArray(value) ||
(typeof value === 'object' &&
value instanceof Date !== true &&
value !== null)
) {
if (Array.isArray(value) || (typeof value === 'object' && value instanceof Date !== true && value !== null)) {
record[key] = JSON.stringify(value);
}
}
@@ -217,12 +200,7 @@ export class PayloadService {
return processedPayload[0];
}
async processField(
field: FieldMeta,
payload: Partial<Item>,
action: Action,
accountability: Accountability | null
) {
async processField(field: FieldMeta, payload: Partial<Item>, action: Action, accountability: Accountability | null) {
if (!field.special) return payload[field.field];
const fieldSpecials = field.special ? toArray(field.special) : [];
@@ -254,9 +232,7 @@ export class PayloadService {
type: getLocalType(column),
}));
const dateColumns = columnsWithType.filter((column) =>
['dateTime', 'date', 'timestamp'].includes(column.type)
);
const dateColumns = columnsWithType.filter((column) => ['dateTime', 'date', 'timestamp'].includes(column.type));
if (dateColumns.length === 0) return payloads;
@@ -296,34 +272,99 @@ export class PayloadService {
}
/**
* Recursively save/update all nested related m2o items
* Recursively save/update all nested related Any-to-One items
*/
processM2O(payloads: Partial<Item>[]): Promise<Partial<Item>[]>;
processM2O(payloads: Partial<Item>): Promise<Partial<Item>>;
async processM2O(
payload: Partial<Item> | Partial<Item>[]
): Promise<Partial<Item> | Partial<Item>[]> {
processA2O(payloads: Partial<Item>[]): Promise<Partial<Item>[]>;
processA2O(payloads: Partial<Item>): Promise<Partial<Item>>;
async processA2O(payload: Partial<Item> | Partial<Item>[]): Promise<Partial<Item> | Partial<Item>[]> {
const relations = [
...(await this.knex
.select<Relation[]>('*')
.from('directus_relations')
.where({ many_collection: this.collection })),
...systemRelationRows.filter(
(systemRelation) => systemRelation.many_collection === this.collection
),
...systemRelationRows.filter((systemRelation) => systemRelation.many_collection === this.collection),
];
const payloads = clone(Array.isArray(payload) ? payload : [payload]);
const payloads = clone(toArray(payload));
for (let i = 0; i < payloads.length; i++) {
let payload = payloads[i];
// Only process related records that are actually in the payload
const relationsToProcess = relations.filter((relation) => {
return (
payload.hasOwnProperty(relation.many_field) &&
isObject(payload[relation.many_field])
);
return payload.hasOwnProperty(relation.many_field) && isObject(payload[relation.many_field]);
});
for (const relation of relationsToProcess) {
if (!relation.one_collection_field || !relation.one_allowed_collections) continue;
if (isPlainObject(payload[relation.many_field]) === false) continue;
const relatedCollection = payload[relation.one_collection_field];
if (!relatedCollection) {
throw new InvalidPayloadException(
`Can't update nested record "${relation.many_collection}.${relation.many_field}" without field "${relation.many_collection}.${relation.one_collection_field}" being set`
);
}
const allowedCollections = relation.one_allowed_collections.split(',');
if (allowedCollections.includes(relatedCollection) === false) {
throw new InvalidPayloadException(
`"${relation.many_collection}.${relation.many_field}" can't be linked to collection "${relatedCollection}`
);
}
const itemsService = new ItemsService(relatedCollection, {
accountability: this.accountability,
knex: this.knex,
schema: this.schema,
});
const relatedPrimary = this.schema[relatedCollection].primary;
const relatedRecord: Partial<Item> = payload[relation.many_field];
const hasPrimaryKey = relatedRecord.hasOwnProperty(relatedPrimary);
let relatedPrimaryKey: PrimaryKey = relatedRecord[relatedPrimary];
const exists = hasPrimaryKey && !!(await this.knex.select(relatedPrimary).from(relatedCollection).first());
if (exists) {
await itemsService.update(relatedRecord, relatedPrimaryKey);
} else {
relatedPrimaryKey = await itemsService.create(relatedRecord);
}
// Overwrite the nested object with just the primary key, so the parent level can be saved correctly
payload[relation.many_field] = relatedPrimaryKey;
}
}
return Array.isArray(payload) ? payloads : payloads[0];
}
/**
* Recursively save/update all nested related m2o items
*/
processM2O(payloads: Partial<Item>[]): Promise<Partial<Item>[]>;
processM2O(payloads: Partial<Item>): Promise<Partial<Item>>;
async processM2O(payload: Partial<Item> | Partial<Item>[]): Promise<Partial<Item> | Partial<Item>[]> {
const relations = [
...(await this.knex
.select<Relation[]>('*')
.from('directus_relations')
.where({ many_collection: this.collection })),
...systemRelationRows.filter((systemRelation) => systemRelation.many_collection === this.collection),
];
const payloads = clone(toArray(payload));
for (let i = 0; i < payloads.length; i++) {
let payload = payloads[i];
// Only process related records that are actually in the payload
const relationsToProcess = relations.filter((relation) => {
return payload.hasOwnProperty(relation.many_field) && isObject(payload[relation.many_field]);
});
for (const relation of relationsToProcess) {
@@ -341,7 +382,8 @@ export class PayloadService {
if (['string', 'number'].includes(typeof relatedRecord)) continue;
let relatedPrimaryKey: PrimaryKey = relatedRecord[relation.one_primary];
const exists = hasPrimaryKey && !!(await itemsService.readByKey(relatedPrimaryKey));
const exists =
hasPrimaryKey && !!(await this.knex.select(relation.one_primary).from(relation.one_collection).first());
if (exists) {
await itemsService.update(relatedRecord, relatedPrimaryKey);
@@ -366,9 +408,7 @@ export class PayloadService {
.select<Relation[]>('*')
.from('directus_relations')
.where({ one_collection: this.collection })),
...systemRelationRows.filter(
(systemRelation) => systemRelation.one_collection === this.collection
),
...systemRelationRows.filter((systemRelation) => systemRelation.one_collection === this.collection),
];
const payloads = clone(toArray(payload));
@@ -397,10 +437,7 @@ export class PayloadService {
for (const relatedRecord of payload[relation.one_field!] || []) {
let record = cloneDeep(relatedRecord);
if (
typeof relatedRecord === 'string' ||
typeof relatedRecord === 'number'
) {
if (typeof relatedRecord === 'string' || typeof relatedRecord === 'number') {
const exists = !!(await this.knex
.select(relation.many_primary)
.from(relation.many_collection)

View File

@@ -7,19 +7,13 @@ export class PermissionsService extends ItemsService {
}
async getAllowedCollections(role: string | null, action: PermissionsAction) {
const query = this.knex
.select('collection')
.from('directus_permissions')
.where({ role, action });
const query = this.knex.select('collection').from('directus_permissions').where({ role, action });
const results = await query;
return results.map((result) => result.collection);
}
async getAllowedFields(role: string | null, action: PermissionsAction, collection?: string) {
const query = this.knex
.select('collection', 'fields')
.from('directus_permissions')
.where({ role, action });
const query = this.knex.select('collection', 'fields').from('directus_permissions').where({ role, action });
if (collection) {
query.andWhere({ collection });

View File

@@ -26,10 +26,7 @@ export class RelationsService extends ItemsService {
knex: this.knex,
schema: this.schema,
});
const results = (await service.readByQuery(query)) as
| ParsedRelation
| ParsedRelation[]
| null;
const results = (await service.readByQuery(query)) as ParsedRelation | ParsedRelation[] | null;
if (results && Array.isArray(results)) {
results.push(...(systemRelationRows as ParsedRelation[]));
@@ -40,11 +37,7 @@ export class RelationsService extends ItemsService {
return filteredResults;
}
readByKey(
keys: PrimaryKey[],
query?: Query,
action?: PermissionsAction
): Promise<null | Relation[]>;
readByKey(keys: PrimaryKey[], query?: Query, action?: PermissionsAction): Promise<null | Relation[]>;
readByKey(key: PrimaryKey, query?: Query, action?: PermissionsAction): Promise<null | Relation>;
async readByKey(
key: PrimaryKey | PrimaryKey[],
@@ -55,10 +48,7 @@ export class RelationsService extends ItemsService {
knex: this.knex,
schema: this.schema,
});
const results = (await service.readByKey(key as any, query, action)) as
| ParsedRelation
| ParsedRelation[]
| null;
const results = (await service.readByKey(key as any, query, action)) as ParsedRelation | ParsedRelation[] | null;
// No need to merge system relations here. They don't have PKs so can never be directly
// targetted
@@ -76,10 +66,7 @@ export class RelationsService extends ItemsService {
'read'
);
const allowedFields = await this.permissionsService.getAllowedFields(
this.accountability?.role || null,
'read'
);
const allowedFields = await this.permissionsService.getAllowedFields(this.accountability?.role || null, 'read');
relations = toArray(relations);
@@ -91,18 +78,13 @@ export class RelationsService extends ItemsService {
collectionsAllowed = false;
}
if (
relation.one_collection &&
allowedCollections.includes(relation.one_collection) === false
) {
if (relation.one_collection && allowedCollections.includes(relation.one_collection) === false) {
collectionsAllowed = false;
}
if (
relation.one_allowed_collections &&
relation.one_allowed_collections.every((collection) =>
allowedCollections.includes(collection)
) === false
relation.one_allowed_collections.every((collection) => allowedCollections.includes(collection)) === false
) {
collectionsAllowed = false;
}
@@ -120,8 +102,7 @@ export class RelationsService extends ItemsService {
relation.one_field &&
(!allowedFields[relation.one_collection] ||
(allowedFields[relation.one_collection].includes('*') === false &&
allowedFields[relation.one_collection].includes(relation.one_field) ===
false))
allowedFields[relation.one_collection].includes(relation.one_field) === false))
) {
fieldsAllowed = false;
}

View File

@@ -15,8 +15,7 @@ export class RevisionsService extends ItemsService {
const revision = (await super.readByKey(pk)) as Revision | null;
if (!revision) throw new ForbiddenException();
if (!revision.data)
throw new InvalidPayloadException(`Revision doesn't contain data to revert to`);
if (!revision.data) throw new InvalidPayloadException(`Revision doesn't contain data to revert to`);
const service = new ItemsService(revision.collection, {
accountability: this.accountability,

View File

@@ -24,8 +24,7 @@ export class RolesService extends ItemsService {
.andWhere({ admin_access: true })
.first();
const otherAdminRolesCount = +(otherAdminRoles?.count || 0);
if (otherAdminRolesCount === 0)
throw new UnprocessableEntityException(`You can't delete the last admin role.`);
if (otherAdminRolesCount === 0) throw new UnprocessableEntityException(`You can't delete the last admin role.`);
// Remove all permissions associated with this role
const permissionsService = new PermissionsService({

View File

@@ -40,10 +40,7 @@ export class ServerService {
if (this.accountability?.admin === true) {
const osType = os.type() === 'Darwin' ? 'macOS' : os.type();
const osVersion =
osType === 'macOS'
? `${macosRelease().name} (${macosRelease().version})`
: os.release();
const osVersion = osType === 'macOS' ? `${macosRelease().name} (${macosRelease().version})` : os.release();
info.directus = {
version,

View File

@@ -14,13 +14,7 @@ import formatTitle from '@directus/format-title';
import { cloneDeep, mergeWith } from 'lodash';
import { RelationsService } from './relations';
import env from '../env';
import {
OpenAPIObject,
PathItemObject,
OperationObject,
TagObject,
SchemaObject,
} from 'openapi3-ts';
import { OpenAPIObject, PathItemObject, OperationObject, TagObject, SchemaObject } from 'openapi3-ts';
// @ts-ignore
import { version } from '../../package.json';
@@ -110,8 +104,7 @@ class OASService implements SpecificationSubService {
openapi: '3.0.1',
info: {
title: 'Dynamic API Specification',
description:
'This is a dynamicly generated API specification for all endpoints existing on the current .',
description: 'This is a dynamicly generated API specification for all endpoints existing on the current .',
version: version,
},
servers: [
@@ -164,18 +157,13 @@ class OASService implements SpecificationSubService {
return tags.filter((tag) => tag.name !== 'Items');
}
private async generatePaths(
permissions: Permission[],
tags: OpenAPIObject['tags']
): Promise<OpenAPIObject['paths']> {
private async generatePaths(permissions: Permission[], tags: OpenAPIObject['tags']): Promise<OpenAPIObject['paths']> {
const paths: OpenAPIObject['paths'] = {};
if (!tags) return paths;
for (const tag of tags) {
const isSystem =
tag.hasOwnProperty('x-collection') === false ||
tag['x-collection'].startsWith('directus_');
const isSystem = tag.hasOwnProperty('x-collection') === false || tag['x-collection'].startsWith('directus_');
if (isSystem) {
for (const [path, pathItem] of Object.entries<PathItemObject>(openapi.paths)) {
@@ -210,23 +198,18 @@ class OASService implements SpecificationSubService {
this.accountability?.admin === true ||
!!permissions.find(
(permission) =>
permission.collection === collection &&
permission.action === this.getActionForMethod(method)
permission.collection === collection && permission.action === this.getActionForMethod(method)
);
if (hasPermission) {
if (!paths[`/items/${collection}`]) paths[`/items/${collection}`] = {};
if (!paths[`/items/${collection}/{id}`])
paths[`/items/${collection}/{id}`] = {};
if (!paths[`/items/${collection}/{id}`]) paths[`/items/${collection}/{id}`] = {};
if (listBase[method]) {
paths[`/items/${collection}`][method] = mergeWith(
cloneDeep(listBase[method]),
{
description: listBase[method].description.replace(
'item',
collection + ' item'
),
description: listBase[method].description.replace('item', collection + ' item'),
tags: [tag.name],
operationId: `${this.getActionForMethod(method)}${tag.name}`,
requestBody: ['get', 'delete'].includes(method)
@@ -281,14 +264,9 @@ class OASService implements SpecificationSubService {
paths[`/items/${collection}/{id}`][method] = mergeWith(
cloneDeep(detailBase[method]),
{
description: detailBase[method].description.replace(
'item',
collection + ' item'
),
description: detailBase[method].description.replace('item', collection + ' item'),
tags: [tag.name],
operationId: `${this.getActionForMethod(method)}Single${
tag.name
}`,
operationId: `${this.getActionForMethod(method)}Single${tag.name}`,
requestBody: ['get', 'delete'].includes(method)
? undefined
: {
@@ -355,23 +333,17 @@ class OASService implements SpecificationSubService {
const isSystem = collection.collection.startsWith('directus_');
const fieldsInCollection = fields.filter(
(field) => field.collection === collection.collection
);
const fieldsInCollection = fields.filter((field) => field.collection === collection.collection);
if (isSystem) {
const schemaComponent: SchemaObject = cloneDeep(
openapi.components!.schemas![tag.name]
);
const schemaComponent: SchemaObject = cloneDeep(openapi.components!.schemas![tag.name]);
schemaComponent.properties = {};
for (const field of fieldsInCollection) {
schemaComponent.properties[field.field] =
(cloneDeep(
(openapi.components!.schemas![tag.name] as SchemaObject).properties![
field.field
]
(openapi.components!.schemas![tag.name] as SchemaObject).properties![field.field]
) as SchemaObject) || this.generateField(field, relations, tags, fields);
}
@@ -384,12 +356,7 @@ class OASService implements SpecificationSubService {
};
for (const field of fieldsInCollection) {
schemaComponent.properties![field.field] = this.generateField(
field,
relations,
tags,
fields
);
schemaComponent.properties![field.field] = this.generateField(field, relations, tags, fields);
}
components.schemas[tag.name] = schemaComponent;
@@ -413,12 +380,7 @@ class OASService implements SpecificationSubService {
}
}
private generateField(
field: Field,
relations: Relation[],
tags: TagObject[],
fields: Field[]
): SchemaObject {
private generateField(field: Field, relations: Relation[], tags: TagObject[], fields: Field[]): SchemaObject {
let propertyObject: SchemaObject = {
nullable: field.schema?.is_nullable,
description: field.meta?.note || undefined,
@@ -426,8 +388,7 @@ class OASService implements SpecificationSubService {
const relation = relations.find(
(relation) =>
(relation.many_collection === field.collection &&
relation.many_field === field.field) ||
(relation.many_collection === field.collection && relation.many_field === field.field) ||
(relation.one_collection === field.collection && relation.one_field === field.field)
);
@@ -444,12 +405,9 @@ class OASService implements SpecificationSubService {
});
if (relationType === 'm2o') {
const relatedTag = tags.find(
(tag) => tag['x-collection'] === relation.one_collection
);
const relatedTag = tags.find((tag) => tag['x-collection'] === relation.one_collection);
const relatedPrimaryKeyField = fields.find(
(field) =>
field.collection === relation.one_collection && field.schema?.is_primary_key
(field) => field.collection === relation.one_collection && field.schema?.is_primary_key
);
if (!relatedTag || !relatedPrimaryKeyField) return propertyObject;
@@ -463,13 +421,9 @@ class OASService implements SpecificationSubService {
},
];
} else if (relationType === 'o2m') {
const relatedTag = tags.find(
(tag) => tag['x-collection'] === relation.many_collection
);
const relatedTag = tags.find((tag) => tag['x-collection'] === relation.many_collection);
const relatedPrimaryKeyField = fields.find(
(field) =>
field.collection === relation.many_collection &&
field.schema?.is_primary_key
(field) => field.collection === relation.many_collection && field.schema?.is_primary_key
);
if (!relatedTag || !relatedPrimaryKeyField) return propertyObject;
@@ -486,9 +440,7 @@ class OASService implements SpecificationSubService {
],
};
} else if (relationType === 'm2a') {
const relatedTags = tags.filter((tag) =>
relation.one_allowed_collections!.includes(tag['x-collection'])
);
const relatedTags = tags.filter((tag) => relation.one_allowed_collections!.includes(tag['x-collection']));
propertyObject.type = 'array';
propertyObject.items = {
@@ -510,15 +462,7 @@ class OASService implements SpecificationSubService {
private fieldTypes: Record<
typeof types[number],
{
type:
| 'string'
| 'number'
| 'boolean'
| 'object'
| 'array'
| 'integer'
| 'null'
| undefined;
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'integer' | 'null' | undefined;
format?: string;
items?: any;
}

View File

@@ -4,11 +4,7 @@ import jwt from 'jsonwebtoken';
import { sendInviteMail, sendPasswordResetMail } from '../mail';
import database from '../database';
import argon2 from 'argon2';
import {
InvalidPayloadException,
ForbiddenException,
UnprocessableEntityException,
} from '../exceptions';
import { InvalidPayloadException, ForbiddenException, UnprocessableEntityException } from '../exceptions';
import { Accountability, PrimaryKey, Item, AbstractServiceOptions, SchemaOverview } from '../types';
import Knex from 'knex';
import env from '../env';
@@ -50,7 +46,7 @@ export class UsersService extends ItemsService {
}
}
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}
@@ -104,11 +100,7 @@ export class UsersService extends ItemsService {
if (scope !== 'invite') throw new ForbiddenException();
const user = await this.knex
.select('id', 'status')
.from('directus_users')
.where({ email })
.first();
const user = await this.knex.select('id', 'status').from('directus_users').where({ email }).first();
if (!user || user.status !== 'invited') {
throw new InvalidPayloadException(`Email address ${email} hasn't been invited.`);
@@ -116,11 +108,9 @@ export class UsersService extends ItemsService {
const passwordHashed = await argon2.hash(password);
await this.knex('directus_users')
.update({ password: passwordHashed, status: 'active' })
.where({ id: user.id });
await this.knex('directus_users').update({ password: passwordHashed, status: 'active' }).where({ id: user.id });
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}
}
@@ -146,11 +136,7 @@ export class UsersService extends ItemsService {
if (scope !== 'password-reset') throw new ForbiddenException();
const user = await this.knex
.select('id', 'status')
.from('directus_users')
.where({ email })
.first();
const user = await this.knex.select('id', 'status').from('directus_users').where({ email }).first();
if (!user || user.status !== 'active') {
throw new ForbiddenException();
@@ -158,21 +144,15 @@ export class UsersService extends ItemsService {
const passwordHashed = await argon2.hash(password);
await this.knex('directus_users')
.update({ password: passwordHashed, status: 'active' })
.where({ id: user.id });
await this.knex('directus_users').update({ password: passwordHashed, status: 'active' }).where({ id: user.id });
if (cache) {
if (cache && env.CACHE_AUTO_PURGE) {
await cache.clear();
}
}
async enableTFA(pk: string) {
const user = await this.knex
.select('tfa_secret')
.from('directus_users')
.where({ id: pk })
.first();
const user = await this.knex.select('tfa_secret').from('directus_users').where({ id: pk }).first();
if (user?.tfa_secret !== null) {
throw new InvalidPayloadException('TFA Secret is already set for this user');

View File

@@ -17,18 +17,13 @@ export class UtilsService {
async sort(collection: string, { item, to }: { item: PrimaryKey; to: PrimaryKey }) {
const sortFieldResponse =
(await this.knex
.select('sort_field')
.from('directus_collections')
.where({ collection })
.first()) || systemCollectionRows;
(await this.knex.select('sort_field').from('directus_collections').where({ collection }).first()) ||
systemCollectionRows;
const sortField = sortFieldResponse?.sort_field;
if (!sortField) {
throw new InvalidPayloadException(
`Collection "${collection}" doesn't have a sort field.`
);
throw new InvalidPayloadException(`Collection "${collection}" doesn't have a sort field.`);
}
if (this.accountability?.admin !== true) {
@@ -56,11 +51,7 @@ export class UtilsService {
const primaryKeyField = this.schema[collection].primary;
// Make sure all rows have a sort value
const countResponse = await this.knex
.count('* as count')
.from(collection)
.whereNull(sortField)
.first();
const countResponse = await this.knex.count('* as count').from(collection).whereNull(sortField).first();
if (countResponse?.count && +countResponse.count !== 0) {
const lastSortValueResponse = await this.knex.max(sortField).from(collection).first();

View File

@@ -1,9 +1,4 @@
import {
StorageManager,
LocalFileSystemStorage,
StorageManagerConfig,
Storage,
} from '@slynova/flydrive';
import { StorageManager, LocalFileSystemStorage, StorageManagerConfig, Storage } from '@slynova/flydrive';
import env from './env';
import { validateEnv } from './utils/validate-env';
import { getConfigFromEnv } from './utils/get-config-from-env';

View File

@@ -3,6 +3,7 @@ import { Query, Filter, Relation, SchemaOverview } from '../types';
import Knex from 'knex';
import { clone, isPlainObject } from 'lodash';
import { systemRelationRows } from '../database/system-data/relations';
import { nanoid } from 'nanoid';
export default async function applyQuery(
knex: Knex,
@@ -42,9 +43,7 @@ export default async function applyQuery(
columns
/** @todo Check if this scales between SQL vendors */
.filter(
(column) =>
column.data_type.toLowerCase().includes('text') ||
column.data_type.toLowerCase().includes('char')
(column) => column.data_type.toLowerCase().includes('text') || column.data_type.toLowerCase().includes('char')
)
.forEach((column) => {
this.orWhereRaw(`LOWER(??) LIKE ?`, [column.column_name, `%${query.search!}%`]);
@@ -53,195 +52,17 @@ export default async function applyQuery(
}
}
export async function applyFilter(
knex: Knex,
rootQuery: QueryBuilder,
rootFilter: Filter,
collection: string
) {
const relations: Relation[] = [
...(await knex.select('*').from('directus_relations')),
...systemRelationRows,
];
export async function applyFilter(knex: Knex, rootQuery: QueryBuilder, rootFilter: Filter, collection: string) {
const relations: Relation[] = [...(await knex.select('*').from('directus_relations')), ...systemRelationRows];
const aliasMap: Record<string, string> = {};
addWhereClauses(rootQuery, rootFilter, collection);
addJoins(rootQuery, rootFilter, collection);
addWhereClauses(rootQuery, rootFilter, collection);
function addWhereClauses(dbQuery: QueryBuilder, filter: Filter, collection: string) {
for (const [key, value] of Object.entries(filter)) {
if (key === '_or') {
/** @NOTE these callback functions aren't called until Knex runs the query */
dbQuery.orWhere((subQuery) => {
value.forEach((subFilter: Record<string, any>) => {
addWhereClauses(subQuery, subFilter, collection);
});
});
continue;
}
if (key === '_and') {
/** @NOTE these callback functions aren't called until Knex runs the query */
dbQuery.andWhere((subQuery) => {
value.forEach((subFilter: Record<string, any>) => {
addWhereClauses(subQuery, subFilter, collection);
});
});
continue;
}
const filterPath = getFilterPath(key, value);
const { operator: filterOperator, value: filterValue } = getOperation(key, value);
if (filterPath.length > 1) {
const columnName = getWhereColumn(filterPath, collection);
applyFilterToQuery(columnName, filterOperator, filterValue);
} else {
applyFilterToQuery(`${collection}.${filterPath[0]}`, filterOperator, filterValue);
}
}
function applyFilterToQuery(key: string, operator: string, compareValue: any) {
if (operator === '_eq') {
dbQuery.where({ [key]: compareValue });
}
if (operator === '_neq') {
dbQuery.whereNot({ [key]: compareValue });
}
if (operator === '_contains') {
dbQuery.where(key, 'like', `%${compareValue}%`);
}
if (operator === '_ncontains') {
dbQuery.whereNot(key, 'like', `%${compareValue}%`);
}
if (operator === '_gt') {
dbQuery.where(key, '>', compareValue);
}
if (operator === '_gte') {
dbQuery.where(key, '>=', compareValue);
}
if (operator === '_lt') {
dbQuery.where(key, '<', compareValue);
}
if (operator === '_lte') {
dbQuery.where(key, '<=', compareValue);
}
if (operator === '_in') {
let value = compareValue;
if (typeof value === 'string') value = value.split(',');
dbQuery.whereIn(key, value as string[]);
}
if (operator === '_nin') {
let value = compareValue;
if (typeof value === 'string') value = value.split(',');
dbQuery.whereNotIn(key, value as string[]);
}
if (operator === '_null') {
dbQuery.whereNull(key);
}
if (operator === '_nnull') {
dbQuery.whereNotNull(key);
}
if (operator === '_empty') {
dbQuery.andWhere((query) => {
query.whereNull(key);
query.orWhere(key, '=', '');
});
}
if (operator === '_nempty') {
dbQuery.andWhere((query) => {
query.whereNotNull(key);
query.orWhere(key, '!=', '');
});
}
if (operator === '_between') {
let value = compareValue;
if (typeof value === 'string') value = value.split(',');
dbQuery.whereBetween(key, value);
}
if (operator === '_nbetween') {
let value = compareValue;
if (typeof value === 'string') value = value.split(',');
dbQuery.whereNotBetween(key, value);
}
}
function getWhereColumn(path: string[], collection: string) {
path = clone(path);
let columnName = '';
followRelation(path);
return columnName;
function followRelation(pathParts: string[], parentCollection: string = collection) {
const relation = relations.find((relation) => {
return (
(relation.many_collection === parentCollection &&
relation.many_field === pathParts[0]) ||
(relation.one_collection === parentCollection &&
relation.one_field === pathParts[0])
);
});
if (!relation) return;
const isM2O =
relation.many_collection === parentCollection &&
relation.many_field === pathParts[0];
pathParts.shift();
const parent = isM2O ? relation.one_collection! : relation.many_collection;
if (pathParts.length === 1) {
columnName = `${parent}.${pathParts[0]}`;
}
if (pathParts.length) {
followRelation(pathParts, parent);
}
}
}
}
/**
* @NOTE Yes this is very similar in structure and functionality as the other loop. However,
* due to the order of execution that Knex has in the nested andWhere / orWhere structures,
* joins that are added in there aren't added in time
*/
function addJoins(dbQuery: QueryBuilder, filter: Filter, collection: string) {
for (const [key, value] of Object.entries(filter)) {
if (key === '_or') {
value.forEach((subFilter: Record<string, any>) => {
addJoins(dbQuery, subFilter, collection);
});
continue;
}
if (key === '_and') {
if (key === '_or' || key === '_and') {
value.forEach((subFilter: Record<string, any>) => {
addJoins(dbQuery, subFilter, collection);
});
@@ -261,33 +82,32 @@ export async function applyFilter(
followRelation(path);
function followRelation(pathParts: string[], parentCollection: string = collection) {
function followRelation(pathParts: string[], parentCollection: string = collection, parentAlias?: string) {
const relation = relations.find((relation) => {
return (
(relation.many_collection === parentCollection &&
relation.many_field === pathParts[0]) ||
(relation.one_collection === parentCollection &&
relation.one_field === pathParts[0])
(relation.many_collection === parentCollection && relation.many_field === pathParts[0]) ||
(relation.one_collection === parentCollection && relation.one_field === pathParts[0])
);
});
if (!relation) return;
const isM2O =
relation.many_collection === parentCollection &&
relation.many_field === pathParts[0];
const isM2O = relation.many_collection === parentCollection && relation.many_field === pathParts[0];
const alias = nanoid(8);
aliasMap[pathParts.join('+')] = alias;
if (isM2O) {
dbQuery.leftJoin(
relation.one_collection!,
`${parentCollection}.${relation.many_field}`,
`${relation.one_collection}.${relation.one_primary}`
{ [alias]: relation.one_collection! },
`${parentAlias || parentCollection}.${relation.many_field}`,
`${alias}.${relation.one_primary}`
);
} else {
dbQuery.leftJoin(
relation.many_collection,
`${parentCollection}.${relation.one_primary}`,
`${relation.many_collection}.${relation.many_field}`
{ [alias]: relation.many_collection },
`${parentAlias || parentCollection}.${relation.one_primary}`,
`${alias}.${relation.many_field}`
);
}
@@ -295,6 +115,151 @@ export async function applyFilter(
const parent = isM2O ? relation.one_collection! : relation.many_collection;
if (pathParts.length) {
followRelation(pathParts, parent, alias);
}
}
}
}
function addWhereClauses(dbQuery: QueryBuilder, filter: Filter, collection: string, logical: 'and' | 'or' = 'and') {
for (const [key, value] of Object.entries(filter)) {
if (key === '_or' || key === '_and') {
/** @NOTE this callback function isn't called until Knex runs the query */
dbQuery.where((subQuery) => {
value.forEach((subFilter: Record<string, any>) => {
addWhereClauses(subQuery, subFilter, collection, key === '_and' ? 'and' : 'or');
});
});
continue;
}
const filterPath = getFilterPath(key, value);
const { operator: filterOperator, value: filterValue } = getOperation(key, value);
if (filterPath.length > 1) {
const columnName = getWhereColumn(filterPath, collection);
applyFilterToQuery(columnName, filterOperator, filterValue, logical);
} else {
applyFilterToQuery(`${collection}.${filterPath[0]}`, filterOperator, filterValue, logical);
}
}
/**
 * Apply a single filter operation to the query builder.
 *
 * @param key - Fully qualified column name (e.g. `collection.field`)
 * @param operator - Directus filter operator (`_eq`, `_in`, `_between`, …)
 * @param compareValue - Value to compare against; list operators accept either
 *   an array or a CSV string
 * @param logical - Which clause group to attach to (`and` | `or`), defaults to `and`
 *
 * NOTE(review): `dbQuery` comes from the enclosing scope — presumably a Knex
 * query builder; confirm against the surrounding function.
 */
function applyFilterToQuery(key: string, operator: string, compareValue: any, logical: 'and' | 'or' = 'and') {
	// List-style operators allow a CSV string shorthand; normalize it to an array.
	const toList = (val: any) => (typeof val === 'string' ? val.split(',') : val);

	switch (operator) {
		case '_eq':
			dbQuery[logical].where({ [key]: compareValue });
			break;
		case '_neq':
			dbQuery[logical].whereNot({ [key]: compareValue });
			break;
		case '_contains':
			dbQuery[logical].where(key, 'like', `%${compareValue}%`);
			break;
		case '_ncontains':
			dbQuery[logical].whereNot(key, 'like', `%${compareValue}%`);
			break;
		case '_gt':
			dbQuery[logical].where(key, '>', compareValue);
			break;
		case '_gte':
			dbQuery[logical].where(key, '>=', compareValue);
			break;
		case '_lt':
			dbQuery[logical].where(key, '<', compareValue);
			break;
		case '_lte':
			dbQuery[logical].where(key, '<=', compareValue);
			break;
		case '_in':
			dbQuery[logical].whereIn(key, toList(compareValue) as string[]);
			break;
		case '_nin':
			dbQuery[logical].whereNotIn(key, toList(compareValue) as string[]);
			break;
		case '_null':
			dbQuery[logical].whereNull(key);
			break;
		case '_nnull':
			dbQuery[logical].whereNotNull(key);
			break;
		case '_empty':
			// "Empty" means either SQL NULL or the empty string.
			dbQuery[logical].andWhere((query) => {
				query.whereNull(key);
				query.orWhere(key, '=', '');
			});
			break;
		case '_nempty':
			dbQuery[logical].andWhere((query) => {
				query.whereNotNull(key);
				query.orWhere(key, '!=', '');
			});
			break;
		case '_between':
			dbQuery[logical].whereBetween(key, toList(compareValue));
			break;
		case '_nbetween':
			dbQuery[logical].whereNotBetween(key, toList(compareValue));
			break;
	}
}
function getWhereColumn(path: string[], collection: string) {
path = clone(path);
let columnName = '';
followRelation(path);
return columnName;
function followRelation(pathParts: string[], parentCollection: string = collection) {
const relation = relations.find((relation) => {
return (
(relation.many_collection === parentCollection && relation.many_field === pathParts[0]) ||
(relation.one_collection === parentCollection && relation.one_field === pathParts[0])
);
});
if (!relation) return;
const isM2O = relation.many_collection === parentCollection && relation.many_field === pathParts[0];
const alias = aliasMap[pathParts.join('+')];
pathParts.shift();
const parent = isM2O ? relation.one_collection! : relation.many_collection;
if (pathParts.length === 1) {
columnName = `${alias || parent}.${pathParts[0]}`;
}
if (pathParts.length) {
followRelation(pathParts, parent);
}

View File

@@ -5,9 +5,7 @@ export function deepMap(
): any {
if (Array.isArray(object)) {
return object.map(function (val, key) {
return typeof val === 'object'
? deepMap(val, iterator, context)
: iterator.call(context, val, key);
return typeof val === 'object' ? deepMap(val, iterator, context) : iterator.call(context, val, key);
});
} else if (typeof object === 'object') {
const res: Record<string, any> = {};

View File

@@ -64,7 +64,7 @@ export default function generateJoi(filter: Filter | null): AnySchema {
if (!schema) schema = {};
const operator = Object.keys(value)[0];
const val = Object.keys(value)[1];
const val = Object.values(value)[0];
schema[key] = getJoi(operator, val);
}

View File

@@ -25,6 +25,10 @@ type GetASTOptions = {
knex?: Knex;
};
type anyNested = {
[collectionScope: string]: string[];
};
export default async function getASTFromQuery(
collection: string,
query: Query,
@@ -41,10 +45,7 @@ export default async function getASTFromQuery(
* we might not need al this info at all times, but it's easier to fetch it all once, than trying to fetch it for every
* requested field. @todo look into utilizing graphql/dataloader for this purpose
*/
const relations = [
...(await knex.select<Relation[]>('*').from('directus_relations')),
...systemRelationRows,
];
const relations = [...(await knex.select<Relation[]>('*').from('directus_relations')), ...systemRelationRows];
const permissions =
accountability && accountability.admin !== true
@@ -72,39 +73,58 @@ export default async function getASTFromQuery(
return ast;
async function parseFields(
parentCollection: string,
fields: string[],
deep?: Record<string, Query>
) {
async function parseFields(parentCollection: string, fields: string[] | null, deep?: Record<string, Query>) {
if (!fields) return [];
fields = await convertWildcards(parentCollection, fields);
if (!fields) return [];
const children: (NestedCollectionNode | FieldNode)[] = [];
const relationalStructure: Record<string, string[]> = {};
const relationalStructure: Record<string, string[] | anyNested> = {};
for (const field of fields) {
const isRelational =
field.includes('.') ||
// We'll always treat top level o2m fields as a related item. This is an alias field, otherwise it won't return
// anything
!!relations.find(
(relation) =>
relation.one_collection === parentCollection && relation.one_field === field
);
!!relations.find((relation) => relation.one_collection === parentCollection && relation.one_field === field);
if (isRelational) {
// field is relational
const parts = field.split('.');
if (relationalStructure.hasOwnProperty(parts[0]) === false) {
relationalStructure[parts[0]] = [];
let fieldKey = parts[0];
let collectionScope: string | null = null;
// m2a related collection scoped field selector `fields=sections.section_id:headings.title`
if (fieldKey.includes(':')) {
const [key, scope] = fieldKey.split(':');
fieldKey = key;
collectionScope = scope;
}
if (relationalStructure.hasOwnProperty(fieldKey) === false) {
if (collectionScope) {
relationalStructure[fieldKey] = { [collectionScope]: [] };
} else {
relationalStructure[fieldKey] = [];
}
}
if (parts.length > 1) {
relationalStructure[parts[0]].push(parts.slice(1).join('.'));
const childKey = parts.slice(1).join('.');
if (collectionScope) {
if (collectionScope in relationalStructure[fieldKey] === false) {
(relationalStructure[fieldKey] as anyNested)[collectionScope] = [];
}
(relationalStructure[fieldKey] as anyNested)[collectionScope].push(childKey);
} else {
(relationalStructure[fieldKey] as string[]).push(childKey);
}
}
} else {
children.push({ type: 'field', name: field });
@@ -128,14 +148,10 @@ export default async function getASTFromQuery(
let child: NestedCollectionNode | null = null;
if (relationType === 'm2a') {
const allowedCollections = relation
.one_allowed_collections!.split(',')
.filter((collection) => {
if (!permissions) return true;
return permissions.some(
(permission) => permission.collection === collection
);
});
const allowedCollections = relation.one_allowed_collections!.split(',').filter((collection) => {
if (!permissions) return true;
return permissions.some((permission) => permission.collection === collection);
});
child = {
type: 'm2a',
@@ -151,18 +167,13 @@ export default async function getASTFromQuery(
for (const relatedCollection of allowedCollections) {
child.children[relatedCollection] = await parseFields(
relatedCollection,
nestedFields
Array.isArray(nestedFields) ? nestedFields : (nestedFields as anyNested)[relatedCollection] || ['*']
);
child.query[relatedCollection] = {};
child.relatedKey[relatedCollection] = schema[relatedCollection].primary;
}
} else if (relatedCollection) {
if (
permissions &&
permissions.some(
(permission) => permission.collection === relatedCollection
) === false
) {
if (permissions && permissions.some((permission) => permission.collection === relatedCollection) === false) {
continue;
}
@@ -174,7 +185,7 @@ export default async function getASTFromQuery(
relatedKey: schema[relatedCollection].primary,
relation: relation,
query: deep?.[relationalField] || {},
children: await parseFields(relatedCollection, nestedFields),
children: await parseFields(relatedCollection, nestedFields as string[]),
};
}
@@ -192,9 +203,7 @@ export default async function getASTFromQuery(
const fieldsInCollection = await getFieldsInCollection(parentCollection);
const allowedFields = permissions
? permissions
.find((permission) => parentCollection === permission.collection)
?.fields?.split(',')
? permissions.find((permission) => parentCollection === permission.collection)?.fields?.split(',')
: fieldsInCollection;
if (!allowedFields || allowedFields.length === 0) return [];
@@ -222,8 +231,7 @@ export default async function getASTFromQuery(
? relations
.filter(
(relation) =>
relation.many_collection === parentCollection ||
relation.one_collection === parentCollection
relation.many_collection === parentCollection || relation.one_collection === parentCollection
)
.map((relation) => {
const isMany = relation.many_collection === parentCollection;
@@ -231,9 +239,7 @@ export default async function getASTFromQuery(
})
: allowedFields.filter((fieldKey) => !!getRelation(parentCollection, fieldKey));
const nonRelationalFields = fieldsInCollection.filter(
(fieldKey) => relationalFields.includes(fieldKey) === false
);
const nonRelationalFields = allowedFields.filter((fieldKey) => relationalFields.includes(fieldKey) === false);
fields.splice(
index,
@@ -281,12 +287,8 @@ export default async function getASTFromQuery(
async function getFieldsInCollection(collection: string) {
const columns = Object.keys(schema[collection].columns);
const fields = [
...(await knex.select('field').from('directus_fields').where({ collection })).map(
(field) => field.field
),
...systemFieldRows
.filter((fieldMeta) => fieldMeta.collection === collection)
.map((fieldMeta) => fieldMeta.field),
...(await knex.select('field').from('directus_fields').where({ collection })).map((field) => field.field),
...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === collection).map((fieldMeta) => fieldMeta.field),
];
const fieldsInCollection = [

View File

@@ -3,8 +3,6 @@ import url from 'url';
export function getCacheKey(req: Request) {
const path = url.parse(req.originalUrl).pathname;
const key = `${req.accountability?.user || 'null'}-${path}-${JSON.stringify(
req.sanitizedQuery
)}`;
const key = `${req.accountability?.user || 'null'}-${path}-${JSON.stringify(req.sanitizedQuery)}`;
return key;
}

View File

@@ -1,13 +1,33 @@
import camelcase from 'camelcase';
import env from '../env';
import { set } from 'lodash';
export function getConfigFromEnv(prefix: string, omitPrefix?: string) {
export function getConfigFromEnv(prefix: string, omitPrefix?: string | string[]) {
const config: any = {};
for (const [key, value] of Object.entries(env)) {
if (key.toLowerCase().startsWith(prefix.toLowerCase()) === false) continue;
if (omitPrefix && key.toLowerCase().startsWith(omitPrefix.toLowerCase()) === true) continue;
config[camelcase(key.slice(prefix.length))] = value;
if (omitPrefix) {
let matches = false;
if (Array.isArray(omitPrefix)) {
matches = omitPrefix.some((prefix) => key.toLowerCase().startsWith(prefix.toLowerCase()));
} else {
matches = key.toLowerCase().startsWith(omitPrefix.toLowerCase());
}
if (matches) continue;
}
if (key.includes('__')) {
const path = key
.split('__')
.map((key, index) => (index === 0 ? camelcase(camelcase(key.slice(prefix.length))) : camelcase(key)));
set(config, path.join('.'), value);
} else {
config[camelcase(key.slice(prefix.length))] = value;
}
}
return config;

View File

@@ -2,19 +2,19 @@ import getLocalType from './get-local-type';
import { Column } from '@directus/schema/dist/types/column';
import { SchemaOverview } from '../types';
export default function getDefaultValue(
column: SchemaOverview[string]['columns'][string] | Column
) {
export default function getDefaultValue(column: SchemaOverview[string]['columns'][string] | Column) {
const type = getLocalType(column);
let defaultValue = column.default_value || null;
if (defaultValue === null) return null;
if (defaultValue === 'null') return null;
if (defaultValue === 'NULL') return null;
// Check if the default is wrapped in an extra pair of quotes, this happens in SQLite
if (
typeof defaultValue === 'string' &&
defaultValue.startsWith(`'`) &&
defaultValue.endsWith(`'`)
((defaultValue.startsWith(`'`) && defaultValue.endsWith(`'`)) ||
(defaultValue.startsWith(`"`) && defaultValue.endsWith(`"`)))
) {
defaultValue = defaultValue.slice(1, -1);
}

View File

@@ -16,13 +16,15 @@ const profileMap: Record<string, string> = {};
* This is used in the SSO flow to extract the users
*/
export default function getEmailFromProfile(provider: string, profile: Record<string, any>) {
const path =
profileMap[provider] || env[`OAUTH_${provider.toUpperCase()}_PROFILE_EMAIL`] || 'email';
const path = profileMap[provider] || env[`OAUTH_${provider.toUpperCase()}_PROFILE_EMAIL`] || 'email';
const email = get(profile, path);
if (!email) {
throw new ServiceUnavailableException("Couldn't extract email address from SSO provider response", { service: 'oauth', provider });
throw new ServiceUnavailableException("Couldn't extract email address from SSO provider response", {
service: 'oauth',
provider,
});
}
return email;

View File

@@ -87,11 +87,7 @@ export default function getLocalType(
const type = localTypeMap[column.data_type.toLowerCase().split('(')[0]];
/** Handle Postgres numeric decimals */
if (
column.data_type === 'numeric' &&
column.numeric_precision !== null &&
column.numeric_scale !== null
) {
if (column.data_type === 'numeric' && column.numeric_precision !== null && column.numeric_scale !== null) {
return 'decimal';
}

View File

@@ -20,10 +20,7 @@ export default function parseIPTC(buffer: Buffer) {
let lastIptcEntryPos = buffer.indexOf(IPTC_ENTRY_MARKER);
while (lastIptcEntryPos !== -1) {
lastIptcEntryPos = buffer.indexOf(
IPTC_ENTRY_MARKER,
lastIptcEntryPos + IPTC_ENTRY_MARKER.byteLength
);
lastIptcEntryPos = buffer.indexOf(IPTC_ENTRY_MARKER, lastIptcEntryPos + IPTC_ENTRY_MARKER.byteLength);
let iptcBlockTypePos = lastIptcEntryPos + IPTC_ENTRY_MARKER.byteLength;
let iptcBlockSizePos = iptcBlockTypePos + 1;

View File

@@ -3,10 +3,7 @@ import logger from '../logger';
import { parseFilter } from '../utils/parse-filter';
import { flatten } from 'lodash';
export function sanitizeQuery(
rawQuery: Record<string, any>,
accountability: Accountability | null
) {
export function sanitizeQuery(rawQuery: Record<string, any>, accountability: Accountability | null) {
const query: Query = {};
if (rawQuery.limit !== undefined) {
@@ -75,6 +72,8 @@ function sanitizeFields(rawFields: any) {
// Case where array item includes CSV (fe fields[]=id,name):
fields = flatten(fields.map((field) => (field.includes(',') ? field.split(',') : field)));
fields = fields.map((field) => field.trim());
return fields;
}

View File

@@ -79,13 +79,8 @@ function validateFilter(filter: Query['filter']) {
}
function validateFilterPrimitive(value: any, key: string) {
if (
(typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') ===
false
) {
throw new InvalidQueryException(
`The filter value for "${key}" has to be a string or a number`
);
if ((typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') === false) {
throw new InvalidQueryException(`The filter value for "${key}" has to be a string or a number`);
}
if (typeof value === 'number' && Number.isNaN(value)) {

View File

@@ -10,10 +10,7 @@ let registered: { event: string; handler: ListenerFn }[] = [];
export async function register() {
unregister();
const webhooks = await database
.select<Webhook[]>('*')
.from('directus_webhooks')
.where({ status: 'active' });
const webhooks = await database.select<Webhook[]>('*').from('directus_webhooks').where({ status: 'active' });
for (const webhook of webhooks) {
if (webhook.actions === '*') {
@@ -43,11 +40,7 @@ export function unregister() {
function createHandler(webhook: Webhook): ListenerFn {
return async (data) => {
const collectionAllowList = webhook.collections.split(',');
if (
collectionAllowList.includes('*') === false &&
collectionAllowList.includes(data.collection) === false
)
return;
if (collectionAllowList.includes('*') === false && collectionAllowList.includes(data.collection) === false) return;
try {
await axios({

View File

@@ -14,6 +14,7 @@
"declaration": true
},
"exclude": [
"node_modules"
"node_modules",
"dist"
]
}

View File

@@ -1,7 +0,0 @@
{
"defaultSeverity": "error",
"extends": ["tslint:recommended"],
"jsRules": {},
"rules": {},
"rulesDirectory": []
}

View File

@@ -5,7 +5,6 @@ end_of_line = lf
insert_final_newline = true
charset = utf-8
indent_style = tab
indent_size = 4
trim_trailing_whitespace = true
[{package.json,*.yml,*.yaml}]

3
app/.eslintignore Normal file
View File

@@ -0,0 +1,3 @@
node_modules
dist
.eslintrc.js

View File

@@ -1,35 +1,11 @@
const parentConfig = require('../.eslintrc.js');
module.exports = {
root: true,
env: {
node: true,
},
extends: [
'plugin:vue/essential',
'@vue/typescript/recommended',
'@vue/prettier',
'@vue/prettier/@typescript-eslint',
],
...parentConfig,
extends: ['plugin:vue/essential', '@vue/typescript/recommended', '@vue/prettier', '@vue/prettier/@typescript-eslint'],
rules: {
'no-console': process.env.NODE_ENV === 'production' ? 'error' : 'off',
'no-debugger': process.env.NODE_ENV === 'production' ? 'error' : 'off',
'prettier/prettier': ['error', { singleQuote: true }],
'@typescript-eslint/camelcase': 0,
'@typescript-eslint/no-use-before-define': 0,
'@typescript-eslint/ban-ts-ignore': 0,
'@typescript-eslint/no-explicit-any': 0,
...parentConfig.rules,
'vue/valid-v-slot': 0,
'comma-dangle': [
'error',
{
arrays: 'always-multiline',
exports: 'always-multiline',
functions: 'never',
imports: 'always-multiline',
objects: 'always-multiline',
},
],
},
parserOptions: {
parser: '@typescript-eslint/parser',
},
};

View File

@@ -1,6 +0,0 @@
{
"htmlWhitespaceSensitivity": "ignore",
"printWidth": 120,
"singleQuote": true,
"useTabs": true
}

5
app/.prettierrc.js Normal file
View File

@@ -0,0 +1,5 @@
const parentConfig = require('../.prettierrc.js');
module.exports = {
...parentConfig,
};

View File

@@ -1,5 +0,0 @@
files:
- source: /src/lang/en-US/*.json
ignore:
- /src/lang/en-US/date-format.json
translation: /src/lang/%locale%/%original_file_name%

View File

@@ -1,6 +1,6 @@
{
"name": "@directus/app",
"version": "9.0.0-rc.14",
"version": "9.0.0-rc.23",
"private": false,
"description": "Directus is an Open-Source Headless CMS & API for Managing Custom Databases",
"author": "Rijk van Zanten <rijk@rngr.org>",
@@ -12,7 +12,7 @@
],
"repository": {
"type": "git",
"url": "git+https://github.com/directus/next.git"
"url": "git+https://github.com/directus/directus.git"
},
"publishConfig": {
"access": "public"
@@ -20,17 +20,19 @@
"scripts": {
"dev": "vue-cli-service serve",
"build": "vue-cli-service build",
"test": "vue-cli-service test:unit",
"lint": "vue-cli-service lint",
"lint:styles": "stylelint \"**/*.{vue,scss}\"",
"fix": "prettier --write \"src/**/*.{js,vue,ts}\"",
"fix:styles": "stylelint --fix \"**/*.{vue,scss}\"",
"storybook": "start-storybook -p 6006",
"build-storybook": "build-storybook",
"prepublishOnly": "npm run build"
"prepublishOnly": "npm run build",
"prettier": "prettier --write \"src/**/*.ts\""
},
"dependencies": {},
"gitHead": "4476da28dbbc2824e680137aa28b2b91b5afabec",
"dependencies": {
"@directus/format-title": "file:../packages/format-title"
},
"devDependencies": {
"@vue/cli-plugin-babel": "^4.5.8",
"@vue/cli-plugin-eslint": "^4.5.8",
@@ -40,6 +42,8 @@
"@vue/cli-service": "^4.5.8",
"@vue/eslint-config-prettier": "^6.0.0",
"@vue/eslint-config-typescript": "^7.0.0",
"@vue/test-utils": "^1.1.1"
"@vue/test-utils": "^1.1.1",
"prettier": "^2.2.1",
"vue-cli-plugin-yaml": "^1.0.2"
}
}

View File

@@ -6,6 +6,11 @@ import getRootPath from '@/utils/get-root-path';
const api = axios.create({
baseURL: getRootPath(),
withCredentials: true,
headers: {
'Cache-Control': 'no-cache',
Pragma: 'no-cache',
Expires: '0',
},
});
interface RequestConfig extends AxiosRequestConfig {

View File

@@ -40,9 +40,7 @@ export default function (expandedParentClass = '', xAxis = false) {
void el.offsetHeight; // force reflow
el.style.transition =
initialStyle.transition !== ''
? initialStyle.transition
: `${sizeProperty} var(--medium) var(--transition)`;
initialStyle.transition !== '' ? initialStyle.transition : `${sizeProperty} var(--medium) var(--transition)`;
if (expandedParentClass && el._parent) {
el._parent.classList.add(expandedParentClass);

View File

@@ -8,7 +8,7 @@
</v-avatar>
<v-avatar>
<v-icon name="person">
<v-icon name="person" />
</v-avatar>
```

Some files were not shown because too many files have changed in this diff Show More