Merge branch 'main' into fix-356
@@ -5,7 +5,6 @@ end_of_line = lf
insert_final_newline = true
charset = utf-8
indent_style = tab
indent_size = 4
trim_trailing_whitespace = true

[{package.json,*.yml,*.yaml}]
@@ -13,9 +12,7 @@ indent_style = space
indent_size = 2

[Dockerfile]
indent_size = 2
indent_style = tab

[Makefile]
indent_size = 2
indent_style = tab

31 .eslintrc.js Normal file
@@ -0,0 +1,31 @@
module.exports = {
	root: true,
	env: {
		node: true,
	},
	extends: ['plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended'],
	plugins: ['@typescript-eslint', 'prettier'],
	rules: {
		'no-console': process.env.NODE_ENV === 'production' ? 'error' : 'off',
		'no-debugger': process.env.NODE_ENV === 'production' ? 'error' : 'off',
		'@typescript-eslint/camelcase': 0,
		'@typescript-eslint/no-use-before-define': 0,
		'@typescript-eslint/ban-ts-ignore': 0,
		'@typescript-eslint/no-explicit-any': 0,
		'@typescript-eslint/no-var-requires': 0,
		'prettier/prettier': ['error', { singleQuote: true }],
		'comma-dangle': [
			'error',
			{
				arrays: 'always-multiline',
				exports: 'always-multiline',
				functions: 'never',
				imports: 'always-multiline',
				objects: 'always-multiline',
			},
		],
	},
	parserOptions: {
		parser: '@typescript-eslint/parser',
	},
};
8 .github/FUNDING.yml vendored Normal file
@@ -0,0 +1,8 @@
# These are supported funding model platforms

github: [directus, benhaynes, rijkvanzanten]
patreon: directus # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
custom: # Replace with a single custom sponsorship URL
30 .github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
@@ -0,0 +1,30 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
---

<!--

Hi, thank you for taking the time to create an issue.

Can you please provide:

1) The issue and what you expected to happen
   The _ _ does _ _ when _ _ while it should _ _

2) Exact steps to reproduce this issue
   Click this, tap that, see error _ _

3) Your environment:
   Which DBMS are you using (MySQL 8, Postgres 12, ...).
   Which deployment are you using (npx, Docker, ...).
   What browser are you using (Chrome 87, Safari 14, ...).

4) Any other relevant information we might need to reproduce this issue
   A SQL dump of the setup.
   What third party services you rely on (S3, managed database, ...).

-->
8 .github/ISSUE_TEMPLATE/config.yml vendored Normal file
@@ -0,0 +1,8 @@
blank_issues_enabled: false
contact_links:
  - name: Feature Request
    url: https://github.com/directus/directus/discussions/new
    about: Share your ideas on how to make Directus better.
  - name: Directus Community Support
    url: https://directus.chat/
    about: Please ask and answer questions here.
2 .github/actions/Makefile vendored
@@ -5,7 +5,7 @@ tag=$(version)
cmd=
user=directus
registry=ghcr.io
repository=directus/next
repository=directus/directus

.PHONY: build

33 .github/actions/build-images/rootfs/directus/images/main/examples/docker-compose.yml vendored Normal file
@@ -0,0 +1,33 @@
version: "3"
services:
  database:
    image: postgres:12
    networks:
      - "directus"
    environment:
      POSTGRES_DB: "directus"
      POSTGRES_USER: "directus"
      POSTGRES_PASSWORD: "directus"

  directus:
    build:
      context: "../"
      args:
        VERSION: "v9.0.0-rc.5"
        REPOSITORY: "directus/directus"
    ports:
      - 8055:8055
    networks:
      - "directus"
    environment:
      KEY: mykey
      SECRET: mysecret
      DB_CLIENT: "pg"
      DB_HOST: "database"
      DB_PORT: "5432"
      DB_USER: "directus"
      DB_PASSWORD: "directus"
      DB_DATABASE: "directus"

networks:
  directus:
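A minimal sketch of bringing this example stack up, assuming the compose file is used from the `examples/` directory shown above (so the build context `../` resolves to the image's Dockerfile; `mykey`/`mysecret` are placeholders to replace):

```
# from the directory containing the example docker-compose.yml
docker-compose up -d
# Postgres and Directus start on the "directus" network;
# the API and admin app should then be reachable at http://localhost:8055
```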
@@ -2,40 +2,6 @@

set -e

function seed() {
	# TODO: move users to a separate check, outside database installation
	local show=false
	local email=${DIRECTUS_ADMIN_EMAIL:-"admin@example.com"}
	local password=${DIRECTUS_ADMIN_PASSWORD:-""}

	if [ "${password}" == "" ] ; then
		password=$(node -e 'console.log(require("nanoid").nanoid(12))')
		show=true
	fi

	print --level=info "Creating administrator role"
	local role=$(npx directus roles create --name Administrator --admin)

	print --level=info "Creating administrator user"
	local user=$(npx directus users create --email "${email}" --password "${password}" --role "${role}")

	if [ "${show}" == "true" ] ; then
		print --level=info --stdin <<MSG
>
> Email: $email
> Password: $password
>
MSG
	else
		print --level=info --stdin <<MSG
>
> Email: $email
> Password: <env>
>
MSG
	fi
}

function bootstrap() {
	local warn=false

@@ -52,20 +18,20 @@ function bootstrap() {
	if [ "${warn}" == "true" ] ; then
		print --level=warn --stdin <<WARN
>
> WARNING!
> WARNING!
>
> The KEY and SECRET environment variables are not set.
> Some temporary variables were generated to fill the gap,
> but in production this is going to cause problems.
> The KEY and SECRET environment variables are not set. Some
> temporary variables were generated to fill the gap, but in
> production this is going to cause problems.
>
> Reference:
> https://docs.directus.io/reference/environment-variables.html
>
> Please refer to the docs at https://docs.directus.io/
> on how and why to configure them properly
>
WARN
	fi

	# Install database if using sqlite and file doesn't exist
	# Create folder if using sqlite and file doesn't exist
	if [ "${DB_CLIENT}" == "sqlite3" ] ; then
		if [ "${DB_FILENAME}" == "" ] ; then
			print --level=error "Missing DB_FILENAME environment variable"
@@ -77,19 +43,7 @@ WARN
		fi
	fi

	should_seed=false

	set +e
	npx directus database install &>/dev/null
	if [ "$?" == "0" ] ; then
		print --level=info "Database installed"
		should_seed=true
	fi
	set -e

	if [ "${should_seed}" == "true" ] ; then
		seed
	fi
	npx directus bootstrap
}

command=""

@@ -69,7 +69,7 @@ function main() {
	registry=$(argument registry "")
	registry=$(echo "${registry}" | tr '[:upper:]' '[:lower:]')

	repository=$(argument repository "directus/next")
	repository=$(argument repository "directus/directus")
	repository=$(echo "${repository}" | tr '[:upper:]' '[:lower:]')

	version=$(argument version "")

5 .github/workflows/build-images.yml vendored
@@ -8,6 +8,11 @@ jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Sleep for 30 seconds
        uses: jakejarvis/wait-action@master
        with:
          time: '30s'

      - name: Checkout
        uses: actions/checkout@v2

21 .github/workflows/website-docs-deploy.yml vendored Normal file
@@ -0,0 +1,21 @@
name: Deploy Website / Docs

on:
  schedule:
    - cron: '59 23 * * *'
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: satak/webrequest-action@master
        with:
          url: ${{ secrets.BUILD_HOOK_WEBSITE }}
          method: POST

      - uses: satak/webrequest-action@master
        with:
          url: ${{ secrets.BUILD_HOOK_DOCS }}
          method: POST
6 .gitignore vendored
@@ -1,6 +1,6 @@
.DS_Store
node_modules
.vs_code
.vscode
.env
.secrets
npm-debug.log
@@ -8,3 +8,7 @@ lerna-debug.log
.nova
*.code-workspace
dist
*.sublime-settings
*.db
.nyc_output
/.idea/

@@ -1,5 +0,0 @@
{
	"printWidth": 100,
	"singleQuote": true,
	"useTabs": true
}

7 .prettierrc.js Normal file
@@ -0,0 +1,7 @@
module.exports = {
	htmlWhitespaceSensitivity: 'ignore',
	printWidth: 120,
	singleQuote: true,
	useTabs: true,
	proseWrap: 'always',
};
@@ -5,7 +5,6 @@ end_of_line = lf
insert_final_newline = true
charset = utf-8
indent_style = tab
indent_size = 4
trim_trailing_whitespace = true

[{package.json,*.yml,*.yaml}]

5 api/.eslintrc.js Normal file
@@ -0,0 +1,5 @@
const parentConfig = require('../.eslintrc.js');

module.exports = {
	...parentConfig,
};

5 api/.prettierrc.js Normal file
@@ -0,0 +1,5 @@
const parentConfig = require('../.prettierrc.js');

module.exports = {
	...parentConfig,
};
@@ -1,31 +1,56 @@
<img width="250" alt="Logo" src="https://user-images.githubusercontent.com/9141017/88821768-0dc99800-d191-11ea-8c66-09c55ab451a2.png">
<p>&nbsp;</p>

## 🐰 Introduction
<a href="https://directus.io" target="_blank" rel="noopener noreferrer"><img width="250" alt="Logo" src="https://user-images.githubusercontent.com/9141017/88821768-0dc99800-d191-11ea-8c66-09c55ab451a2.png"></a>

Welcome to the preview release of the next major version of Directus.
<p>&nbsp;</p>

**NOTE:** This is pre-release software and should be treated as such. DO NOT use this in production.
Migrations between versions aren't provided, and breaking changes might happen at any release.
## Introduction

## ⚙️ Installation
**Directus is a free and open-source data platform for headless content management**. It can be installed on top of any
new or existing SQL database, instantly providing a dynamic API (REST+GraphQL) and accompanying App for managing
content. Built entirely in TypeScript (in Node and Vue), Directus is completely modular and end-to-end extensible...
with absolutely no paywalls or artificial limitations.

_Directus requires NodeJS 10+_
Modern and intuitive, the Directus App enables no-code data discovery, allowing even the most non-technical users to
view, author, and manage your raw database content. Our performant and flexible API is able to adapt to any relational
schema, and includes rule-based permissions, event/web hooks, custom endpoints, numerous auth options, configurable
storage adapters, and much more.

We've created a little CLI tool you can use to quickly start up a Directus project. You can use it by running:
Current database support includes: PostgreSQL, MySQL, SQLite, MS-SQL Server, OracleDB, MariaDB, and variants such as AWS
Aurora/Redshift or Google Cloud Platform SQL.

Learn more at...

- [Website](https://directus.io/)
- [GitHub](https://github.com/directus/directus)
- [Community](https://directus.chat/)
- [Twitter](https://twitter.com/directus)
- [Docs](https://docs.directus.io/)
- [Marketplace](https://directus.market/)
- [Cloud](http://directus.cloud/)

<p>&nbsp;</p>

## Installing

Directus requires NodeJS 10+. Create a new project with our simple CLI tool:

```
npx create-directus-project my-project
```

or using yarn:
Or using yarn:

```
yarn create directus-project my-project
```

on the command line. This will create the given directory, set up the configuration, and install the database.
The above command will create a directory with your project name, then walk you through the database configuration and
creation of your first admin user.

## ✨ Updating
<p>&nbsp;</p>

## Updating

To update an existing Directus project, navigate to your project directory and run:

@@ -33,19 +58,31 @@ To update an existing Directus project, navigate to your project directory and r
npm update
```

## 🔧 Contributing
<p>&nbsp;</p>

Please report any and all quirks / issues you come across as [an issue](https://github.com/directus/next/issues/new).
## Contributing

Pull requests are more than welcome and always appreciated. Seeing this is in active development, please make sure to reach out to a member of the core team in an issue or [on Discord](http://discord.gg/directus) before you start working on a feature or bug to ensure you don't work on the same thing as somebody else :)
Please report any and all issues [on our GitHub](https://github.com/directus/directus/issues/new).

## ❤️ Supporting Directus
Pull-requests are more than welcome, and always appreciated. Please read our
[Contributors Guide](https://docs.directus.io/getting-started/contributing.html) before starting work on a new feature
or bug, or reach out to a member of the Core Team via [GitHub](https://github.com/directus/directus/discussions) or
[Discord](https://directus.chat) with any questions.

Directus is a GPLv3-licensed open source project with development made possible by support from our core team, contributors, and sponsors. It's not easy building premium open-source software; if you would like to help ensure Directus stays free, please consider becoming a sponsor.
<p>&nbsp;</p>

- [Support us through GitHub Sponsors](https://github.com/sponsors/directus)
- [One-time donation through PayPal](https://www.paypal.me/supportdirectus)
## Supporting

## 📄 License
Directus is a free and open-source project with development made possible by support from our passionate core team,
amazing contributors, and generous sponsors. It's not easy building premium open-source software; if you would like to
help ensure Directus stays free, please consider becoming a sponsor.

Directus is released under [the GPLv3 license](./license). Monospace Inc. owns all Directus trademarks and logos on behalf of our project's community. Copyright © 2006-2020, Monospace Inc.
- [Support us through GitHub Sponsors](https://github.com/sponsors/directus)
- [One-time donation through PayPal](https://www.paypal.me/supportdirectus)

<p>&nbsp;</p>

## License

Directus is released under the [GPLv3 license](./license). Monospace Inc owns all Directus trademarks, logos, and
intellectual property on behalf of our project's community. Copyright © 2004-2020, Monospace Inc.

@@ -53,6 +53,9 @@ CACHE_ENABLED=true
CACHE_TTL="30m"
CACHE_NAMESPACE="directus-cache"
CACHE_STORE=memory # memory | redis | memcache
CACHE_AUTO_PURGE=true

ASSETS_CACHE_TTL="30m"

# CACHE_REDIS="redis://:authpassword@127.0.0.1:6380/4"
# --OR--

5 api/index.js Normal file
@@ -0,0 +1,5 @@
module.exports = {
	createApp: require('./dist/app').default,
	...require('./dist/exceptions'),
	...require('./dist/services'),
};

12229 api/package-lock.json generated (file diff suppressed because it is too large)
@@ -1,8 +1,8 @@
{
	"name": "directus",
	"version": "9.0.0-rc.2",
	"version": "9.0.0-rc.23",
	"license": "GPL-3.0-only",
	"homepage": "https://github.com/directus/next#readme",
	"homepage": "https://github.com/directus/directus#readme",
	"description": "Directus is a real-time API and App dashboard for managing SQL database content.",
	"keywords": [
		"directus",
@@ -24,10 +24,10 @@
	],
	"repository": {
		"type": "git",
		"url": "git+https://github.com/directus/next.git"
		"url": "git+https://github.com/directus/directus.git"
	},
	"bugs": {
		"url": "https://github.com/directus/next/issues"
		"url": "https://github.com/directus/directus/issues"
	},
	"author": {
		"name": "Monospace Inc",
@@ -46,16 +46,18 @@
			"url": "https://github.com/benhaynes"
		}
	],
	"main": "dist/app.js",
	"main": "index.js",
	"bin": {
		"directus": "cli.js"
	},
	"scripts": {
		"start": "npx directus start",
		"build": "rm -rf dist && tsc --build && copyfiles \"src/**/*.*\" -e \"src/**/*.ts\" -u 1 dist",
		"dev": "cross-env NODE_ENV=development LOG_LEVEL=trace ts-node-dev --files src/start.ts --respawn --watch \"src/**/*.ts\" --transpile-only",
		"build": "rimraf dist && tsc --build && copyfiles \"src/**/*.*\" -e \"src/**/*.ts\" -u 1 dist",
		"dev": "cross-env NODE_ENV=development ts-node-dev --files src/start.ts --respawn --watch \"src/**/*.ts\" --watch \".env\" --transpile-only",
		"cli": "cross-env NODE_ENV=development ts-node --script-mode --transpile-only src/cli/index.ts",
		"prepublishOnly": "npm run build"
		"lint": "eslint \"src/**/*.ts\" cli.js index.js",
		"prepublishOnly": "npm run build",
		"prettier": "prettier --write \"src/**/*.ts\" cli.js index.js"
	},
	"files": [
		"dist",
@@ -66,25 +68,27 @@
	"dependencies": {
		"@directus/app": "file:../app",
		"@directus/format-title": "file:../packages/format-title",
		"@directus/specs": "file:../packages/spec",
		"@directus/schema": "file:../packages/schema",
		"@directus/specs": "file:../packages/specs",
		"@godaddy/terminus": "^4.4.1",
		"@slynova/flydrive": "^1.0.2",
		"@slynova/flydrive-gcs": "^1.0.2",
		"@slynova/flydrive-s3": "^1.0.2",
		"argon2": "^0.26.2",
		"@slynova/flydrive": "^1.0.3",
		"@slynova/flydrive-gcs": "^1.0.3",
		"@slynova/flydrive-s3": "^1.0.3",
		"argon2": "^0.27.0",
		"atob": "^2.1.2",
		"axios": "^0.19.2",
		"axios": "^0.21.0",
		"body-parser": "^1.19.0",
		"busboy": "^0.3.1",
		"camelcase": "^6.0.0",
		"camelcase": "^6.2.0",
		"chalk": "^4.1.0",
		"commander": "^5.1.0",
		"commander": "^6.2.0",
		"cookie-parser": "^1.4.5",
		"cors": "^2.8.5",
		"date-fns": "^2.16.1",
		"deep-map": "^2.0.0",
		"dotenv": "^8.2.0",
		"eventemitter2": "^6.4.3",
		"execa": "^4.0.3",
		"execa": "^4.1.0",
		"exif-reader": "^1.0.3",
		"express": "^4.17.1",
		"express-async-handler": "^1.1.4",
@@ -92,53 +96,59 @@
		"express-pino-logger": "^5.0.0",
		"express-session": "^1.17.1",
		"fs-extra": "^9.0.1",
		"grant": "^5.3.0",
		"graphql": "^15.3.0",
		"grant": "^5.4.5",
		"graphql": "^15.4.0",
		"graphql-type-json": "^0.3.2",
		"icc": "^2.0.0",
		"inquirer": "^7.3.3",
		"joi": "^17.1.1",
		"joi": "^17.3.0",
		"js-yaml": "^3.14.0",
		"json2csv": "^5.0.1",
		"json2csv": "^5.0.3",
		"jsonwebtoken": "^8.5.1",
		"keyv": "^4.0.1",
		"knex": "^0.21.4",
		"knex-schema-inspector": "^0.0.21",
		"liquidjs": "^9.14.1",
		"lodash": "^4.17.19",
		"keyv": "^4.0.3",
		"knex": "^0.21.12",
		"liquidjs": "^9.16.1",
		"lodash": "^4.17.20",
		"macos-release": "^2.4.1",
		"mime-types": "^2.1.27",
		"ms": "^2.1.2",
		"nanoid": "^3.1.12",
		"nanoid": "^3.1.16",
		"node-machine-id": "^1.1.12",
		"nodemailer": "^6.4.11",
		"nodemailer": "^6.4.14",
		"openapi3-ts": "^2.0.0",
		"ora": "^4.1.1",
		"ora": "^5.1.0",
		"otplib": "^12.0.1",
		"pino": "^6.4.1",
		"pino": "^6.7.0",
		"pino-colada": "^2.1.0",
		"qs": "^6.9.4",
		"rate-limiter-flexible": "^2.1.10",
		"rate-limiter-flexible": "^2.1.13",
		"resolve-cwd": "^3.0.0",
		"sharp": "^0.25.4",
		"uuid": "^8.3.0",
		"sharp": "^0.26.2",
		"uuid": "^8.3.1",
		"uuid-validate": "0.0.3"
	},
	"optionalDependencies": {
		"@keyv/redis": "^2.1.2",
		"ioredis": "^4.17.3",
		"keyv-memcache": "^0.8.0",
		"ioredis": "^4.19.2",
		"keyv-memcache": "^1.0.1",
		"memcached": "^2.2.2",
		"mssql": "^6.2.0",
		"mssql": "^6.2.3",
		"mysql": "^2.18.1",
		"oracledb": "^5.0.0",
		"pg": "^8.4.1",
		"pg": "^8.4.2",
		"sqlite3": "^5.0.0"
	},
	"gitHead": "4476da28dbbc2824e680137aa28b2b91b5afabec",
	"devDependencies": {
		"@typescript-eslint/eslint-plugin": "^4.9.1",
		"@typescript-eslint/parser": "^4.9.1",
		"copyfiles": "^2.4.0",
		"cross-env": "^7.0.2",
		"ts-node-dev": "^1.0.0-pre.64",
		"typescript": "^4.0.3"
		"eslint": "^7.15.0",
		"eslint-config-prettier": "^7.0.0",
		"eslint-plugin-prettier": "^3.2.0",
		"prettier": "^2.2.1",
		"ts-node-dev": "^1.0.0",
		"typescript": "^4.0.5"
	}
}

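With the new "lint" and "prettier" scripts in api/package.json, a quick sketch of invoking them from the api/ directory (nothing beyond what the scripts above already define):

```
cd api
npm run lint       # eslint "src/**/*.ts" cli.js index.js
npm run prettier   # prettier --write "src/**/*.ts" cli.js index.js
```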
@@ -4,6 +4,8 @@ import logger from './logger';
import expressLogger from 'express-pino-logger';
import path from 'path';

import { validateDBConnection, isInstalled } from './database';

import { validateEnv } from './utils/validate-env';
import env from './env';
import { track } from './utils/track';
@@ -34,19 +36,29 @@ import usersRouter from './controllers/users';
import utilsRouter from './controllers/utils';
import webhooksRouter from './controllers/webhooks';
import graphqlRouter from './controllers/graphql';
import schema from './middleware/schema';

import notFoundHandler from './controllers/not-found';
import sanitizeQuery from './middleware/sanitize-query';
import { checkIP } from './middleware/check-ip';
import { WebhooksService } from './services/webhooks';
import { InvalidPayloadException } from './exceptions';

import { registerExtensions } from './extensions';
import { register as registerWebhooks } from './webhooks';
import emitter from './emitter';

import fse from 'fs-extra';

export default async function createApp() {
	validateEnv(['KEY', 'SECRET']);

	await validateDBConnection();

	if ((await isInstalled()) === false) {
		logger.fatal(`Database doesn't have Directus tables installed.`);
		process.exit(1);
	}

	const app = express();

	const customRouter = express.Router();
@@ -80,11 +92,18 @@ export default async function createApp() {

	if (env.NODE_ENV !== 'development') {
		const adminPath = require.resolve('@directus/app/dist/index.html');
		const publicUrl = env.PUBLIC_URL.endsWith('/') ? env.PUBLIC_URL : env.PUBLIC_URL + '/';

		app.get('/', (req, res) => res.redirect('/admin/'));
		// Prefix all href/src in the index html with the APIs public path
		let html = fse.readFileSync(adminPath, 'utf-8');
		html = html.replace(/href="\//g, `href="${publicUrl}`);
		html = html.replace(/src="\//g, `src="${publicUrl}`);

		app.get('/', (req, res) => res.redirect(`./admin/`));
		app.get('/admin', (req, res) => res.send(html));
		app.use('/admin', express.static(path.join(adminPath, '..')));
		app.use('/admin/*', (req, res) => {
			res.sendFile(adminPath);
			res.send(html);
		});
	}

@@ -93,16 +112,18 @@ export default async function createApp() {
		app.use(rateLimiter);
	}

	app.use(sanitizeQuery);

	app.use('/auth', authRouter);

	app.use(authenticate);

	app.use(checkIP);

	app.use(sanitizeQuery);

	app.use(cache);

	app.use(schema);

	app.use('/auth', authRouter);

	app.use('/graphql', graphqlRouter);

	app.use('/activity', activityRouter);
@@ -128,8 +149,7 @@ export default async function createApp() {
	app.use(errorHandler);

	// Register all webhooks
	const webhooksService = new WebhooksService();
	await webhooksService.register();
	await registerWebhooks();

	// Register custom hooks / endpoints
	await registerExtensions(customRouter);

@@ -9,13 +9,13 @@ let cache: Keyv | null = null;

if (env.CACHE_ENABLED === true) {
	validateEnv(['CACHE_NAMESPACE', 'CACHE_TTL', 'CACHE_STORE']);
	cache = getKevyInstance();
	cache.on('error', logger.error);
	cache = getKeyvInstance();
	cache.on('error', (err) => logger.error(err));
}

export default cache;

function getKevyInstance() {
function getKeyvInstance() {
	switch (env.CACHE_STORE) {
		case 'redis':
			return new Keyv(getConfig('redis'));
@@ -34,12 +34,8 @@ function getConfig(store: 'memory' | 'redis' | 'memcache' = 'memory'): Options<a
	};

	if (store === 'redis') {
		const Redis = require('ioredis');
		const KeyvRedis = require('@keyv/redis');

		config.store = new KeyvRedis(
			new Redis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'))
		);
		config.store = new KeyvRedis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'));
	}

	if (store === 'memcache') {

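The redis cache can therefore be driven either by a single connection string or by prefixed variables; a hedged .env sketch (the split CACHE_REDIS_* names are an assumption based only on the getConfigFromEnv('CACHE_REDIS_') prefix above, not confirmed in this diff):

```
CACHE_ENABLED=true
CACHE_STORE=redis
CACHE_REDIS="redis://:authpassword@127.0.0.1:6380/4"
# --OR-- (assumed split form, picked up via the CACHE_REDIS_ prefix)
# CACHE_REDIS_HOST="127.0.0.1"
# CACHE_REDIS_PORT=6380
# CACHE_REDIS_PASSWORD="authpassword"
```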
74 api/src/cli/commands/bootstrap/index.ts Normal file
@@ -0,0 +1,74 @@
import env from '../../../env';
import logger from '../../../logger';
import installDatabase from '../../../database/seeds/run';
import runMigrations from '../../../database/migrations/run';
import { nanoid } from 'nanoid';

export default async function bootstrap() {
	logger.info('Initializing bootstrap...');

	if ((await isDatabaseAvailable()) === false) {
		logger.error(`Can't connect to the database`);
		process.exit(1);
	}

	const { isInstalled, default: database, schemaInspector } = require('../../../database');
	const { RolesService } = require('../../../services/roles');
	const { UsersService } = require('../../../services/users');

	if ((await isInstalled()) === false) {
		logger.info('Installing Directus system tables...');

		await installDatabase(database);

		const schema = await schemaInspector.overview();

		logger.info('Setting up first admin role...');
		const rolesService = new RolesService({ schema });
		const role = await rolesService.create({ name: 'Admin', admin_access: true });

		logger.info('Adding first admin user...');
		const usersService = new UsersService({ schema });

		let adminEmail = env.ADMIN_EMAIL;

		if (!adminEmail) {
			logger.info('No admin email provided. Defaulting to "admin@example.com"');
			adminEmail = 'admin@example.com';
		}

		let adminPassword = env.ADMIN_PASSWORD;

		if (!adminPassword) {
			adminPassword = nanoid(12);
			logger.info(`No admin password provided. Defaulting to "${adminPassword}"`);
		}

		await usersService.create({ email: adminEmail, password: adminPassword, role });
	} else {
		logger.info('Database already initialized, skipping install');
	}

	logger.info('Running migrations...');
	await runMigrations(database, 'latest');

	logger.info('Done');
	process.exit(0);
}

async function isDatabaseAvailable() {
	const { hasDatabaseConnection } = require('../../../database');

	const tries = 5;
	const secondsBetweenTries = 5;

	for (var i = 0; i < tries; i++) {
		if (await hasDatabaseConnection()) {
			return true;
		}

		await new Promise((resolve) => setTimeout(resolve, secondsBetweenTries * 1000));
	}

	return false;
}
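A short sketch of running the new bootstrap command with the environment variables the code above reads (ADMIN_EMAIL / ADMIN_PASSWORD are optional; when no password is set, a random one is generated and logged):

```
ADMIN_EMAIL="admin@example.com" ADMIN_PASSWORD="d1r3ctu5" npx directus bootstrap
```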
@@ -1,4 +1,4 @@
export default async function rolesCreate(collection: string) {
export default async function count(collection: string) {
	const database = require('../../../database/index').default;

	if (!collection) {
@@ -6,10 +6,16 @@ export default async function rolesCreate(collection: string) {
		process.exit(1);
	}

	const records = await database(collection).count('*', { as: 'count' });
	const count = Number(records[0].count);
	try {
		const records = await database(collection).count('*', { as: 'count' });
		const count = Number(records[0].count);

		console.log(count);

		database.destroy();
		console.log(count);
		database.destroy();
		process.exit(0);
	} catch (err) {
		console.error(err);
		database.destroy();
		process.exit(1);
	}
}

@@ -1,14 +1,18 @@
import Knex from 'knex';
import run from '../../../database/seeds/run';
import installSeeds from '../../../database/seeds/run';
import runMigrations from '../../../database/migrations/run';

export default async function start() {
	const database = require('../../../database/index').default as Knex;

	try {
		await run(database);
		await installSeeds(database);
		await runMigrations(database, 'latest');
		database.destroy();
		process.exit(0);
	} catch (err) {
		console.log(err);
		process.exit(1);
	} finally {
		database.destroy();
		process.exit(1);
	}
}

@@ -1,14 +1,25 @@
import run from '../../../database/migrations/run';

import ora from 'ora';

export default async function migrate(direction: 'latest' | 'up' | 'down') {
	const database = require('../../../database').default;

	try {
		const spinnerDriver = ora('Running migrations...').start();
		await run(database, direction);
		spinnerDriver.stop();

		if (direction === 'down') {
			console.log('✨ Downgrade successful');
		} else {
			console.log('✨ Database up to date');
		}
		database.destroy();
		process.exit();
	} catch (err) {
		console.log(err);
		process.exit(1);
	} finally {
		database.destroy();
		process.exit(1);
	}
}

@@ -10,6 +10,7 @@ import ora from 'ora';
import argon2 from 'argon2';

import runSeed from '../../../database/seeds/run';
import runMigrations from '../../../database/migrations/run';

import createDBConnection, { Credentials } from '../../utils/create-db-connection';
import Knex from 'knex';
@@ -28,13 +29,9 @@ export default async function init(options: Record<string, any>) {

	const dbClient = getDriverForClient(client)!;

	try {
		require.resolve(dbClient);
	} catch {
		const spinnerDriver = ora('Installing Database Driver...').start();
		await execa('npm', ['install', dbClient, '--production']);
		spinnerDriver.stop();
	}
	const spinnerDriver = ora('Installing Database Driver...').start();
	await execa('npm', ['install', dbClient, '--production']);
	spinnerDriver.stop();

	let attemptsRemaining = 5;

@@ -51,11 +48,12 @@ export default async function init(options: Record<string, any>) {

	try {
		await runSeed(db);
		await runMigrations(db, 'latest');
	} catch (err) {
		console.log();
		console.log('Something went wrong while seeding the database:');
		console.log();
		console.log(`${err.code && chalk.red(`[${err.code}]`)} ${err.message}`);
		console.log(`${chalk.red(`[${err.code || 'Error'}]`)} ${err.message}`);
		console.log();
		console.log('Please try again');
		console.log();
@@ -102,7 +100,7 @@ export default async function init(options: Record<string, any>) {
	await db('directus_roles').insert({
		id: roleID,
		name: 'Administrator',
		icon: 'verified_user',
		icon: 'verified',
		admin_access: true,
		description: 'Initial administrative role with unrestricted App/API access',
	});
@@ -117,7 +115,7 @@ export default async function init(options: Record<string, any>) {
		role: roleID,
	});

	db.destroy();
	await db.destroy();

	console.log(`
Your project has been created at ${chalk.green(rootPath)}.
@@ -128,4 +126,6 @@ Start Directus by running:
	${chalk.blue('cd')} ${rootPath}
	${chalk.blue('npx directus')} start
`);

	process.exit(0);
}

@@ -1,5 +1,5 @@
export default async function rolesCreate({ name, admin }: any) {
	const database = require('../../../database/index').default;
	const { default: database, schemaInspector } = require('../../../database/index');
	const { RolesService } = require('../../../services/roles');

	if (!name) {
@@ -7,8 +7,16 @@ export default async function rolesCreate({ name, admin }: any) {
		process.exit(1);
	}

	const service = new RolesService();
	const id = await service.create({ name, admin_access: admin });
	console.log(id);
	database.destroy();
	try {
		const schema = await schemaInspector.overview();
		const service = new RolesService({ schema: schema, knex: database });

		const id = await service.create({ name, admin_access: admin });
		console.log(id);
		database.destroy();
		process.exit(0);
	} catch (err) {
		console.error(err);
		process.exit(1);
	}
}

@@ -1,5 +1,5 @@
export default async function usersCreate({ email, password, role }: any) {
	const database = require('../../../database/index').default;
	const { default: database, schemaInspector } = require('../../../database/index');
	const { UsersService } = require('../../../services/users');

	if (!email || !password || !role) {
@@ -7,8 +7,16 @@ export default async function usersCreate({ email, password, role }: any) {
		process.exit(1);
	}

	const service = new UsersService();
	const id = await service.create({ email, password, role, status: 'active' });
	console.log(id);
	database.destroy();
	try {
		const schema = await schemaInspector.overview();
		const service = new UsersService({ schema, knex: database });

		const id = await service.create({ email, password, role, status: 'active' });
		console.log(id);
		database.destroy();
		process.exit(0);
	} catch (err) {
		console.error(err);
		process.exit(1);
	}
}

@@ -11,6 +11,7 @@ import dbMigrate from './commands/database/migrate';
import usersCreate from './commands/users/create';
import rolesCreate from './commands/roles/create';
import count from './commands/count';
import bootstrap from './commands/bootstrap';

program.name('directus').usage('[command] [options]');
program.version(pkg.version, '-v, --version');
@@ -52,9 +53,11 @@ rolesCommand
	.option('--admin', `whether or not the role has admin access`)
	.action(rolesCreate);

program
	.command('count <collection>')
	.description('Count the amount of items in a given collection')
	.action(count);
program.command('count <collection>').description('Count the amount of items in a given collection').action(count);

program.parse(process.argv);
program.command('bootstrap').description('Initialize or update the database').action(bootstrap);

program.parseAsync(process.argv).catch((err) => {
	console.error(err);
	process.exit(1);
});

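Roughly how the registered commands above can be exercised from a project directory (the role/user flags mirror the entrypoint script earlier in this diff; `articles` is a hypothetical collection name, and the role id printed by `roles create` is what `users create --role` expects):

```
npx directus bootstrap                      # install or migrate the database
npx directus count articles                 # print the number of items in a collection
npx directus roles create --name Administrator --admin
npx directus users create --email admin@example.com --password secret --role <role-id>
```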
@@ -21,11 +21,7 @@ const defaults = {
	},
};

export default async function createEnv(
	client: keyof typeof drivers,
	credentials: Credentials,
	directory: string
) {
export default async function createEnv(client: keyof typeof drivers, credentials: Credentials, directory: string) {
	const config: Record<string, any> = {
		...defaults,
		database: {

@@ -3,37 +3,37 @@ import { Transformation } from './types/assets';
export const SYSTEM_ASSET_ALLOW_LIST: Transformation[] = [
	{
		key: 'system-small-cover',
		w: 64,
		h: 64,
		f: 'cover',
		width: 64,
		height: 64,
		fit: 'cover',
	},
	{
		key: 'system-small-contain',
		w: 64,
		f: 'contain',
		width: 64,
		fit: 'contain',
	},
	{
		key: 'system-medium-cover',
		w: 300,
		h: 300,
		f: 'cover',
		width: 300,
		height: 300,
		fit: 'cover',
	},
	{
		key: 'system-medium-contain',
		w: 300,
		f: 'contain',
		width: 300,
		fit: 'contain',
	},
	{
		key: 'system-large-cover',
		w: 800,
		h: 600,
		f: 'cover',
		width: 800,
		height: 600,
		fit: 'cover',
	},
	{
		key: 'system-large-contain',
		w: 800,
		f: 'contain',
		width: 800,
		fit: 'contain',
	},
];

export const ASSET_TRANSFORM_QUERY_KEYS = ['key', 'w', 'h', 'f'];
export const ASSET_TRANSFORM_QUERY_KEYS = ['key', 'width', 'height', 'fit', 'withoutEnlargement'];

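With the renamed query keys, an asset request uses the long-form parameters (or one of the preset keys defined above); a hedged sketch, with `<file-id>` as a placeholder and the port taken from the docker-compose example earlier in this diff:

```
# full parameter names replace the old w/h/f shorthand
curl "http://localhost:8055/assets/<file-id>?width=300&height=300&fit=cover&withoutEnlargement=true"

# or reference one of the system presets defined above
curl "http://localhost:8055/assets/<file-id>?key=system-medium-cover"
```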
@@ -13,8 +13,14 @@ router.use(useCollection('directus_activity'));
router.get(
	'/',
	asyncHandler(async (req, res, next) => {
		const service = new ActivityService({ accountability: req.accountability });
		const metaService = new MetaService({ accountability: req.accountability });
		const service = new ActivityService({
			accountability: req.accountability,
			schema: req.schema,
		});
		const metaService = new MetaService({
			accountability: req.accountability,
			schema: req.schema,
		});

		const records = await service.readByQuery(req.sanitizedQuery);
		const meta = await metaService.getMetaForQuery('directus_activity', req.sanitizedQuery);
@@ -32,7 +38,10 @@ router.get(
router.get(
	'/:pk',
	asyncHandler(async (req, res, next) => {
		const service = new ActivityService({ accountability: req.accountability });
		const service = new ActivityService({
			accountability: req.accountability,
			schema: req.schema,
		});
		const record = await service.readByKey(req.params.pk, req.sanitizedQuery);

		res.locals.payload = {
@@ -47,7 +56,10 @@ router.get(
router.post(
	'/comment',
	asyncHandler(async (req, res, next) => {
		const service = new ActivityService({ accountability: req.accountability });
		const service = new ActivityService({
			accountability: req.accountability,
			schema: req.schema,
		});

		const primaryKey = await service.create({
			...req.body,
@@ -79,7 +91,10 @@ router.post(
router.patch(
	'/comment/:pk',
	asyncHandler(async (req, res, next) => {
		const service = new ActivityService({ accountability: req.accountability });
		const service = new ActivityService({
			accountability: req.accountability,
			schema: req.schema,
		});
		const primaryKey = await service.update(req.body, req.params.pk);

		try {
@@ -104,7 +119,10 @@ router.patch(
router.delete(
	'/comment/:pk',
	asyncHandler(async (req, res, next) => {
		const service = new ActivityService({ accountability: req.accountability });
		const service = new ActivityService({
			accountability: req.accountability,
			schema: req.schema,
		});
		await service.delete(req.params.pk);

		return next();

@@ -9,7 +9,8 @@ import { Transformation } from '../types/assets';
import storage from '../storage';
import { PayloadService, AssetsService } from '../services';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';
import env from '../env';
import ms from 'ms';

const router = Router();

@@ -31,11 +32,7 @@ router.get(
		const isValidUUID = validate(id, 4);
		if (isValidUUID === false) throw new ForbiddenException();

		const file = await database
			.select('id', 'storage', 'filename_disk')
			.from('directus_files')
			.where({ id })
			.first();
		const file = await database.select('id', 'storage', 'filename_disk').from('directus_files').where({ id }).first();

		if (!file) throw new ForbiddenException();

@@ -48,7 +45,7 @@ router.get(

	// Validate query params
	asyncHandler(async (req, res, next) => {
		const payloadService = new PayloadService('directus_settings');
		const payloadService = new PayloadService('directus_settings', { schema: req.schema });
		const defaults = { storage_asset_presets: [], storage_asset_transform: 'all' };

		let savedAssetSettings = await database
@@ -65,52 +62,45 @@ router.get(
		const transformation = pick(req.query, ASSET_TRANSFORM_QUERY_KEYS);

		if (transformation.hasOwnProperty('key') && Object.keys(transformation).length > 1) {
			throw new InvalidQueryException(
				`You can't combine the "key" query parameter with any other transformation.`
			);
			throw new InvalidQueryException(`You can't combine the "key" query parameter with any other transformation.`);
		}

		const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key);
		const allKeys: string[] = [
			...systemKeys,
			...(assetSettings.storage_asset_presets || []).map(
				(transformation: Transformation) => transformation.key
			),
			...(assetSettings.storage_asset_presets || []).map((transformation: Transformation) => transformation.key),
		];

		// For use in the next request handler
		res.locals.shortcuts = [...SYSTEM_ASSET_ALLOW_LIST, assetSettings.storage_asset_presets];
		res.locals.shortcuts = [...SYSTEM_ASSET_ALLOW_LIST, ...(assetSettings.storage_asset_presets || [])];
		res.locals.transformation = transformation;

		if (Object.keys(transformation).length === 0) {
			return next();
		}

		if (assetSettings.asset_generation === 'all') {
		if (assetSettings.storage_asset_transform === 'all') {
			if (transformation.key && allKeys.includes(transformation.key as string) === false)
				throw new InvalidQueryException(`Key "${transformation.key}" isn't configured.`);
			return next();
		} else if (assetSettings.asset_generation === 'shortcut') {
		} else if (assetSettings.storage_asset_transform === 'shortcut') {
			if (allKeys.includes(transformation.key as string)) return next();
			throw new InvalidQueryException(
				`Only configured shortcuts can be used in asset generation.`
			);
			throw new InvalidQueryException(`Only configured shortcuts can be used in asset generation.`);
		} else {
			if (transformation.key && systemKeys.includes(transformation.key as string))
				return next();
			throw new InvalidQueryException(
				`Dynamic asset generation has been disabled for this project.`
			);
			if (transformation.key && systemKeys.includes(transformation.key as string)) return next();
			throw new InvalidQueryException(`Dynamic asset generation has been disabled for this project.`);
		}
	}),

	// Return file
	asyncHandler(async (req, res) => {
		const service = new AssetsService({ accountability: req.accountability });
		const service = new AssetsService({
			accountability: req.accountability,
			schema: req.schema,
		});

		const transformation: Transformation = res.locals.transformation.key
			? res.locals.shortcuts.find(
					(transformation: Transformation) =>
						transformation.key === res.locals.transformation.key
					(transformation: Transformation) => transformation.key === res.locals.transformation.key
			  )
			: res.locals.transformation;

@@ -123,6 +113,8 @@ router.get(
			res.removeHeader('Content-Disposition');
		}

		const access = !!req.accountability?.role ? 'private' : 'public';
		res.setHeader('Cache-Control', `${access}, max-age="${ms(env.ASSETS_CACHE_TTL as string)}"`);
		stream.pipe(res);
	})
);

@@ -34,6 +34,7 @@ router.post(

		const authenticationService = new AuthenticationService({
			accountability: accountability,
			schema: req.schema,
		});

		const { error } = loginSchema.validate(req.body);
@@ -46,15 +47,13 @@ router.post(
		const ip = req.ip;
		const userAgent = req.get('user-agent');

		const { accessToken, refreshToken, expires, id } = await authenticationService.authenticate(
			{
				ip,
				userAgent,
				email,
				password,
				otp,
			}
		);
		const { accessToken, refreshToken, expires } = await authenticationService.authenticate({
			ip,
			userAgent,
			email,
			password,
			otp,
		});

		const payload = {
			data: { access_token: accessToken, expires },
@@ -69,8 +68,7 @@ router.post(
			httpOnly: true,
			maxAge: ms(env.REFRESH_TOKEN_TTL as string),
			secure: env.REFRESH_TOKEN_COOKIE_SECURE === 'true' ? true : false,
			sameSite:
				(env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
			sameSite: (env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
		});
	}

@@ -92,21 +90,18 @@ router.post(

		const authenticationService = new AuthenticationService({
			accountability: accountability,
			schema: req.schema,
		});

		const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;

		if (!currentRefreshToken) {
			throw new InvalidPayloadException(
				`"refresh_token" is required in either the JSON payload or Cookie`
			);
			throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
		}

		const mode: 'json' | 'cookie' = req.body.mode || req.body.refresh_token ? 'json' : 'cookie';

		const { accessToken, refreshToken, expires } = await authenticationService.refresh(
			currentRefreshToken
		);
		const { accessToken, refreshToken, expires } = await authenticationService.refresh(currentRefreshToken);

		const payload = {
			data: { access_token: accessToken, expires },
@@ -121,8 +116,7 @@ router.post(
			httpOnly: true,
			maxAge: ms(env.REFRESH_TOKEN_TTL as string),
			secure: env.REFRESH_TOKEN_COOKIE_SECURE === 'true' ? true : false,
			sameSite:
				(env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
			sameSite: (env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
		});
	}

@@ -144,14 +138,13 @@ router.post(

		const authenticationService = new AuthenticationService({
			accountability: accountability,
			schema: req.schema,
		});

		const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;

		if (!currentRefreshToken) {
			throw new InvalidPayloadException(
				`"refresh_token" is required in either the JSON payload or Cookie`
			);
			throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
		}

		await authenticationService.logout(currentRefreshToken);
@@ -173,10 +166,10 @@ router.post(
			role: null,
		};

		const service = new UsersService({ accountability });
		const service = new UsersService({ accountability, schema: req.schema });

		try {
			await service.requestPasswordReset(req.body.email);
			await service.requestPasswordReset(req.body.email, req.body.reset_url || null);
		} catch {
			// We don't want to give away what email addresses exist, so we'll always return a 200
			// from this endpoint
@@ -204,7 +197,7 @@ router.post(
			role: null,
		};

		const service = new UsersService({ accountability });
		const service = new UsersService({ accountability, schema: req.schema });
		await service.resetPassword(req.body.token, req.body.password);
		return next();
	}),
@@ -221,10 +214,7 @@ router.get(
	respond
);

router.use(
	'/oauth',
	session({ secret: env.SECRET as string, saveUninitialized: false, resave: false })
);
router.use('/oauth', session({ secret: env.SECRET as string, saveUninitialized: false, resave: false }));

router.get(
	'/oauth/:provider',
@@ -239,7 +229,7 @@ router.get(
		}

		if (req.query?.redirect && req.session) {
			req.session.redirect = req.query.redirect;
			req.session.redirect = req.query.redirect as string;
		}

		next();
@@ -252,7 +242,7 @@ router.use(grant.express()(grantConfig));
router.get(
	'/oauth/:provider/callback',
	asyncHandler(async (req, res, next) => {
		const redirect = req.session?.redirect;
		const redirect = req.session.redirect;

		const accountability = {
			ip: req.ip,
@@ -262,12 +252,10 @@ router.get(

		const authenticationService = new AuthenticationService({
			accountability: accountability,
			schema: req.schema,
		});

		const email = getEmailFromProfile(
			req.params.provider,
			req.session!.grant.response?.profile
		);
		const email = getEmailFromProfile(req.params.provider, req.session.grant.response?.profile);

		req.session?.destroy(() => {});

@@ -280,8 +268,7 @@ router.get(
			httpOnly: true,
			maxAge: ms(env.REFRESH_TOKEN_TTL as string),
			secure: env.REFRESH_TOKEN_COOKIE_SECURE === 'true' ? true : false,
			sameSite:
				(env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
			sameSite: (env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
		});

		return res.redirect(redirect);

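For reference, a hedged sketch of calling the login endpoint touched above, showing only fields visible in this diff (email/password/otp in; access_token and expires in the response payload; the refresh token is set as an httpOnly cookie in cookie mode):

```
curl -X POST "http://localhost:8055/auth/login" \
  -H "Content-Type: application/json" \
  -d '{ "email": "admin@example.com", "password": "d1r3ctu5" }'
# response payload per the code above: { "data": { "access_token": "...", "expires": ... } }
```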
@@ -1,7 +1,7 @@
import { Router } from 'express';
import asyncHandler from 'express-async-handler';
import { CollectionsService, MetaService } from '../services';
import { ForbiddenException } from '../exceptions';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { respond } from '../middleware/respond';

const router = Router();
@@ -9,7 +9,10 @@ const router = Router();
router.post(
	'/',
	asyncHandler(async (req, res, next) => {
		const collectionsService = new CollectionsService({ accountability: req.accountability });
		const collectionsService = new CollectionsService({
			accountability: req.accountability,
			schema: req.schema,
		});

		const collectionKey = await collectionsService.create(req.body);
		const record = await collectionsService.readByKey(collectionKey);
@@ -23,8 +26,14 @@ router.post(
router.get(
	'/',
	asyncHandler(async (req, res, next) => {
		const collectionsService = new CollectionsService({ accountability: req.accountability });
		const metaService = new MetaService({ accountability: req.accountability });
		const collectionsService = new CollectionsService({
			accountability: req.accountability,
			schema: req.schema,
		});
		const metaService = new MetaService({
			accountability: req.accountability,
			schema: req.schema,
		});

		const collections = await collectionsService.readByQuery();
		const meta = await metaService.getMetaForQuery('directus_collections', {});
@@ -38,7 +47,10 @@ router.get(
router.get(
	'/:collection',
	asyncHandler(async (req, res, next) => {
		const collectionsService = new CollectionsService({ accountability: req.accountability });
		const collectionsService = new CollectionsService({
			accountability: req.accountability,
			schema: req.schema,
		});
		const collectionKey = req.params.collection.includes(',')
			? req.params.collection.split(',')
			: req.params.collection;
@@ -62,7 +74,10 @@ router.get(
router.patch(
	'/:collection',
	asyncHandler(async (req, res, next) => {
		const collectionsService = new CollectionsService({ accountability: req.accountability });
		const collectionsService = new CollectionsService({
			accountability: req.accountability,
			schema: req.schema,
		});
		const collectionKey = req.params.collection.includes(',')
			? req.params.collection.split(',')
			: req.params.collection;
@@ -84,10 +99,31 @@ router.patch(
	respond
);

router.delete(
	'/',
	asyncHandler(async (req, res, next) => {
		if (!req.body || Array.isArray(req.body) === false) {
			throw new InvalidPayloadException(`Body has to be an array of primary keys`);
		}

		const collectionsService = new CollectionsService({
			accountability: req.accountability,
			schema: req.schema,
		});
		await collectionsService.delete(req.body as string[]);

		return next();
	}),
	respond
);

router.delete(
	'/:collection',
	asyncHandler(async (req, res, next) => {
		const collectionsService = new CollectionsService({ accountability: req.accountability });
		const collectionsService = new CollectionsService({
			accountability: req.accountability,
			schema: req.schema,
		});
		const collectionKey = req.params.collection.includes(',')
			? req.params.collection.split(',')
			: req.params.collection;

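The new batch delete route accepts an array of collection keys in the request body; a hedged usage sketch, assuming the router is mounted at /collections (as the controller name suggests; not shown in this diff) and with placeholder collection names:

```
curl -X DELETE "http://localhost:8055/collections" \
  -H "Content-Type: application/json" \
  -d '["about", "articles"]'
```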
@@ -2,7 +2,6 @@ import { Router } from 'express';
import asyncHandler from 'express-async-handler';
import { FieldsService } from '../services/fields';
import validateCollection from '../middleware/collection-exists';
import { schemaInspector } from '../database';
import { InvalidPayloadException, ForbiddenException } from '../exceptions';
import Joi from 'joi';
import { types, Field } from '../types';
@@ -16,7 +15,10 @@ router.use(useCollection('directus_fields'));
router.get(
	'/',
	asyncHandler(async (req, res, next) => {
		const service = new FieldsService({ accountability: req.accountability });
		const service = new FieldsService({
			accountability: req.accountability,
			schema: req.schema,
		});
		const fields = await service.readAll();

		res.locals.payload = { data: fields || null };
@@ -29,7 +31,10 @@ router.get(
	'/:collection',
	validateCollection,
	asyncHandler(async (req, res, next) => {
		const service = new FieldsService({ accountability: req.accountability });
		const service = new FieldsService({
			accountability: req.accountability,
			schema: req.schema,
		});
		const fields = await service.readAll(req.params.collection);

		res.locals.payload = { data: fields || null };
@@ -42,10 +47,12 @@ router.get(
	'/:collection/:field',
	validateCollection,
	asyncHandler(async (req, res, next) => {
		const service = new FieldsService({ accountability: req.accountability });
		const service = new FieldsService({
			accountability: req.accountability,
			schema: req.schema,
		});

		const exists = await schemaInspector.hasColumn(req.params.collection, req.params.field);
		if (exists === false) throw new ForbiddenException();
		if (req.params.field in req.schema[req.params.collection].columns === false) throw new ForbiddenException();

		const field = await service.readOne(req.params.collection, req.params.field);

@@ -72,10 +79,12 @@ router.post(
	'/:collection',
	validateCollection,
	asyncHandler(async (req, res, next) => {
		if (!req.body.schema && !req.body.meta)
			throw new InvalidPayloadException(`"schema" or "meta" is required`);
		if (!req.body.schema && !req.body.meta) throw new InvalidPayloadException(`"schema" or "meta" is required`);

		const service = new FieldsService({ accountability: req.accountability });
		const service = new FieldsService({
			accountability: req.accountability,
			schema: req.schema,
		});

		const { error } = newFieldSchema.validate(req.body);

@@ -107,7 +116,10 @@ router.patch(
	'/:collection',
	validateCollection,
	asyncHandler(async (req, res, next) => {
		const service = new FieldsService({ accountability: req.accountability });
		const service = new FieldsService({
			accountability: req.accountability,
			schema: req.schema,
		});

		if (Array.isArray(req.body) === false) {
			throw new InvalidPayloadException('Submitted body has to be an array.');
@@ -142,7 +154,10 @@ router.patch(
	validateCollection,
	// @todo: validate field
	asyncHandler(async (req, res, next) => {
		const service = new FieldsService({ accountability: req.accountability });
		const service = new FieldsService({
			accountability: req.accountability,
			schema: req.schema,
		});
		const fieldData: Partial<Field> & { field: string; type: typeof types[number] } = req.body;
|
||||
|
||||
if (!fieldData.field) fieldData.field = req.params.field;
|
||||
@@ -169,7 +184,10 @@ router.delete(
|
||||
'/:collection/:field',
|
||||
validateCollection,
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new FieldsService({ accountability: req.accountability });
|
||||
const service = new FieldsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.deleteField(req.params.collection, req.params.field);
|
||||
return next();
|
||||
}),
|
||||
|
||||
@@ -12,6 +12,7 @@ import url from 'url';
|
||||
import path from 'path';
|
||||
import useCollection from '../middleware/use-collection';
|
||||
import { respond } from '../middleware/respond';
|
||||
import { toArray } from '../utils/to-array';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -22,7 +23,7 @@ const multipartHandler = asyncHandler(async (req, res, next) => {

const busboy = new Busboy({ headers: req.headers });
const savedFiles: PrimaryKey[] = [];
const service = new FilesService({ accountability: req.accountability });
const service = new FilesService({ accountability: req.accountability, schema: req.schema });

const existingPrimaryKey = req.params.pk || undefined;

@@ -32,7 +33,7 @@ const multipartHandler = asyncHandler(async (req, res, next) => {
* the row in directus_files async during the upload of the actual file.
*/

let disk: string = (env.STORAGE_LOCATIONS as string).split(',')[0].trim();
let disk: string = toArray(env.STORAGE_LOCATIONS)[0];
let payload: Partial<File> = {};
let fileCount = 0;
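The default storage disk is now resolved through a toArray utility instead of the inline split/trim. The utility itself is not part of this diff; a plausible sketch, inferred from the inline code it replaces:

// Assumed helper, not shown in this diff: inferred from the `split(',')[0].trim()`
// call it replaces. The real implementation may also accept non-string values.
export function toArray(value: string | string[]): string[] {
	if (Array.isArray(value)) return value;
	return value.split(',').map((part) => part.trim());
}

// toArray('local, s3')[0] === 'local' -> used as the default STORAGE_LOCATIONS disk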
|
||||
|
||||
@@ -67,11 +68,7 @@ const multipartHandler = asyncHandler(async (req, res, next) => {
|
||||
};
|
||||
|
||||
try {
|
||||
const primaryKey = await service.upload(
|
||||
fileStream,
|
||||
payloadWithRequiredFields,
|
||||
existingPrimaryKey
|
||||
);
|
||||
const primaryKey = await service.upload(fileStream, payloadWithRequiredFields, existingPrimaryKey);
|
||||
savedFiles.push(primaryKey);
|
||||
tryDone();
|
||||
} catch (error) {
|
||||
@@ -101,7 +98,10 @@ router.post(
|
||||
'/',
|
||||
multipartHandler,
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new FilesService({ accountability: req.accountability });
|
||||
const service = new FilesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
let keys: PrimaryKey | PrimaryKey[] = [];
|
||||
|
||||
if (req.is('multipart/form-data')) {
|
||||
@@ -144,7 +144,10 @@ router.post(
|
||||
throw new InvalidPayloadException(error.message);
|
||||
}
|
||||
|
||||
const service = new FilesService({ accountability: req.accountability });
|
||||
const service = new FilesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const fileResponse = await axios.get<NodeJS.ReadableStream>(req.body.url, {
|
||||
responseType: 'stream',
|
||||
@@ -155,7 +158,7 @@ router.post(
|
||||
|
||||
const payload = {
|
||||
filename_download: filename,
|
||||
storage: (env.STORAGE_LOCATIONS as string).split(',')[0].trim(),
|
||||
storage: toArray(env.STORAGE_LOCATIONS)[0],
|
||||
type: fileResponse.headers['content-type'],
|
||||
title: formatTitle(filename),
|
||||
...(req.body.data || {}),
|
||||
@@ -182,8 +185,14 @@ router.post(
|
||||
router.get(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new FilesService({ accountability: req.accountability });
|
||||
const metaService = new MetaService({ accountability: req.accountability });
|
||||
const service = new FilesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const metaService = new MetaService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const records = await service.readByQuery(req.sanitizedQuery);
|
||||
const meta = await metaService.getMetaForQuery('directus_files', req.sanitizedQuery);
|
||||
@@ -198,7 +207,10 @@ router.get(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const keys = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const service = new FilesService({ accountability: req.accountability });
|
||||
const service = new FilesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const record = await service.readByKey(keys as any, req.sanitizedQuery);
|
||||
res.locals.payload = { data: record || null };
|
||||
return next();
|
||||
@@ -210,7 +222,10 @@ router.patch(
|
||||
'/:pk',
|
||||
multipartHandler,
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new FilesService({ accountability: req.accountability });
|
||||
const service = new FilesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
let keys: PrimaryKey | PrimaryKey[] = [];
|
||||
|
||||
if (req.is('multipart/form-data')) {
|
||||
@@ -236,11 +251,31 @@ router.patch(
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
if (!req.body || Array.isArray(req.body) === false) {
|
||||
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
|
||||
}
|
||||
|
||||
const service = new FilesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.delete(req.body as PrimaryKey[]);
|
||||
return next();
|
||||
}),
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const keys = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const service = new FilesService({ accountability: req.accountability });
|
||||
const service = new FilesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.delete(keys as any);
|
||||
return next();
|
||||
}),
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import express from 'express';
|
||||
import asyncHandler from 'express-async-handler';
|
||||
import { FoldersService, MetaService } from '../services';
|
||||
import { ForbiddenException } from '../exceptions';
|
||||
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
|
||||
import useCollection from '../middleware/use-collection';
|
||||
import { respond } from '../middleware/respond';
|
||||
import { PrimaryKey } from '../types';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -12,7 +13,10 @@ router.use(useCollection('directus_folders'));
|
||||
router.post(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new FoldersService({ accountability: req.accountability });
|
||||
const service = new FoldersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = await service.create(req.body);
|
||||
|
||||
try {
|
||||
@@ -34,8 +38,14 @@ router.post(
|
||||
router.get(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new FoldersService({ accountability: req.accountability });
|
||||
const metaService = new MetaService({ accountability: req.accountability });
|
||||
const service = new FoldersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const metaService = new MetaService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const records = await service.readByQuery(req.sanitizedQuery);
|
||||
const meta = await metaService.getMetaForQuery('directus_files', req.sanitizedQuery);
|
||||
@@ -49,7 +59,10 @@ router.get(
|
||||
router.get(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new FoldersService({ accountability: req.accountability });
|
||||
const service = new FoldersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const record = await service.readByKey(primaryKey as any, req.sanitizedQuery);
|
||||
|
||||
@@ -62,7 +75,10 @@ router.get(
|
||||
router.patch(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new FoldersService({ accountability: req.accountability });
|
||||
const service = new FoldersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const primaryKey = await service.update(req.body, pk as any);
|
||||
|
||||
@@ -82,10 +98,30 @@ router.patch(
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
if (!req.body || Array.isArray(req.body) === false) {
|
||||
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
|
||||
}
|
||||
|
||||
const service = new FoldersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.delete(req.body as PrimaryKey[]);
|
||||
return next();
|
||||
}),
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new FoldersService({ accountability: req.accountability });
|
||||
const service = new FoldersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
await service.delete(primaryKey as any);
|
||||
return next();
|
||||
|
||||
@@ -5,12 +5,16 @@ import asyncHandler from 'express-async-handler';

const router = Router();

router.use(asyncHandler(async (req, res) => {
const service = new GraphQLService({ accountability: req.accountability });
const schema = await service.getSchema();
router.use(
asyncHandler(async (req, res) => {
const service = new GraphQLService({
accountability: req.accountability,
schema: req.schema,
});
const schema = await service.getSchema();

graphqlHTTP({ schema, graphiql: true })(req, res);
}));
graphqlHTTP({ schema, graphiql: true })(req, res);
})
);

export default router;
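The GraphQL route now rebuilds the service with req.schema on every request before delegating to graphqlHTTP. How the resulting schema is shaped depends entirely on the project's collections, so the query below is only an illustrative placeholder:

// Hypothetical request against the /graphql endpoint; the base URL, query shape and the
// `articles` collection are placeholders and depend on the generated schema.
async function runQuery(): Promise<unknown> {
	const response = await fetch('http://localhost:8055/graphql', {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({ query: '{ articles { id title } }' }),
	});
	return (await response.json()).data;
}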
|
||||
|
||||
|
||||
@@ -2,8 +2,11 @@ import express from 'express';
|
||||
import asyncHandler from 'express-async-handler';
|
||||
import collectionExists from '../middleware/collection-exists';
|
||||
import { ItemsService, MetaService } from '../services';
|
||||
import { RouteNotFoundException, ForbiddenException } from '../exceptions';
|
||||
import { RouteNotFoundException, ForbiddenException, FailedValidationException } from '../exceptions';
|
||||
import { respond } from '../middleware/respond';
|
||||
import { InvalidPayloadException } from '../exceptions';
|
||||
import { PrimaryKey } from '../types';
|
||||
import Joi from 'joi';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -15,7 +18,10 @@ router.post(
|
||||
throw new RouteNotFoundException(req.path);
|
||||
}
|
||||
|
||||
const service = new ItemsService(req.collection, { accountability: req.accountability });
|
||||
const service = new ItemsService(req.collection, {
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = await service.create(req.body);
|
||||
|
||||
try {
|
||||
@@ -38,8 +44,15 @@ router.get(
|
||||
'/:collection',
|
||||
collectionExists,
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new ItemsService(req.collection, { accountability: req.accountability });
|
||||
const metaService = new MetaService({ accountability: req.accountability });
|
||||
const service = new ItemsService(req.collection, {
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const metaService = new MetaService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const records = req.singleton
|
||||
? await service.readSingleton(req.sanitizedQuery)
|
||||
@@ -51,6 +64,7 @@ router.get(
|
||||
meta: meta,
|
||||
data: records || null,
|
||||
};
|
||||
|
||||
return next();
|
||||
}),
|
||||
respond
|
||||
@@ -64,7 +78,10 @@ router.get(
|
||||
throw new RouteNotFoundException(req.path);
|
||||
}
|
||||
|
||||
const service = new ItemsService(req.collection, { accountability: req.accountability });
|
||||
const service = new ItemsService(req.collection, {
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const result = await service.readByKey(primaryKey as any, req.sanitizedQuery);
|
||||
|
||||
@@ -80,7 +97,10 @@ router.patch(
|
||||
'/:collection',
|
||||
collectionExists,
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new ItemsService(req.collection, { accountability: req.accountability });
|
||||
const service = new ItemsService(req.collection, {
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
if (req.singleton === true) {
|
||||
await service.upsertSingleton(req.body);
|
||||
@@ -90,7 +110,35 @@ router.patch(
return next();
}

const primaryKeys = await service.update(req.body);
if (Array.isArray(req.body)) {
const primaryKeys = await service.update(req.body);

try {
const result = await service.readByKey(primaryKeys, req.sanitizedQuery);
res.locals.payload = { data: result || null };
} catch (error) {
if (error instanceof ForbiddenException) {
return next();
}

throw error;
}

return next();
}

const updateSchema = Joi.object({
keys: Joi.array().items(Joi.alternatives(Joi.string(), Joi.number())).required(),
data: Joi.object().required().unknown(),
});

const { error } = updateSchema.validate(req.body);

if (error) {
throw new FailedValidationException(error.details[0]);
}

const primaryKeys = await service.update(req.body.data, req.body.keys);

try {
const result = await service.readByKey(primaryKeys, req.sanitizedQuery);
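With this hunk, PATCH /items/:collection accepts either an array of items or a { keys, data } object validated by the Joi schema above. A sketch of the two request bodies, assuming a local instance and an articles collection (both placeholders):

// Hypothetical payloads for the updated PATCH /items/:collection handler.

// 1) Array form: each entry typically carries its own primary key (Array.isArray branch above).
const arrayBody = [
	{ id: 1, status: 'published' },
	{ id: 2, status: 'draft' },
];

// 2) keys/data form: one partial item applied to every listed key (updateSchema branch above).
const keysBody = {
	keys: [1, 2],
	data: { status: 'published' },
};

async function patchArticles(): Promise<void> {
	await fetch('http://localhost:8055/items/articles', {
		method: 'PATCH',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify(keysBody), // or arrayBody
	});
}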
|
||||
@@ -116,7 +164,10 @@ router.patch(
|
||||
throw new RouteNotFoundException(req.path);
|
||||
}
|
||||
|
||||
const service = new ItemsService(req.collection, { accountability: req.accountability });
|
||||
const service = new ItemsService(req.collection, {
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
|
||||
const updatedPrimaryKey = await service.update(req.body, primaryKey as any);
|
||||
@@ -137,11 +188,32 @@ router.patch(
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/:collection',
|
||||
collectionExists,
|
||||
asyncHandler(async (req, res, next) => {
|
||||
if (!req.body || Array.isArray(req.body) === false) {
|
||||
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
|
||||
}
|
||||
|
||||
const service = new ItemsService(req.collection, {
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.delete(req.body as PrimaryKey[]);
|
||||
return next();
|
||||
}),
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/:collection/:pk',
|
||||
collectionExists,
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new ItemsService(req.collection, { accountability: req.accountability });
|
||||
const service = new ItemsService(req.collection, {
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
await service.delete(pk as any);
|
||||
return next();
|
||||
|
||||
@@ -2,9 +2,10 @@ import express from 'express';
|
||||
import asyncHandler from 'express-async-handler';
|
||||
import { PermissionsService, MetaService } from '../services';
|
||||
import { clone } from 'lodash';
|
||||
import { InvalidCredentialsException, ForbiddenException } from '../exceptions';
|
||||
import { InvalidCredentialsException, ForbiddenException, InvalidPayloadException } from '../exceptions';
|
||||
import useCollection from '../middleware/use-collection';
|
||||
import { respond } from '../middleware/respond';
|
||||
import { PrimaryKey } from '../types';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -13,7 +14,10 @@ router.use(useCollection('directus_permissions'));
|
||||
router.post(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new PermissionsService({ accountability: req.accountability });
|
||||
const service = new PermissionsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = await service.create(req.body);
|
||||
|
||||
try {
|
||||
@@ -34,8 +38,14 @@ router.post(
|
||||
router.get(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new PermissionsService({ accountability: req.accountability });
|
||||
const metaService = new MetaService({ accountability: req.accountability });
|
||||
const service = new PermissionsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const metaService = new MetaService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const item = await service.readByQuery(req.sanitizedQuery);
|
||||
const meta = await metaService.getMetaForQuery('directus_permissions', req.sanitizedQuery);
|
||||
@@ -53,7 +63,7 @@ router.get(
|
||||
throw new InvalidCredentialsException();
|
||||
}
|
||||
|
||||
const service = new PermissionsService();
|
||||
const service = new PermissionsService({ schema: req.schema });
|
||||
const query = clone(req.sanitizedQuery || {});
|
||||
|
||||
query.filter = {
|
||||
@@ -75,7 +85,10 @@ router.get(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
if (req.path.endsWith('me')) return next();
|
||||
const service = new PermissionsService({ accountability: req.accountability });
|
||||
const service = new PermissionsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const record = await service.readByKey(primaryKey as any, req.sanitizedQuery);
|
||||
|
||||
@@ -88,7 +101,10 @@ router.get(
|
||||
router.patch(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new PermissionsService({ accountability: req.accountability });
|
||||
const service = new PermissionsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const primaryKey = await service.update(req.body, pk as any);
|
||||
|
||||
@@ -108,10 +124,30 @@ router.patch(
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
if (!req.body || Array.isArray(req.body) === false) {
|
||||
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
|
||||
}
|
||||
|
||||
const service = new PermissionsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.delete(req.body as PrimaryKey[]);
|
||||
return next();
|
||||
}),
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new PermissionsService({ accountability: req.accountability });
|
||||
const service = new PermissionsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
await service.delete(pk as any);
|
||||
return next();
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import express from 'express';
|
||||
import asyncHandler from 'express-async-handler';
|
||||
import { PresetsService, MetaService } from '../services';
|
||||
import { ForbiddenException } from '../exceptions';
|
||||
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
|
||||
import useCollection from '../middleware/use-collection';
|
||||
import { respond } from '../middleware/respond';
|
||||
import { PrimaryKey } from '../types';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -12,7 +13,10 @@ router.use(useCollection('directus_presets'));
|
||||
router.post(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new PresetsService({ accountability: req.accountability });
|
||||
const service = new PresetsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = await service.create(req.body);
|
||||
|
||||
try {
|
||||
@@ -34,8 +38,14 @@ router.post(
|
||||
router.get(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new PresetsService({ accountability: req.accountability });
|
||||
const metaService = new MetaService({ accountability: req.accountability });
|
||||
const service = new PresetsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const metaService = new MetaService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const records = await service.readByQuery(req.sanitizedQuery);
|
||||
const meta = await metaService.getMetaForQuery('directus_presets', req.sanitizedQuery);
|
||||
@@ -49,7 +59,10 @@ router.get(
|
||||
router.get(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new PresetsService({ accountability: req.accountability });
|
||||
const service = new PresetsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const record = await service.readByKey(pk as any, req.sanitizedQuery);
|
||||
|
||||
@@ -62,7 +75,10 @@ router.get(
|
||||
router.patch(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new PresetsService({ accountability: req.accountability });
|
||||
const service = new PresetsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const primaryKey = await service.update(req.body, pk as any);
|
||||
|
||||
@@ -82,10 +98,30 @@ router.patch(
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
if (!req.body || Array.isArray(req.body) === false) {
|
||||
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
|
||||
}
|
||||
|
||||
const service = new PresetsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.delete(req.body as PrimaryKey[]);
|
||||
return next();
|
||||
}),
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new PresetsService({ accountability: req.accountability });
|
||||
const service = new PresetsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
await service.delete(pk as any);
|
||||
return next();
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import express from 'express';
|
||||
import asyncHandler from 'express-async-handler';
|
||||
import { RelationsService, MetaService } from '../services';
|
||||
import { ForbiddenException } from '../exceptions';
|
||||
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
|
||||
import useCollection from '../middleware/use-collection';
|
||||
import { respond } from '../middleware/respond';
|
||||
import { PrimaryKey } from '../types';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -12,7 +13,10 @@ router.use(useCollection('directus_relations'));
|
||||
router.post(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RelationsService({ accountability: req.accountability });
|
||||
const service = new RelationsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = await service.create(req.body);
|
||||
|
||||
try {
|
||||
@@ -34,8 +38,14 @@ router.post(
|
||||
router.get(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RelationsService({ accountability: req.accountability });
|
||||
const metaService = new MetaService({ accountability: req.accountability });
|
||||
const service = new RelationsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const metaService = new MetaService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const records = await service.readByQuery(req.sanitizedQuery);
|
||||
const meta = await metaService.getMetaForQuery(req.collection, req.sanitizedQuery);
|
||||
@@ -49,7 +59,10 @@ router.get(
|
||||
router.get(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RelationsService({ accountability: req.accountability });
|
||||
const service = new RelationsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const record = await service.readByKey(pk as any, req.sanitizedQuery);
|
||||
res.locals.payload = { data: record || null };
|
||||
@@ -61,7 +74,10 @@ router.get(
|
||||
router.patch(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RelationsService({ accountability: req.accountability });
|
||||
const service = new RelationsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const primaryKey = await service.update(req.body, pk as any);
|
||||
|
||||
@@ -81,10 +97,30 @@ router.patch(
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
if (!req.body || Array.isArray(req.body) === false) {
|
||||
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
|
||||
}
|
||||
|
||||
const service = new RelationsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.delete(req.body as PrimaryKey[]);
|
||||
return next();
|
||||
}),
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RelationsService({ accountability: req.accountability });
|
||||
const service = new RelationsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
await service.delete(pk as any);
|
||||
return next();
|
||||
|
||||
@@ -11,8 +11,14 @@ router.use(useCollection('directus_revisions'));
|
||||
router.get(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RevisionsService({ accountability: req.accountability });
|
||||
const metaService = new MetaService({ accountability: req.accountability });
|
||||
const service = new RevisionsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const metaService = new MetaService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const records = await service.readByQuery(req.sanitizedQuery);
|
||||
const meta = await metaService.getMetaForQuery('directus_revisions', req.sanitizedQuery);
|
||||
@@ -26,7 +32,10 @@ router.get(
|
||||
router.get(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RevisionsService({ accountability: req.accountability });
|
||||
const service = new RevisionsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const record = await service.readByKey(pk as any, req.sanitizedQuery);
|
||||
res.locals.payload = { data: record || null };
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import express from 'express';
|
||||
import asyncHandler from 'express-async-handler';
|
||||
import { RolesService, MetaService } from '../services';
|
||||
import { ForbiddenException } from '../exceptions';
|
||||
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
|
||||
import useCollection from '../middleware/use-collection';
|
||||
import { respond } from '../middleware/respond';
|
||||
import { PrimaryKey } from '../types';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -12,7 +13,10 @@ router.use(useCollection('directus_roles'));
|
||||
router.post(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RolesService({ accountability: req.accountability });
|
||||
const service = new RolesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = await service.create(req.body);
|
||||
|
||||
try {
|
||||
@@ -34,8 +38,14 @@ router.post(
|
||||
router.get(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RolesService({ accountability: req.accountability });
|
||||
const metaService = new MetaService({ accountability: req.accountability });
|
||||
const service = new RolesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const metaService = new MetaService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const records = await service.readByQuery(req.sanitizedQuery);
|
||||
const meta = await metaService.getMetaForQuery('directus_roles', req.sanitizedQuery);
|
||||
@@ -49,7 +59,10 @@ router.get(
|
||||
router.get(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RolesService({ accountability: req.accountability });
|
||||
const service = new RolesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const record = await service.readByKey(pk as any, req.sanitizedQuery);
|
||||
res.locals.payload = { data: record || null };
|
||||
@@ -61,7 +74,10 @@ router.get(
|
||||
router.patch(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RolesService({ accountability: req.accountability });
|
||||
const service = new RolesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const primaryKey = await service.update(req.body, pk as any);
|
||||
|
||||
@@ -81,10 +97,30 @@ router.patch(
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
if (!req.body || Array.isArray(req.body) === false) {
|
||||
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
|
||||
}
|
||||
|
||||
const service = new RolesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.delete(req.body as PrimaryKey[]);
|
||||
return next();
|
||||
}),
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RolesService({ accountability: req.accountability });
|
||||
const service = new RolesService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
await service.delete(pk as any);
|
||||
return next();
|
||||
|
||||
@@ -9,7 +9,10 @@ const router = Router();
|
||||
router.get(
|
||||
'/specs/oas',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new SpecificationService({ accountability: req.accountability });
|
||||
const service = new SpecificationService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
res.locals.payload = await service.oas.generate();
|
||||
return next();
|
||||
}),
|
||||
@@ -20,12 +23,15 @@ router.get('/ping', (req, res) => res.send('pong'));
|
||||
|
||||
router.get(
|
||||
'/info',
|
||||
(req, res, next) => {
|
||||
const service = new ServerService({ accountability: req.accountability });
|
||||
const data = service.serverInfo();
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new ServerService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const data = await service.serverInfo();
|
||||
res.locals.payload = { data };
|
||||
return next();
|
||||
},
|
||||
}),
|
||||
respond
|
||||
);
|
||||
|
||||
|
||||
@@ -12,7 +12,10 @@ router.use(useCollection('directus_settings'));
|
||||
router.get(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new SettingsService({ accountability: req.accountability });
|
||||
const service = new SettingsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const records = await service.readSingleton(req.sanitizedQuery);
|
||||
res.locals.payload = { data: records || null };
|
||||
return next();
|
||||
@@ -23,7 +26,10 @@ router.get(
|
||||
router.patch(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new SettingsService({ accountability: req.accountability });
|
||||
const service = new SettingsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.upsertSingleton(req.body);
|
||||
|
||||
try {
|
||||
|
||||
@@ -1,15 +1,11 @@
|
||||
import express from 'express';
|
||||
import argon2 from 'argon2';
|
||||
import asyncHandler from 'express-async-handler';
|
||||
import Joi from 'joi';
|
||||
import {
|
||||
InvalidPayloadException,
|
||||
InvalidCredentialsException,
|
||||
ForbiddenException,
|
||||
} from '../exceptions';
|
||||
import { InvalidPayloadException, InvalidCredentialsException, ForbiddenException } from '../exceptions';
|
||||
import { UsersService, MetaService, AuthenticationService } from '../services';
|
||||
import useCollection from '../middleware/use-collection';
|
||||
import { respond } from '../middleware/respond';
|
||||
import { PrimaryKey } from '../types';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -18,7 +14,10 @@ router.use(useCollection('directus_users'));
|
||||
router.post(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new UsersService({ accountability: req.accountability });
|
||||
const service = new UsersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = await service.create(req.body);
|
||||
|
||||
try {
|
||||
@@ -40,8 +39,14 @@ router.post(
|
||||
router.get(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new UsersService({ accountability: req.accountability });
|
||||
const metaService = new MetaService({ accountability: req.accountability });
|
||||
const service = new UsersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const metaService = new MetaService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const item = await service.readByQuery(req.sanitizedQuery);
|
||||
const meta = await metaService.getMetaForQuery('directus_users', req.sanitizedQuery);
|
||||
@@ -59,7 +64,10 @@ router.get(
|
||||
throw new InvalidCredentialsException();
|
||||
}
|
||||
|
||||
const service = new UsersService({ accountability: req.accountability });
|
||||
const service = new UsersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
try {
|
||||
const item = await service.readByKey(req.accountability.user, req.sanitizedQuery);
|
||||
@@ -82,7 +90,10 @@ router.get(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
if (req.path.endsWith('me')) return next();
|
||||
const service = new UsersService({ accountability: req.accountability });
|
||||
const service = new UsersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const items = await service.readByKey(pk as any, req.sanitizedQuery);
|
||||
res.locals.payload = { data: items || null };
|
||||
@@ -98,7 +109,10 @@ router.patch(
|
||||
throw new InvalidCredentialsException();
|
||||
}
|
||||
|
||||
const service = new UsersService({ accountability: req.accountability });
|
||||
const service = new UsersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = await service.update(req.body, req.accountability.user);
|
||||
const item = await service.readByKey(primaryKey, req.sanitizedQuery);
|
||||
|
||||
@@ -119,7 +133,7 @@ router.patch(
|
||||
throw new InvalidPayloadException(`"last_page" key is required.`);
|
||||
}
|
||||
|
||||
const service = new UsersService();
|
||||
const service = new UsersService({ schema: req.schema });
|
||||
await service.update({ last_page: req.body.last_page }, req.accountability.user);
|
||||
|
||||
return next();
|
||||
@@ -130,7 +144,10 @@ router.patch(
|
||||
router.patch(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new UsersService({ accountability: req.accountability });
|
||||
const service = new UsersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const primaryKey = await service.update(req.body, pk as any);
|
||||
|
||||
@@ -150,10 +167,31 @@ router.patch(
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
if (!req.body || Array.isArray(req.body) === false) {
|
||||
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
|
||||
}
|
||||
|
||||
const service = new UsersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.delete(req.body as PrimaryKey[]);
|
||||
|
||||
return next();
|
||||
}),
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new UsersService({ accountability: req.accountability });
|
||||
const service = new UsersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
await service.delete(pk as any);
|
||||
|
||||
@@ -163,7 +201,7 @@ router.delete(
);

const inviteSchema = Joi.object({
email: Joi.string().email().required(),
email: Joi.alternatives(Joi.string().email(), Joi.array().items(Joi.string().email())).required(),
role: Joi.string().uuid({ version: 'uuidv4' }).required(),
});
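The invite schema now takes either a single email or an array of emails; the role is still a single v4 UUID. Illustrative payloads (all values are placeholders):

// Both shapes satisfy the updated inviteSchema; emails and the role UUID are placeholders.
const singleInvite = {
	email: 'editor@example.com',
	role: 'c86c2761-65d3-43c3-897f-6f74ad6a5bd7',
};

const batchInvite = {
	email: ['editor@example.com', 'writer@example.com'],
	role: 'c86c2761-65d3-43c3-897f-6f74ad6a5bd7',
};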
|
||||
|
||||
@@ -173,7 +211,10 @@ router.post(
|
||||
const { error } = inviteSchema.validate(req.body);
|
||||
if (error) throw new InvalidPayloadException(error.message);
|
||||
|
||||
const service = new UsersService({ accountability: req.accountability });
|
||||
const service = new UsersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.inviteUser(req.body.email, req.body.role);
|
||||
return next();
|
||||
}),
|
||||
@@ -190,7 +231,10 @@ router.post(
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const { error } = acceptInviteSchema.validate(req.body);
|
||||
if (error) throw new InvalidPayloadException(error.message);
|
||||
const service = new UsersService({ accountability: req.accountability });
|
||||
const service = new UsersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.acceptInvite(req.body.token, req.body.password);
|
||||
return next();
|
||||
}),
|
||||
@@ -208,9 +252,15 @@ router.post(
|
||||
throw new InvalidPayloadException(`"password" is required`);
|
||||
}
|
||||
|
||||
const service = new UsersService({ accountability: req.accountability });
|
||||
const service = new UsersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const authService = new AuthenticationService({ accountability: req.accountability });
|
||||
const authService = new AuthenticationService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await authService.verifyPassword(req.accountability.user, req.body.password);
|
||||
|
||||
const { url, secret } = await service.enableTFA(req.accountability.user);
|
||||
@@ -232,8 +282,14 @@ router.post(
|
||||
throw new InvalidPayloadException(`"otp" is required`);
|
||||
}
|
||||
|
||||
const service = new UsersService({ accountability: req.accountability });
|
||||
const authService = new AuthenticationService({ accountability: req.accountability });
|
||||
const service = new UsersService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const authService = new AuthenticationService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const otpValid = await authService.verifyOTP(req.accountability.user, req.body.otp);
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ router.get(
);

router.post(
'/hash',
'/hash/generate',
asyncHandler(async (req, res) => {
if (!req.body?.string) {
throw new InvalidPayloadException(`"string" is required`);
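The hash endpoint moves from /hash to /hash/generate and still expects a "string" property in the body. A hedged usage sketch, assuming the router is mounted under /utils on a local instance (neither assumption is shown in this hunk):

// Hypothetical call; the /utils prefix, base URL and response shape are assumptions.
async function hashValue(plain: string): Promise<unknown> {
	const response = await fetch('http://localhost:8055/utils/hash/generate', {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({ string: plain }),
	});
	return await response.json();
}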
|
||||
@@ -67,7 +67,10 @@ router.post(
|
||||
const { error } = SortSchema.validate(req.body);
|
||||
if (error) throw new InvalidPayloadException(error.message);
|
||||
|
||||
const service = new UtilsService({ accountability: req.accountability });
|
||||
const service = new UtilsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.sort(req.collection, req.body);
|
||||
|
||||
return res.status(200).end();
|
||||
@@ -78,7 +81,10 @@ router.post(
|
||||
router.post(
|
||||
'/revert/:revision',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new RevisionsService({ accountability: req.accountability });
|
||||
const service = new RevisionsService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.revert(req.params.revision);
|
||||
next();
|
||||
}),
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import express from 'express';
|
||||
import asyncHandler from 'express-async-handler';
|
||||
import { WebhooksService, MetaService } from '../services';
|
||||
import { ForbiddenException } from '../exceptions';
|
||||
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
|
||||
import useCollection from '../middleware/use-collection';
|
||||
import { respond } from '../middleware/respond';
|
||||
import { PrimaryKey } from '../types';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -12,7 +13,10 @@ router.use(useCollection('directus_webhooks'));
|
||||
router.post(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new WebhooksService({ accountability: req.accountability });
|
||||
const service = new WebhooksService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const primaryKey = await service.create(req.body);
|
||||
|
||||
try {
|
||||
@@ -34,8 +38,14 @@ router.post(
|
||||
router.get(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new WebhooksService({ accountability: req.accountability });
|
||||
const metaService = new MetaService({ accountability: req.accountability });
|
||||
const service = new WebhooksService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const metaService = new MetaService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const records = await service.readByQuery(req.sanitizedQuery);
|
||||
const meta = await metaService.getMetaForQuery(req.collection, req.sanitizedQuery);
|
||||
@@ -49,7 +59,10 @@ router.get(
|
||||
router.get(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new WebhooksService({ accountability: req.accountability });
|
||||
const service = new WebhooksService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const record = await service.readByKey(pk as any, req.sanitizedQuery);
|
||||
|
||||
@@ -62,7 +75,10 @@ router.get(
|
||||
router.patch(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new WebhooksService({ accountability: req.accountability });
|
||||
const service = new WebhooksService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
const primaryKey = await service.update(req.body, pk as any);
|
||||
|
||||
@@ -82,10 +98,31 @@ router.patch(
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
if (!req.body || Array.isArray(req.body) === false) {
|
||||
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
|
||||
}
|
||||
|
||||
const service = new WebhooksService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
await service.delete(req.body as PrimaryKey[]);
|
||||
|
||||
return next();
|
||||
}),
|
||||
respond
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/:pk',
|
||||
asyncHandler(async (req, res, next) => {
|
||||
const service = new WebhooksService({ accountability: req.accountability });
|
||||
const service = new WebhooksService({
|
||||
accountability: req.accountability,
|
||||
schema: req.schema,
|
||||
});
|
||||
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
|
||||
await service.delete(pk as any);
|
||||
|
||||
|
||||
@@ -4,36 +4,25 @@ import camelCase from 'camelcase';
import path from 'path';
import logger from '../logger';
import env from '../env';
import { performance } from 'perf_hooks';

import SchemaInspector from 'knex-schema-inspector';
import SchemaInspector from '@directus/schema';
import { getConfigFromEnv } from '../utils/get-config-from-env';

dotenv.config({ path: path.resolve(__dirname, '../../', '.env') });

const connectionConfig: Record<string, any> = {};

for (let [key, value] of Object.entries(env)) {
key = key.toLowerCase();
if (key.startsWith('db') === false) continue;
if (key === 'db_client') continue;
if (key === 'db_search_path') continue;
if (key === 'db_connection_string') continue;

key = key.slice(3); // remove `DB_`

connectionConfig[camelCase(key)] = value;
}
const connectionConfig: Record<string, any> = getConfigFromEnv('DB_', [
'DB_CLIENT',
'DB_SEARCH_PATH',
'DB_CONNECTION_STRING',
]);
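The hand-rolled env loop is replaced by a getConfigFromEnv utility. The utility itself is not shown in this diff; a sketch of what it plausibly does, reconstructed from the loop it replaces:

// Assumed shape of getConfigFromEnv, inferred from the removed loop above;
// the real utility in ../utils/get-config-from-env may differ in details.
import camelCase from 'camelcase';

export function getConfigFromEnv(prefix: string, omit: string[] = []): Record<string, any> {
	const config: Record<string, any> = {};

	for (const [key, value] of Object.entries(process.env)) {
		if (key.toLowerCase().startsWith(prefix.toLowerCase()) === false) continue;
		if (omit.includes(key)) continue;

		// 'DB_HOST' -> 'host', 'DB_SSL_CA' -> 'sslCa'
		config[camelCase(key.slice(prefix.length))] = value;
	}

	return config;
}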
|
||||
|
||||
const knexConfig: Config = {
client: env.DB_CLIENT,
searchPath: env.DB_SEARCH_PATH,
connection: env.DB_CONNECTION_STRING || connectionConfig,
log: {
warn: (msg) => {
/** @note this is wild */
if (msg === '.returning() is not supported by mysql and will not have any effect.')
return;
logger.warn(msg);
},
warn: (msg) => logger.warn(msg),
error: (msg) => logger.error(msg),
deprecate: (msg) => logger.info(msg),
debug: (msg) => logger.debug(msg),
|
||||
@@ -46,9 +35,29 @@ if (env.DB_CLIENT === 'sqlite3') {

const database = knex(knexConfig);

const times: Record<string, number> = {};

database
.on('query', (queryInfo) => {
times[queryInfo.__knexUid] = performance.now();
})
.on('query-response', (response, queryInfo) => {
const delta = performance.now() - times[queryInfo.__knexUid];
logger.trace(`[${delta.toFixed(3)}ms] ${queryInfo.sql} [${queryInfo.bindings.join(', ')}]`);
});
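Each knex query is timed via the query/query-response events and logged at trace level. Given the template literal above, an emitted line would look roughly like this (values purely illustrative):

// [1.532ms] select * from "directus_users" where "id" = ? [1]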
|
||||
|
||||
export async function hasDatabaseConnection() {
try {
await database.raw('select 1 + 1 as result');
return true;
} catch {
return false;
}
}

export async function validateDBConnection() {
try {
await database.raw('select 1+1 as result');
await hasDatabaseConnection();
} catch (error) {
logger.fatal(`Can't connect to the database.`);
logger.fatal(error);
@@ -57,4 +66,12 @@ export async function validateDBConnection() {
}

export const schemaInspector = SchemaInspector(database);

export async function isInstalled() {
// The existence of a directus_collections table alone isn't a "proper" check to see if everything
// is installed correctly of course, but it's safe enough to assume that this collection only
// exists when using the installer CLI.
return await schemaInspector.hasTable('directus_collections');
}

export default database;
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
import Knex from 'knex';
|
||||
|
||||
export async function up(knex: Knex) {
|
||||
await knex.schema.alterTable('directus_fields', (table) => {
|
||||
table.dropForeign(['collection']);
|
||||
});
|
||||
|
||||
await knex.schema.alterTable('directus_activity', (table) => {
|
||||
table.dropForeign(['collection']);
|
||||
});
|
||||
|
||||
await knex.schema.alterTable('directus_permissions', (table) => {
|
||||
table.dropForeign(['collection']);
|
||||
});
|
||||
|
||||
await knex.schema.alterTable('directus_presets', (table) => {
|
||||
table.dropForeign(['collection']);
|
||||
});
|
||||
|
||||
await knex.schema.alterTable('directus_relations', (table) => {
|
||||
table.dropForeign(['one_collection']);
|
||||
table.dropForeign(['many_collection']);
|
||||
});
|
||||
|
||||
await knex.schema.alterTable('directus_revisions', (table) => {
|
||||
table.dropForeign(['collection']);
|
||||
});
|
||||
}
|
||||
|
||||
export async function down(knex: Knex) {
|
||||
await knex.schema.alterTable('directus_fields', (table) => {
|
||||
table.foreign('collection').references('directus_collections.collection');
|
||||
});
|
||||
|
||||
await knex.schema.alterTable('directus_activity', (table) => {
|
||||
table.foreign('collection').references('directus_collections.collection');
|
||||
});
|
||||
|
||||
await knex.schema.alterTable('directus_permissions', (table) => {
|
||||
table.foreign('collection').references('directus_collections.collection');
|
||||
});
|
||||
|
||||
await knex.schema.alterTable('directus_presets', (table) => {
|
||||
table.foreign('collection').references('directus_collections.collection');
|
||||
});
|
||||
|
||||
await knex.schema.alterTable('directus_relations', (table) => {
|
||||
table.foreign('one_collection').references('directus_collections.collection');
|
||||
table.foreign('many_collection').references('directus_collections.collection');
|
||||
});
|
||||
|
||||
await knex.schema.alterTable('directus_revisions', (table) => {
|
||||
table.foreign('collection').references('directus_collections.collection');
|
||||
});
|
||||
}
|
||||
128
api/src/database/migrations/20201029A-remove-system-relations.ts
Normal file
@@ -0,0 +1,128 @@
import Knex from 'knex';
import { merge } from 'lodash';

export async function up(knex: Knex) {
	await knex('directus_relations')
		.delete()
		.where('many_collection', 'like', 'directus_%')
		.andWhere('one_collection', 'like', 'directus_%');
}

export async function down(knex: Knex) {
	const defaults = {
		many_collection: 'directus_users',
		many_field: null,
		many_primary: null,
		one_collection: null,
		one_field: null,
		one_primary: null,
		junction_field: null,
	};

	const systemRelations = [
		{
			many_collection: 'directus_users',
			many_field: 'role',
			many_primary: 'id',
			one_collection: 'directus_roles',
			one_field: 'users',
			one_primary: 'id',
		},
		{
			many_collection: 'directus_users',
			many_field: 'avatar',
			many_primary: 'id',
			one_collection: 'directus_files',
			one_primary: 'id',
		},
		{
			many_collection: 'directus_revisions',
			many_field: 'activity',
			many_primary: 'id',
			one_collection: 'directus_activity',
			one_field: 'revisions',
			one_primary: 'id',
		},
		{
			many_collection: 'directus_presets',
			many_field: 'user',
			many_primary: 'id',
			one_collection: 'directus_users',
			one_primary: 'id',
		},
		{
			many_collection: 'directus_presets',
			many_field: 'role',
			many_primary: 'id',
			one_collection: 'directus_roles',
			one_primary: 'id',
		},
		{
			many_collection: 'directus_folders',
			many_field: 'parent',
			many_primary: 'id',
			one_collection: 'directus_folders',
			one_primary: 'id',
		},
		{
			many_collection: 'directus_files',
			many_field: 'folder',
			many_primary: 'id',
			one_collection: 'directus_folders',
			one_primary: 'id',
		},
		{
			many_collection: 'directus_files',
			many_field: 'uploaded_by',
			many_primary: 'id',
			one_collection: 'directus_users',
			one_primary: 'id',
		},
		{
			many_collection: 'directus_fields',
			many_field: 'collection',
			many_primary: 'id',
			one_collection: 'directus_collections',
			one_field: 'fields',
			one_primary: 'collection',
		},
		{
			many_collection: 'directus_activity',
			many_field: 'user',
			many_primary: 'id',
			one_collection: 'directus_users',
			one_primary: 'id',
		},
		{
			many_collection: 'directus_settings',
			many_field: 'project_logo',
			many_primary: 'id',
			one_collection: 'directus_files',
			one_primary: 'id',
		},
		{
			many_collection: 'directus_settings',
			many_field: 'public_foreground',
			many_primary: 'id',
			one_collection: 'directus_files',
			one_primary: 'id',
		},
		{
			many_collection: 'directus_settings',
			many_field: 'public_background',
			many_primary: 'id',
			one_collection: 'directus_files',
			one_primary: 'id',
		},
	].map((row) => {
		for (const [key, value] of Object.entries(row)) {
			if (value !== null && (typeof value === 'object' || Array.isArray(value))) {
				(row as any)[key] = JSON.stringify(value);
			}
		}

		return merge({}, defaults, row);
	});

	await knex.insert(systemRelations).into('directus_relations');
}
@@ -0,0 +1,99 @@
import Knex from 'knex';
import { merge } from 'lodash';

export async function up(knex: Knex) {
	await knex('directus_collections').delete().where('collection', 'like', 'directus_%');
}

export async function down(knex: Knex) {
	const defaults = {
		collection: null,
		hidden: false,
		singleton: false,
		icon: null,
		note: null,
		translations: null,
		display_template: null,
	};

	const systemCollections = [
		{
			collection: 'directus_activity',
			note: 'Accountability logs for all events',
		},
		{
			collection: 'directus_collections',
			icon: 'list_alt',
			note: 'Additional collection configuration and metadata',
		},
		{
			collection: 'directus_fields',
			icon: 'input',
			note: 'Additional field configuration and metadata',
		},
		{
			collection: 'directus_files',
			icon: 'folder',
			note: 'Metadata for all managed file assets',
		},
		{
			collection: 'directus_folders',
			note: 'Provides virtual directories for files',
		},
		{
			collection: 'directus_permissions',
			icon: 'admin_panel_settings',
			note: 'Access permissions for each role',
		},
		{
			collection: 'directus_presets',
			icon: 'bookmark_border',
			note: 'Presets for collection defaults and bookmarks',
		},
		{
			collection: 'directus_relations',
			icon: 'merge_type',
			note: 'Relationship configuration and metadata',
		},
		{
			collection: 'directus_revisions',
			note: 'Data snapshots for all activity',
		},
		{
			collection: 'directus_roles',
			icon: 'supervised_user_circle',
			note: 'Permission groups for system users',
		},
		{
			collection: 'directus_sessions',
			note: 'User session information',
		},
		{
			collection: 'directus_settings',
			singleton: true,
			note: 'Project configuration options',
		},
		{
			collection: 'directus_users',
			archive_field: 'status',
			archive_value: 'archived',
			unarchive_value: 'draft',
			icon: 'people_alt',
			note: 'System users for the platform',
		},
		{
			collection: 'directus_webhooks',
			note: 'Configuration for event-based HTTP requests',
		},
	].map((row) => {
		for (const [key, value] of Object.entries(row)) {
			if (value !== null && (typeof value === 'object' || Array.isArray(value))) {
				(row as any)[key] = JSON.stringify(value);
			}
		}

		return merge({}, defaults, row);
	});

	await knex.insert(systemCollections).into('directus_collections');
}
1650 api/src/database/migrations/20201029C-remove-system-fields.ts Normal file
File diff suppressed because it is too large
@@ -0,0 +1,152 @@
import Knex from 'knex';

const updates = [
	{
		table: 'directus_fields',
		constraints: [
			{
				column: 'group',
				references: 'directus_fields.id',
				onDelete: 'SET NULL',
			},
		],
	},
	{
		table: 'directus_files',
		constraints: [
			{
				column: 'folder',
				references: 'directus_folders.id',
				onDelete: 'SET NULL',
			},
			{
				column: 'uploaded_by',
				references: 'directus_users.id',
				onDelete: 'SET NULL',
			},
			{
				column: 'modified_by',
				references: 'directus_users.id',
				onDelete: 'SET NULL',
			},
		],
	},
	{
		table: 'directus_folders',
		constraints: [
			{
				column: 'parent',
				references: 'directus_folders.id',
				onDelete: 'CASCADE',
			},
		],
	},
	{
		table: 'directus_permissions',
		constraints: [
			{
				column: 'role',
				references: 'directus_roles.id',
				onDelete: 'CASCADE',
			},
		],
	},
	{
		table: 'directus_presets',
		constraints: [
			{
				column: 'user',
				references: 'directus_users.id',
				onDelete: 'CASCADE',
			},
			{
				column: 'role',
				references: 'directus_roles.id',
				onDelete: 'CASCADE',
			},
		],
	},
	{
		table: 'directus_revisions',
		constraints: [
			{
				column: 'activity',
				references: 'directus_activity.id',
				onDelete: 'CASCADE',
			},
			{
				column: 'parent',
				references: 'directus_revisions.id',
				onDelete: 'SET NULL',
			},
		],
	},
	{
		table: 'directus_sessions',
		constraints: [
			{
				column: 'user',
				references: 'directus_users.id',
				onDelete: 'CASCADE',
			},
		],
	},
	{
		table: 'directus_settings',
		constraints: [
			{
				column: 'project_logo',
				references: 'directus_files.id',
				onDelete: 'SET NULL',
			},
			{
				column: 'public_foreground',
				references: 'directus_files.id',
				onDelete: 'SET NULL',
			},
			{
				column: 'public_background',
				references: 'directus_files.id',
				onDelete: 'SET NULL',
			},
		],
	},
	{
		table: 'directus_users',
		constraints: [
			{
				column: 'role',
				references: 'directus_roles.id',
				onDelete: 'SET NULL',
			},
		],
	},
];

export async function up(knex: Knex) {
	for (const update of updates) {
		await knex.schema.alterTable(update.table, (table) => {
			for (const constraint of update.constraints) {
				table.dropForeign([constraint.column]);

				table
					.foreign(constraint.column)
					.references(constraint.references)
					.onUpdate('CASCADE')
					.onDelete(constraint.onDelete);
			}
		});
	}
}

export async function down(knex: Knex) {
	for (const update of updates) {
		await knex.schema.alterTable(update.table, (table) => {
			for (const constraint of update.constraints) {
				table.dropForeign([constraint.column]);

				table.foreign(constraint.column).references(constraint.references).onUpdate('NO ACTION').onDelete('NO ACTION');
			}
		});
	}
}
@@ -0,0 +1,13 @@
import Knex from 'knex';

export async function up(knex: Knex) {
	await knex.schema.alterTable('directus_webhooks', (table) => {
		table.text('url').alter();
	});
}

export async function down(knex: Knex) {
	await knex.schema.alterTable('directus_webhooks', (table) => {
		table.string('url').alter();
	});
}
@@ -2,6 +2,7 @@ import fse from 'fs-extra';
import Knex from 'knex';
import path from 'path';
import formatTitle from '@directus/format-title';
import env from '../../env';

type Migration = {
	version: string;
@@ -11,25 +12,34 @@ type Migration = {

export default async function run(database: Knex, direction: 'up' | 'down' | 'latest') {
	let migrationFiles = await fse.readdir(__dirname);
	migrationFiles = migrationFiles.filter((file: string) => file !== 'run.ts');

	const completedMigrations = await database
		.select<Migration[]>('*')
		.from('directus_migrations')
		.orderBy('version');
	const customMigrationsPath = path.resolve(env.EXTENSIONS_PATH, 'migrations');
	const customMigrationFiles =
		((await fse.pathExists(customMigrationsPath)) && (await fse.readdir(customMigrationsPath))) || [];

	const migrations = migrationFiles.map((migrationFile) => {
		const version = migrationFile.split('-')[0];
		const name = formatTitle(migrationFile.split('-').slice(1).join('_').split('.')[0]);
	migrationFiles = migrationFiles.filter(
		(file: string) => file.startsWith('run') === false && file.endsWith('.d.ts') === false
	);

	const completedMigrations = await database.select<Migration[]>('*').from('directus_migrations').orderBy('version');

	const migrations = [
		...migrationFiles.map((path) => parseFilePath(path)),
		...customMigrationFiles.map((path) => parseFilePath(path, true)),
	];

	function parseFilePath(filePath: string, custom: boolean = false) {
		const version = filePath.split('-')[0];
		const name = formatTitle(filePath.split('-').slice(1).join('_').split('.')[0]);
		const completed = !!completedMigrations.find((migration) => migration.version === version);

		return {
			file: migrationFile,
			file: custom ? path.join(customMigrationsPath, filePath) : path.join(__dirname, filePath),
			version,
			name,
			completed,
		};
	});
	}

	if (direction === 'up') await up();
	if (direction === 'down') await down();
@@ -52,11 +62,9 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
			throw Error('Nothing to upgrade');
		}

		const { up } = require(path.join(__dirname, nextVersion.file));
		const { up } = require(nextVersion.file);
		await up(database);
		await database
			.insert({ version: nextVersion.version, name: nextVersion.name })
			.into('directus_migrations');
		await database.insert({ version: nextVersion.version, name: nextVersion.name }).into('directus_migrations');
	}

	async function down() {
@@ -66,15 +74,13 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
			throw Error('Nothing to downgrade');
		}

		const migration = migrations.find(
			(migration) => migration.version === currentVersion.version
		);
		const migration = migrations.find((migration) => migration.version === currentVersion.version);

		if (!migration) {
			throw new Error('Couldnt find migration');
		}

		const { down } = require(path.join(__dirname, migration.file));
		const { down } = require(migration.file);
		await down(database);
		await database('directus_migrations').delete().where({ version: migration.version });
	}
@@ -82,11 +88,9 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la

	async function latest() {
		for (const migration of migrations) {
			if (migration.completed === false) {
				const { up } = require(path.join(__dirname, migration.file));
				const { up } = require(migration.file);
				await up(database);
				await database
					.insert({ version: migration.version, name: migration.name })
					.into('directus_migrations');
				await database.insert({ version: migration.version, name: migration.name }).into('directus_migrations');
			}
		}
	}

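The reworked runner above now also picks up migrations from `<EXTENSIONS_PATH>/migrations`, keyed by the version prefix before the first dash and loaded via `require(file)` with `up`/`down` exports. A hedged sketch of what such a custom migration file could look like; the filename `20201110A-add-notes-field.ts`, the `articles` table, and the `notes` column are invented for illustration, and whether a plain .ts file is require()-able here depends on the runtime setup:

// Hypothetical custom migration: extensions/migrations/20201110A-add-notes-field.ts
// The <version>-<name> filename format and the up/down exports mirror the bundled migrations in this diff.
import Knex from 'knex';

export async function up(knex: Knex) {
	await knex.schema.alterTable('articles', (table) => {
		table.text('notes'); // 'articles' and 'notes' are example names, not part of this change
	});
}

export async function down(knex: Knex) {
	await knex.schema.alterTable('articles', (table) => {
		table.dropColumn('notes');
	});
}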
@@ -1,8 +1,7 @@
import { AST, NestedCollectionNode, FieldNode, M2ONode, O2MNode } from '../types/ast';
import { AST, NestedCollectionNode, FieldNode } from '../types/ast';
import { clone, cloneDeep, uniq, pick } from 'lodash';
import database from './index';
import SchemaInspector from 'knex-schema-inspector';
import { Query, Item } from '../types';
import { Query, Item, SchemaOverview } from '../types';
import { PayloadService } from '../services/payload';
import applyQuery from '../utils/apply-query';
import Knex, { QueryBuilder } from 'knex';
@@ -16,6 +15,7 @@ type RunASTOptions = {

export default async function runAST(
	originalAST: AST | NestedCollectionNode,
	schema: SchemaOverview,
	options?: RunASTOptions
): Promise<null | Item | Item[]> {
	const ast = cloneDeep(originalAST);
@@ -26,11 +26,7 @@ export default async function runAST(
		const results: { [collection: string]: null | Item | Item[] } = {};

		for (const collection of ast.names) {
			results[collection] = await run(
				collection,
				ast.children[collection],
				ast.query[collection]
			);
			results[collection] = await run(collection, ast.children[collection], ast.query[collection]);
		}

		return results;
@@ -38,27 +34,23 @@ export default async function runAST(
		return await run(ast.name, ast.children, options?.query || ast.query);
	}

	async function run(
		collection: string,
		children: (NestedCollectionNode | FieldNode)[],
		query: Query
	) {
	async function run(collection: string, children: (NestedCollectionNode | FieldNode)[], query: Query) {
		// Retrieve the database columns to select in the current AST
		const { columnsToSelect, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel(
			collection,
			children,
			knex
			schema
		);

		// The actual knex query builder instance. This is a promise that resolves with the raw items from the db
		const dbQuery = await getDBQuery(knex, collection, columnsToSelect, query, primaryKeyField);
		const dbQuery = await getDBQuery(knex, collection, columnsToSelect, query, primaryKeyField, schema);

		const rawItems: Item | Item[] = await dbQuery;

		if (!rawItems) return null;

		// Run the items through the special transforms
		const payloadService = new PayloadService(collection, { knex });
		const payloadService = new PayloadService(collection, { knex, schema });
		let items: null | Item | Item[] = await payloadService.processValues('read', rawItems);

		if (!items || items.length === 0) return items;
@@ -73,15 +65,16 @@ export default async function runAST(
			// all nested items for all parent items at once. Because of this, we can't limit that query
			// to the "standard" item limit. Instead of _n_ nested items per parent item, it would mean
			// that there's _n_ items, which are then divided on the parent items. (no good)
			if (nestedNode.type === 'o2m' && typeof nestedNode.query.limit === 'number') {
				tempLimit = nestedNode.query.limit;
			if (nestedNode.type === 'o2m') {
				tempLimit = nestedNode.query.limit || 100;
				nestedNode.query.limit = -1;
			}

			let nestedItems = await runAST(nestedNode, { knex, child: true });
			let nestedItems = await runAST(nestedNode, schema, { knex, child: true });

			if (nestedItems) {
				// Merge all fetched nested records with the parent items

				items = mergeWithParentItems(nestedItems, items, nestedNode, tempLimit);
			}
		}
@@ -101,15 +94,10 @@ export default async function runAST(
async function parseCurrentLevel(
	collection: string,
	children: (NestedCollectionNode | FieldNode)[],
	knex: Knex
	schema: SchemaOverview
) {
	const schemaInspector = SchemaInspector(knex);

	const primaryKeyField = await schemaInspector.primary(collection);

	const columnsInCollection = (await schemaInspector.columns(collection)).map(
		({ column }) => column
	);
	const primaryKeyField = schema[collection].primary;
	const columnsInCollection = Object.keys(schema[collection].columns);

	const columnsToSelect: string[] = [];
	const nestedCollectionNodes: NestedCollectionNode[] = [];
@@ -150,7 +138,8 @@ async function getDBQuery(
	table: string,
	columns: string[],
	query: Query,
	primaryKeyField: string
	primaryKeyField: string,
	schema: SchemaOverview
): Promise<QueryBuilder> {
	let dbQuery = knex.select(columns.map((column) => `${table}.${column}`)).from(table);

@@ -164,15 +153,12 @@
	query.sort = query.sort || [{ column: primaryKeyField, order: 'asc' }];

	await applyQuery(table, dbQuery, queryCopy);
	await applyQuery(knex, table, dbQuery, queryCopy, schema);

	return dbQuery;
}

function applyParentFilters(
	nestedCollectionNodes: NestedCollectionNode[],
	parentItem: Item | Item[]
) {
function applyParentFilters(nestedCollectionNodes: NestedCollectionNode[], parentItem: Item | Item[]) {
	const parentItems = toArray(parentItem);

	for (const nestedNode of nestedCollectionNodes) {
@@ -184,9 +170,7 @@ function applyParentFilters(
				filter: {
					...(nestedNode.query.filter || {}),
					[nestedNode.relation.one_primary!]: {
						_in: uniq(
							parentItems.map((res) => res[nestedNode.relation.many_field])
						).filter((id) => id),
						_in: uniq(parentItems.map((res) => res[nestedNode.relation.many_field])).filter((id) => id),
					},
				},
			};
@@ -204,9 +188,7 @@ function applyParentFilters(
				filter: {
					...(nestedNode.query.filter || {}),
					[nestedNode.relation.many_field]: {
						_in: uniq(parentItems.map((res) => res[nestedNode.parentKey])).filter(
							(id) => id
						),
						_in: uniq(parentItems.map((res) => res[nestedNode.parentKey])).filter((id) => id),
					},
				},
			};
@@ -252,9 +234,7 @@ function mergeWithParentItems(
	if (nestedNode.type === 'm2o') {
		for (const parentItem of parentItems) {
			const itemChild = nestedItems.find((nestedItem) => {
				return (
					nestedItem[nestedNode.relation.one_primary!] === parentItem[nestedNode.fieldKey]
				);
				return nestedItem[nestedNode.relation.one_primary!] == parentItem[nestedNode.fieldKey];
			});

			parentItem[nestedNode.fieldKey] = itemChild || null;
@@ -266,11 +246,9 @@
				if (Array.isArray(nestedItem[nestedNode.relation.many_field])) return true;

				return (
					nestedItem[nestedNode.relation.many_field] ===
						parentItem[nestedNode.relation.one_primary!] ||
					nestedItem[nestedNode.relation.many_field]?.[
						nestedNode.relation.many_primary
					] === parentItem[nestedNode.relation.one_primary!]
					nestedItem[nestedNode.relation.many_field] == parentItem[nestedNode.relation.one_primary!] ||
					nestedItem[nestedNode.relation.many_field]?.[nestedNode.relation.one_primary!] ==
						parentItem[nestedNode.relation.one_primary!]
				);
			});

@@ -286,14 +264,9 @@ function mergeWithParentItems(
		for (const parentItem of parentItems) {
			const relatedCollection = parentItem[nestedNode.relation.one_collection_field!];

			const itemChild = (nestedItem as Record<string, any[]>)[relatedCollection].find(
				(nestedItem) => {
					return (
						nestedItem[nestedNode.relatedKey[relatedCollection]] ===
						parentItem[nestedNode.fieldKey]
					);
				}
			);
			const itemChild = (nestedItem as Record<string, any[]>)[relatedCollection].find((nestedItem) => {
				return nestedItem[nestedNode.relatedKey[relatedCollection]] == parentItem[nestedNode.fieldKey];
			});

			parentItem[nestedNode.fieldKey] = itemChild || null;
		}
@@ -317,8 +290,7 @@ function removeTemporaryFields(

	for (const relatedCollection of ast.names) {
		if (!fields[relatedCollection]) fields[relatedCollection] = [];
		if (!nestedCollectionNodes[relatedCollection])
			nestedCollectionNodes[relatedCollection] = [];
		if (!nestedCollectionNodes[relatedCollection]) nestedCollectionNodes[relatedCollection] = [];

		for (const child of ast.children[relatedCollection]) {
			if (child.type === 'field') {
@@ -346,10 +318,7 @@ function removeTemporaryFields(
			);
		}

		item =
			fields[relatedCollection].length > 0
				? pick(rawItem, fields[relatedCollection])
				: rawItem[primaryKeyField];
		item = fields[relatedCollection].length > 0 ? pick(rawItem, fields[relatedCollection]) : rawItem[primaryKeyField];

		items.push(item);
	}
@@ -375,9 +344,7 @@ function removeTemporaryFields(
		item[nestedNode.fieldKey] = removeTemporaryFields(
			item[nestedNode.fieldKey],
			nestedNode,
			nestedNode.type === 'm2o'
				? nestedNode.relation.one_primary!
				: nestedNode.relation.many_primary,
			nestedNode.type === 'm2o' ? nestedNode.relation.one_primary! : nestedNode.relation.many_primary,
			item
		);
	}

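runAST now receives a SchemaOverview instead of hitting knex-schema-inspector at every level; the hunks above only rely on `schema[collection].primary` and `schema[collection].columns`. A rough sketch of the shape those call sites imply — the real type lives in `../types` and likely carries more detail than shown here:

// Approximation of the SchemaOverview shape as used by run-ast.ts in this diff.
// Only the members referenced above are included; everything else is omitted or unknown.
type SchemaOverviewSketch = {
	[collection: string]: {
		primary: string; // primary key field name, e.g. 'id'
		columns: Record<string, unknown>; // keys are the column names to select from
	};
};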
@@ -1,17 +0,0 @@
|
||||
table: directus_permissions
|
||||
|
||||
defaults:
|
||||
role: null
|
||||
collection: null
|
||||
action: null
|
||||
permissions: null
|
||||
validation: null
|
||||
presets: null
|
||||
fields: null
|
||||
limit: null
|
||||
|
||||
data:
|
||||
- collection: directus_settings
|
||||
action: read
|
||||
permissions: {}
|
||||
fields: 'project_name,project_logo,project_color,public_foreground,public_background,public_note,custom_css'
|
||||
@@ -1,84 +0,0 @@
|
||||
table: directus_presets
|
||||
|
||||
defaults:
|
||||
bookmark: null
|
||||
user: null
|
||||
role: null
|
||||
collection: null
|
||||
search: null
|
||||
filters: '[]'
|
||||
layout: tabular
|
||||
layout_query: null
|
||||
layout_options: null
|
||||
|
||||
data:
|
||||
- collection: directus_files
|
||||
layout: cards
|
||||
layout_query:
|
||||
cards:
|
||||
sort: -uploaded_on
|
||||
layout_options:
|
||||
cards:
|
||||
icon: insert_drive_file
|
||||
title: '{{ title }}'
|
||||
subtitle: '{{ type }} • {{ filesize }}'
|
||||
size: 4
|
||||
imageFit: crop
|
||||
|
||||
- collection: directus_users
|
||||
layout: cards
|
||||
layout_options:
|
||||
cards:
|
||||
icon: account_circle
|
||||
title: '{{ first_name }} {{ last_name }}'
|
||||
subtitle: '{{ email }}'
|
||||
size: 4
|
||||
|
||||
- collection: directus_activity
|
||||
layout: tabular
|
||||
layout_query:
|
||||
tabular:
|
||||
sort: -timestamp
|
||||
fields:
|
||||
- action
|
||||
- collection
|
||||
- timestamp
|
||||
- user
|
||||
layout_options:
|
||||
tabular:
|
||||
widths:
|
||||
action: 100
|
||||
collection: 210
|
||||
timestamp: 240
|
||||
user: 240
|
||||
|
||||
- collection: directus_webhooks
|
||||
layout: tabular
|
||||
layout_query:
|
||||
tabular:
|
||||
fields:
|
||||
- status
|
||||
- name
|
||||
- method
|
||||
- url
|
||||
layout_options:
|
||||
tabular:
|
||||
widths:
|
||||
status: 36
|
||||
name: 300
|
||||
|
||||
- collection: directus_roles
|
||||
layout: tabular
|
||||
layout_query:
|
||||
tabular:
|
||||
fields:
|
||||
- icon
|
||||
- name
|
||||
- description
|
||||
layout_options:
|
||||
tabular:
|
||||
widths:
|
||||
icon: 36
|
||||
name: 248
|
||||
description: 500
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
# directus_fields isn't surfaced in the app
|
||||
table: directus_fields
|
||||
|
||||
fields:
|
||||
- collection: directus_fields
|
||||
field: options
|
||||
hidden: true
|
||||
locked: true
|
||||
special: json
|
||||
- collection: directus_fields
|
||||
field: display_options
|
||||
hidden: true
|
||||
locked: true
|
||||
special: json
|
||||
- collection: directus_fields
|
||||
field: locked
|
||||
hidden: true
|
||||
locked: true
|
||||
special: boolean
|
||||
- collection: directus_fields
|
||||
field: readonly
|
||||
hidden: true
|
||||
locked: true
|
||||
special: boolean
|
||||
- collection: directus_fields
|
||||
field: hidden
|
||||
hidden: true
|
||||
locked: true
|
||||
special: boolean
|
||||
- collection: directus_fields
|
||||
field: special
|
||||
hidden: true
|
||||
locked: true
|
||||
special: csv
|
||||
- collection: directus_fields
|
||||
field: translations
|
||||
hidden: true
|
||||
locked: true
|
||||
special: json
|
||||
@@ -1,227 +0,0 @@
|
||||
table: directus_users
|
||||
|
||||
fields:
|
||||
- collection: directus_users
|
||||
field: first_name
|
||||
interface: text-input
|
||||
locked: true
|
||||
options:
|
||||
iconRight: account_circle
|
||||
sort: 1
|
||||
width: half
|
||||
- collection: directus_users
|
||||
field: last_name
|
||||
interface: text-input
|
||||
locked: true
|
||||
options:
|
||||
iconRight: account_circle
|
||||
sort: 2
|
||||
width: half
|
||||
- collection: directus_users
|
||||
field: email
|
||||
interface: text-input
|
||||
locked: true
|
||||
options:
|
||||
iconRight: email
|
||||
sort: 3
|
||||
width: half
|
||||
- collection: directus_users
|
||||
field: password
|
||||
special: hash, conceal
|
||||
interface: hash
|
||||
locked: true
|
||||
options:
|
||||
iconRight: lock
|
||||
masked: true
|
||||
sort: 4
|
||||
width: half
|
||||
- collection: directus_users
|
||||
field: avatar
|
||||
interface: file
|
||||
locked: true
|
||||
sort: 5
|
||||
width: full
|
||||
- collection: directus_users
|
||||
field: location
|
||||
interface: text-input
|
||||
options:
|
||||
iconRight: place
|
||||
sort: 6
|
||||
width: half
|
||||
- collection: directus_users
|
||||
field: title
|
||||
interface: text-input
|
||||
options:
|
||||
iconRight: work
|
||||
sort: 7
|
||||
width: half
|
||||
- collection: directus_users
|
||||
field: description
|
||||
interface: textarea
|
||||
sort: 8
|
||||
width: full
|
||||
- collection: directus_users
|
||||
field: tags
|
||||
interface: tags
|
||||
special: json
|
||||
sort: 9
|
||||
width: full
|
||||
options:
|
||||
iconRight: local_offer
|
||||
- collection: directus_users
|
||||
field: preferences_divider
|
||||
interface: divider
|
||||
options:
|
||||
icon: face
|
||||
title: User Preferences
|
||||
color: '#2F80ED'
|
||||
special: alias
|
||||
sort: 10
|
||||
width: full
|
||||
- collection: directus_users
|
||||
field: language
|
||||
interface: dropdown
|
||||
locked: true
|
||||
options:
|
||||
choices:
|
||||
- text: Afrikaans (South Africa)
|
||||
value: af-ZA
|
||||
- text: Arabic (Saudi Arabia)
|
||||
value: ar-SA
|
||||
- text: Catalan (Spain)
|
||||
value: ca-ES
|
||||
- text: Chinese (Simplified)
|
||||
value: zh-CN
|
||||
- text: Czech (Czech Republic)
|
||||
value: cs-CZ
|
||||
- text: Danish (Denmark)
|
||||
value: da-DK
|
||||
- text: Dutch (Netherlands)
|
||||
value: nl-NL
|
||||
- text: English (United States)
|
||||
value: en-US
|
||||
- text: Finnish (Finland)
|
||||
value: fi-FI
|
||||
- text: French (France)
|
||||
value: fr-FR
|
||||
- text: German (Germany)
|
||||
value: de-DE
|
||||
- text: Greek (Greece)
|
||||
value: el-GR
|
||||
- text: Hebrew (Israel)
|
||||
value: he-IL
|
||||
- text: Hungarian (Hungary)
|
||||
value: hu-HU
|
||||
- text: Icelandic (Iceland)
|
||||
value: is-IS
|
||||
- text: Indonesian (Indonesia)
|
||||
value: id-ID
|
||||
- text: Italian (Italy)
|
||||
value: it-IT
|
||||
- text: Japanese (Japan)
|
||||
value: ja-JP
|
||||
- text: Korean (Korea)
|
||||
value: ko-KR
|
||||
- text: Malay (Malaysia)
|
||||
value: ms-MY
|
||||
- text: Norwegian (Norway)
|
||||
value: no-NO
|
||||
- text: Polish (Poland)
|
||||
value: pl-PL
|
||||
- text: Portuguese (Brazil)
|
||||
value: pt-BR
|
||||
- text: Portuguese (Portugal)
|
||||
value: pt-PT
|
||||
- text: Russian (Russian Federation)
|
||||
value: ru-RU
|
||||
- text: Spanish (Spain)
|
||||
value: es-ES
|
||||
- text: Spanish (Latin America)
|
||||
value: es-419
|
||||
- text: Taiwanese Mandarin (Taiwan)
|
||||
value: zh-TW
|
||||
- text: Turkish (Turkey)
|
||||
value: tr-TR
|
||||
- text: Ukrainian (Ukraine)
|
||||
value: uk-UA
|
||||
- text: Vietnamese (Vietnam)
|
||||
value: vi-VN
|
||||
sort: 11
|
||||
width: half
|
||||
- collection: directus_users
|
||||
field: theme
|
||||
interface: dropdown
|
||||
locked: true
|
||||
options:
|
||||
choices:
|
||||
- value: auto
|
||||
text: Automatic (Based on System)
|
||||
- value: light
|
||||
text: Light Mode
|
||||
- value: dark
|
||||
text: Dark Mode
|
||||
sort: 12
|
||||
width: half
|
||||
- collection: directus_users
|
||||
field: tfa_secret
|
||||
interface: tfa-setup
|
||||
locked: true
|
||||
special: conceal
|
||||
sort: 13
|
||||
width: half
|
||||
- collection: directus_users
|
||||
field: admin_divider
|
||||
interface: divider
|
||||
locked: true
|
||||
options:
|
||||
icon: verified_user
|
||||
title: Admin Options
|
||||
color: '#F2994A'
|
||||
special: alias
|
||||
sort: 14
|
||||
width: full
|
||||
- collection: directus_users
|
||||
field: status
|
||||
interface: dropdown
|
||||
locked: true
|
||||
options:
|
||||
choices:
|
||||
- text: Draft
|
||||
value: draft
|
||||
- text: Invited
|
||||
value: invited
|
||||
- text: Active
|
||||
value: active
|
||||
- text: Suspended
|
||||
value: suspended
|
||||
- text: Archived
|
||||
value: archived
|
||||
sort: 15
|
||||
width: half
|
||||
- collection: directus_users
|
||||
field: role
|
||||
interface: many-to-one
|
||||
locked: true
|
||||
options:
|
||||
template: '{{ name }}'
|
||||
special: m2o
|
||||
sort: 16
|
||||
width: half
|
||||
- collection: directus_users
|
||||
field: token
|
||||
interface: token
|
||||
locked: true
|
||||
options:
|
||||
iconRight: vpn_key
|
||||
placeholder: Enter a secure access token...
|
||||
sort: 17
|
||||
width: full
|
||||
- collection: directus_users
|
||||
field: id
|
||||
special: uuid
|
||||
interface: text-input
|
||||
locked: true
|
||||
options:
|
||||
iconRight: vpn_key
|
||||
sort: 18
|
||||
width: full
|
||||
@@ -1,8 +0,0 @@
|
||||
table: directus_folders
|
||||
|
||||
fields:
|
||||
- collection: directus_folders
|
||||
field: id
|
||||
interface: text-input
|
||||
locked: true
|
||||
special: uuid
|
||||
@@ -1,14 +0,0 @@
|
||||
# directus_permissions isn't surfaced in the app
|
||||
table: directus_permissions
|
||||
|
||||
fields:
|
||||
- collection: directus_permissions
|
||||
field: permissions
|
||||
hidden: true
|
||||
locked: true
|
||||
special: json
|
||||
- collection: directus_permissions
|
||||
field: presets
|
||||
hidden: true
|
||||
locked: true
|
||||
special: json
|
||||
@@ -1,19 +0,0 @@
|
||||
table: directus_presets
|
||||
|
||||
fields:
|
||||
# directus_presets isn't surfaced in the app
|
||||
- collection: directus_presets
|
||||
field: filters
|
||||
hidden: true
|
||||
locked: true
|
||||
special: json
|
||||
- collection: directus_presets
|
||||
field: layout_query
|
||||
hidden: true
|
||||
locked: true
|
||||
special: json
|
||||
- collection: directus_presets
|
||||
field: layout_options
|
||||
hidden: true
|
||||
locked: true
|
||||
special: json
|
||||
@@ -1,14 +0,0 @@
|
||||
table: directus_revisions
|
||||
|
||||
fields:
|
||||
# directus_revisions isn't surfaced in the app
|
||||
- collection: directus_revisions
|
||||
field: data
|
||||
hidden: true
|
||||
locked: true
|
||||
special: json
|
||||
- collection: directus_revisions
|
||||
field: delta
|
||||
hidden: true
|
||||
locked: true
|
||||
special: json
|
||||
@@ -1,7 +0,0 @@
|
||||
table: directus_relations
|
||||
|
||||
fields:
|
||||
- collection: directus_relations
|
||||
field: one_allowed_collections
|
||||
locked: true
|
||||
special: csv
|
||||
@@ -25,33 +25,6 @@ type TableSeed = {
|
||||
};
|
||||
};
|
||||
|
||||
type RowSeed = {
|
||||
table: string;
|
||||
defaults: Record<string, any>;
|
||||
data: Record<string, any>[];
|
||||
};
|
||||
|
||||
type FieldSeed = {
|
||||
table: string;
|
||||
fields: {
|
||||
collection: string;
|
||||
field: string;
|
||||
special: string | null;
|
||||
interface: string | null;
|
||||
options: Record<string, any> | null;
|
||||
display: string | null;
|
||||
display_options: Record<string, any> | null;
|
||||
locked: boolean;
|
||||
readonly: boolean;
|
||||
hidden: boolean;
|
||||
sort: number | null;
|
||||
width: string | null;
|
||||
group: number | null;
|
||||
translations: Record<string, any> | null;
|
||||
note: string | null;
|
||||
}[];
|
||||
};
|
||||
|
||||
export default async function runSeed(database: Knex) {
|
||||
const exists = await database.schema.hasTable('directus_collections');
|
||||
|
||||
@@ -59,19 +32,13 @@ export default async function runSeed(database: Knex) {
|
||||
throw new Error('Database is already installed');
|
||||
}
|
||||
|
||||
await createTables(database);
|
||||
await insertRows(database);
|
||||
await insertFields(database);
|
||||
}
|
||||
|
||||
async function createTables(database: Knex) {
|
||||
const tableSeeds = await fse.readdir(path.resolve(__dirname, './01-tables/'));
|
||||
const tableSeeds = await fse.readdir(path.resolve(__dirname));
|
||||
|
||||
for (const tableSeedFile of tableSeeds) {
|
||||
const yamlRaw = await fse.readFile(
|
||||
path.resolve(__dirname, './01-tables', tableSeedFile),
|
||||
'utf8'
|
||||
);
|
||||
if (tableSeedFile.startsWith('run')) continue;
|
||||
|
||||
const yamlRaw = await fse.readFile(path.resolve(__dirname, tableSeedFile), 'utf8');
|
||||
|
||||
const seedData = yaml.safeLoad(yamlRaw) as TableSeed;
|
||||
|
||||
await database.schema.createTable(seedData.table, (tableBuilder) => {
|
||||
@@ -119,70 +86,9 @@ async function createTables(database: Knex) {
|
||||
}
|
||||
|
||||
if (columnInfo.references) {
|
||||
tableBuilder
|
||||
.foreign(columnName)
|
||||
.references(columnInfo.references.column)
|
||||
.inTable(columnInfo.references.table);
|
||||
column.references(columnInfo.references.column).inTable(columnInfo.references.table);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async function insertRows(database: Knex) {
|
||||
const rowSeeds = await fse.readdir(path.resolve(__dirname, './02-rows/'));
|
||||
|
||||
for (const rowSeedFile of rowSeeds) {
|
||||
const yamlRaw = await fse.readFile(
|
||||
path.resolve(__dirname, './02-rows', rowSeedFile),
|
||||
'utf8'
|
||||
);
|
||||
const seedData = yaml.safeLoad(yamlRaw) as RowSeed;
|
||||
|
||||
const dataWithDefaults = seedData.data.map((row) => {
|
||||
for (const [key, value] of Object.entries(row)) {
|
||||
if (value !== null && (typeof value === 'object' || Array.isArray(value))) {
|
||||
row[key] = JSON.stringify(value);
|
||||
}
|
||||
}
|
||||
|
||||
return merge({}, seedData.defaults, row);
|
||||
});
|
||||
|
||||
await database.batchInsert(seedData.table, dataWithDefaults);
|
||||
}
|
||||
}
|
||||
|
||||
async function insertFields(database: Knex) {
|
||||
const fieldSeeds = await fse.readdir(path.resolve(__dirname, './03-fields/'));
|
||||
|
||||
const defaultsYaml = await fse.readFile(
|
||||
path.resolve(__dirname, './03-fields/_defaults.yaml'),
|
||||
'utf8'
|
||||
);
|
||||
const defaults = yaml.safeLoad(defaultsYaml) as FieldSeed;
|
||||
|
||||
for (const fieldSeedFile of fieldSeeds) {
|
||||
const yamlRaw = await fse.readFile(
|
||||
path.resolve(__dirname, './03-fields', fieldSeedFile),
|
||||
'utf8'
|
||||
);
|
||||
const seedData = yaml.safeLoad(yamlRaw) as FieldSeed;
|
||||
|
||||
if (fieldSeedFile === '_defaults.yaml') {
|
||||
continue;
|
||||
}
|
||||
|
||||
const dataWithDefaults = seedData.fields.map((row) => {
|
||||
for (const [key, value] of Object.entries(row)) {
|
||||
if (value !== null && (typeof value === 'object' || Array.isArray(value))) {
|
||||
(row as any)[key] = JSON.stringify(value);
|
||||
}
|
||||
}
|
||||
|
||||
return merge({}, defaults, row);
|
||||
});
|
||||
|
||||
await database.batchInsert('directus_fields', dataWithDefaults);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,8 +21,12 @@ data:
|
||||
- collection: directus_files
|
||||
icon: folder
|
||||
note: Metadata for all managed file assets
|
||||
display_template: "{{ title }}"
|
||||
- collection: directus_folders
|
||||
note: Provides virtual directories for files
|
||||
display_template: "{{ name }}"
|
||||
- collection: directus_migrations
|
||||
note: What version of the database you're using
|
||||
- collection: directus_permissions
|
||||
icon: admin_panel_settings
|
||||
note: Access permissions for each role
|
||||
@@ -48,5 +52,6 @@ data:
|
||||
unarchive_value: draft
|
||||
icon: people_alt
|
||||
note: System users for the platform
|
||||
display_template: "{{ first_name }} {{ last_name }}"
|
||||
- collection: directus_webhooks
|
||||
note: Configuration for event-based HTTP requests
|
||||
9 api/src/database/system-data/collections/index.ts Normal file
@@ -0,0 +1,9 @@
import { requireYAML } from '../../../utils/require-yaml';
import { merge } from 'lodash';
import { CollectionMeta } from '../../../types';

const systemData = requireYAML(require.resolve('./collections.yaml'));

export const systemCollectionRows: CollectionMeta[] = systemData.data.map((row: Record<string, any>) => {
	return merge({ system: true }, systemData.defaults, row);
});
@@ -1,8 +1,13 @@
|
||||
table: directus_activity
|
||||
|
||||
fields:
|
||||
- collection: directus_activity
|
||||
field: action
|
||||
- field: id
|
||||
width: half
|
||||
|
||||
- field: item
|
||||
width: half
|
||||
|
||||
- field: action
|
||||
display: labels
|
||||
display_options:
|
||||
defaultForeground: '#263238'
|
||||
@@ -24,41 +29,47 @@ fields:
|
||||
value: authenticate
|
||||
foreground: '#9b51e0'
|
||||
background: '#e6d3f7'
|
||||
- collection: directus_activity
|
||||
field: collection
|
||||
width: half
|
||||
|
||||
- field: collection
|
||||
display: collection
|
||||
display_options:
|
||||
icon: true
|
||||
- collection: directus_activity
|
||||
field: timestamp
|
||||
width: half
|
||||
|
||||
- field: timestamp
|
||||
display: datetime
|
||||
options:
|
||||
relative: true
|
||||
- collection: directus_activity
|
||||
field: user
|
||||
width: half
|
||||
|
||||
- field: user
|
||||
display: user
|
||||
- collection: directus_activity
|
||||
field: comment
|
||||
width: half
|
||||
|
||||
- field: comment
|
||||
display: formatted-text
|
||||
display_options:
|
||||
subdued: true
|
||||
- collection: directus_activity
|
||||
field: user_agent
|
||||
width: half
|
||||
|
||||
- field: user_agent
|
||||
display: formatted-text
|
||||
display_options:
|
||||
font: monospace
|
||||
- collection: directus_activity
|
||||
field: ip
|
||||
width: half
|
||||
|
||||
- field: ip
|
||||
display: formatted-text
|
||||
display_options:
|
||||
font: monospace
|
||||
- collection: directus_activity
|
||||
field: revisions
|
||||
width: half
|
||||
|
||||
- field: revisions
|
||||
interface: one-to-many
|
||||
locked: true
|
||||
special: o2m
|
||||
options:
|
||||
fields:
|
||||
- collection
|
||||
- item
|
||||
width: full
|
||||
width: half
|
||||
@@ -1,69 +1,54 @@
|
||||
table: directus_collections
|
||||
|
||||
fields:
|
||||
- collection: directus_collections
|
||||
field: collection_divider
|
||||
- field: collection_divider
|
||||
special: alias
|
||||
interface: divider
|
||||
options:
|
||||
icon: box
|
||||
title: Collection Setup
|
||||
color: '#2F80ED'
|
||||
locked: true
|
||||
sort: 1
|
||||
width: full
|
||||
- collection: directus_collections
|
||||
field: collection
|
||||
|
||||
- field: collection
|
||||
interface: text-input
|
||||
options:
|
||||
font: monospace
|
||||
locked: true
|
||||
readonly: true
|
||||
sort: 2
|
||||
width: half
|
||||
- collection: directus_collections
|
||||
field: icon
|
||||
|
||||
- field: icon
|
||||
interface: icon
|
||||
options:
|
||||
locked: true
|
||||
sort: 3
|
||||
width: half
|
||||
- collection: directus_collections
|
||||
field: note
|
||||
|
||||
- field: note
|
||||
interface: text-input
|
||||
options:
|
||||
placeholder: A description of this collection...
|
||||
locked: true
|
||||
sort: 4
|
||||
width: full
|
||||
- collection: directus_collections
|
||||
field: display_template
|
||||
|
||||
- field: display_template
|
||||
interface: display-template
|
||||
options:
|
||||
collectionField: collection
|
||||
locked: true
|
||||
sort: 5
|
||||
width: full
|
||||
- collection: directus_collections
|
||||
field: hidden
|
||||
|
||||
- field: hidden
|
||||
special: boolean
|
||||
interface: toggle
|
||||
options:
|
||||
label: Hide within the App
|
||||
locked: true
|
||||
sort: 6
|
||||
width: half
|
||||
- collection: directus_collections
|
||||
field: singleton
|
||||
|
||||
- field: singleton
|
||||
special: boolean
|
||||
interface: toggle
|
||||
options:
|
||||
label: Treat as single object
|
||||
locked: true
|
||||
sort: 7
|
||||
width: half
|
||||
- collection: directus_collections
|
||||
field: translations
|
||||
|
||||
- field: translations
|
||||
special: json
|
||||
interface: repeater
|
||||
options:
|
||||
@@ -85,72 +70,58 @@ fields:
|
||||
width: half
|
||||
options:
|
||||
placeholder: Enter a translation...
|
||||
locked: true
|
||||
sort: 8
|
||||
width: full
|
||||
- collection: directus_collections
|
||||
field: archive_divider
|
||||
|
||||
- field: archive_divider
|
||||
special: alias
|
||||
interface: divider
|
||||
options:
|
||||
icon: archive
|
||||
title: Archive
|
||||
color: '#2F80ED'
|
||||
locked: true
|
||||
sort: 9
|
||||
width: full
|
||||
- collection: directus_collections
|
||||
field: archive_field
|
||||
|
||||
- field: archive_field
|
||||
interface: field
|
||||
options:
|
||||
collectionField: collection
|
||||
allowNone: true
|
||||
placeholder: Choose a field...
|
||||
locked: true
|
||||
sort: 10
|
||||
width: half
|
||||
- collection: directus_collections
|
||||
field: archive_app_filter
|
||||
|
||||
- field: archive_app_filter
|
||||
interface: toggle
|
||||
special: boolean
|
||||
options:
|
||||
label: Enable App Archive Filter
|
||||
locked: true
|
||||
sort: 11
|
||||
width: half
|
||||
- collection: directus_collections
|
||||
field: archive_value
|
||||
|
||||
- field: archive_value
|
||||
interface: text-input
|
||||
options:
|
||||
font: monospace
|
||||
iconRight: archive
|
||||
placeholder: Value set when archiving...
|
||||
locked: true
|
||||
sort: 12
|
||||
width: half
|
||||
- collection: directus_collections
|
||||
field: unarchive_value
|
||||
|
||||
- field: unarchive_value
|
||||
interface: text-input
|
||||
options:
|
||||
font: monospace
|
||||
iconRight: unarchive
|
||||
placeholder: Value set when unarchiving...
|
||||
locked: true
|
||||
sort: 13
|
||||
width: half
|
||||
- collection: directus_collections
|
||||
field: sort_divider
|
||||
|
||||
- field: sort_divider
|
||||
special: alias
|
||||
interface: divider
|
||||
options:
|
||||
icon: sort
|
||||
title: Sort
|
||||
color: '#2F80ED'
|
||||
locked: true
|
||||
sort: 14
|
||||
width: full
|
||||
- collection: directus_collections
|
||||
field: sort_field
|
||||
|
||||
- field: sort_field
|
||||
interface: field
|
||||
options:
|
||||
collectionField: collection
|
||||
@@ -160,6 +131,4 @@ fields:
|
||||
- decimal
|
||||
- integer
|
||||
allowNone: true
|
||||
locked: true
|
||||
sort: 15
|
||||
width: half
|
||||
81 api/src/database/system-data/fields/fields.yaml Normal file
@@ -0,0 +1,81 @@
|
||||
# directus_fields isn't surfaced in the app
|
||||
table: directus_fields
|
||||
|
||||
fields:
|
||||
- collection: directus_fields
|
||||
field: id
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: collection
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: field
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: special
|
||||
hidden: true
|
||||
special: csv
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: interface
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: options
|
||||
hidden: true
|
||||
special: json
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: display
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: display_options
|
||||
hidden: true
|
||||
special: json
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: locked
|
||||
hidden: true
|
||||
special: boolean
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: readonly
|
||||
hidden: true
|
||||
special: boolean
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: hidden
|
||||
hidden: true
|
||||
special: boolean
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: sort
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: width
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: group
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: translations
|
||||
hidden: true
|
||||
special: json
|
||||
width: half
|
||||
|
||||
- collection: directus_fields
|
||||
field: note
|
||||
width: half
|
||||
@@ -1,114 +1,117 @@
|
||||
table: directus_files
|
||||
|
||||
fields:
|
||||
- collection: directus_files
|
||||
field: id
|
||||
- field: id
|
||||
hidden: true
|
||||
interface: text-input
|
||||
locked: true
|
||||
special: uuid
|
||||
- collection: directus_files
|
||||
field: title
|
||||
|
||||
- field: title
|
||||
interface: text-input
|
||||
locked: true
|
||||
options:
|
||||
iconRight: title
|
||||
placeholder: A unique title...
|
||||
sort: 1
|
||||
width: full
|
||||
- collection: directus_files
|
||||
field: description
|
||||
|
||||
- field: description
|
||||
interface: textarea
|
||||
locked: true
|
||||
sort: 2
|
||||
width: full
|
||||
options:
|
||||
placeholder: An optional description...
|
||||
- collection: directus_files
|
||||
field: tags
|
||||
|
||||
- field: tags
|
||||
interface: tags
|
||||
locked: true
|
||||
options:
|
||||
iconRight: local_offer
|
||||
special: json
|
||||
sort: 3
|
||||
width: full
|
||||
display: tags
|
||||
- collection: directus_files
|
||||
field: location
|
||||
|
||||
- field: location
|
||||
interface: text-input
|
||||
locked: true
|
||||
options:
|
||||
iconRight: place
|
||||
placeholder: An optional location...
|
||||
sort: 4
|
||||
width: half
|
||||
- collection: directus_files
|
||||
field: storage
|
||||
|
||||
- field: storage
|
||||
interface: text-input
|
||||
locked: true
|
||||
options:
|
||||
iconRight: storage
|
||||
sort: 5
|
||||
width: half
|
||||
readonly: true
|
||||
- collection: directus_files
|
||||
field: storage_divider
|
||||
|
||||
- field: storage_divider
|
||||
interface: divider
|
||||
locked: true
|
||||
options:
|
||||
icon: insert_drive_file
|
||||
title: File Naming
|
||||
color: '#2F80ED'
|
||||
special: alias
|
||||
sort: 6
|
||||
width: full
|
||||
- collection: directus_files
|
||||
field: filename_disk
|
||||
|
||||
- field: filename_disk
|
||||
interface: text-input
|
||||
locked: true
|
||||
options:
|
||||
iconRight: publish
|
||||
placeholder: Name on disk storage...
|
||||
sort: 7
|
||||
width: half
|
||||
- collection: directus_files
|
||||
field: filename_download
|
||||
|
||||
- field: filename_download
|
||||
interface: text-input
|
||||
locked: true
|
||||
options:
|
||||
iconRight: get_app
|
||||
placeholder: Name when downloading...
|
||||
sort: 8
|
||||
width: half
|
||||
- collection: directus_files
|
||||
field: metadata
|
||||
|
||||
- field: metadata
|
||||
hidden: true
|
||||
locked: true
|
||||
special: json
|
||||
- collection: directus_files
|
||||
field: type
|
||||
|
||||
- field: type
|
||||
display: mime-type
|
||||
- collection: directus_files
|
||||
field: filesize
|
||||
|
||||
- field: filesize
|
||||
display: filesize
|
||||
- collection: directus_files
|
||||
field: modified_by
|
||||
|
||||
- field: modified_by
|
||||
interface: user
|
||||
locked: true
|
||||
special: user-updated
|
||||
width: half
|
||||
display: user
|
||||
- collection: directus_files
|
||||
field: modified_on
|
||||
|
||||
- field: modified_on
|
||||
interface: datetime
|
||||
locked: true
|
||||
special: date-updated
|
||||
width: half
|
||||
display: datetime
|
||||
- collection: directus_files
|
||||
field: created_on
|
||||
|
||||
- field: created_on
|
||||
display: datetime
|
||||
- collection: directus_files
|
||||
field: created_by
|
||||
|
||||
- field: created_by
|
||||
display: user
|
||||
|
||||
- field: embed
|
||||
width: half
|
||||
|
||||
- field: uploaded_by
|
||||
width: half
|
||||
|
||||
- field: folder
|
||||
width: half
|
||||
|
||||
- field: width
|
||||
width: half
|
||||
|
||||
- field: uploaded_on
|
||||
width: half
|
||||
|
||||
- field: height
|
||||
width: half
|
||||
|
||||
- field: charset
|
||||
width: half
|
||||
|
||||
- field: duration
|
||||
width: half
|
||||
14 api/src/database/system-data/fields/folders.yaml Normal file
@@ -0,0 +1,14 @@
|
||||
table: directus_folders
|
||||
|
||||
fields:
|
||||
- field: id
|
||||
interface: text-input
|
||||
special: uuid
|
||||
width: half
|
||||
|
||||
- field: parent
|
||||
width: half
|
||||
|
||||
- field: name
|
||||
width: full
|
||||
|
||||
25 api/src/database/system-data/fields/index.ts Normal file
@@ -0,0 +1,25 @@
import { requireYAML } from '../../../utils/require-yaml';
import { merge } from 'lodash';
import { FieldMeta } from '../../../types';
import fse from 'fs-extra';
import path from 'path';

const defaults = requireYAML(require.resolve('./_defaults.yaml'));
const fieldData = fse.readdirSync(path.resolve(__dirname));

export let systemFieldRows: FieldMeta[] = [];

for (const filepath of fieldData) {
	if (filepath.includes('_defaults') || filepath.includes('index')) continue;

	const systemFields = requireYAML(path.resolve(__dirname, filepath));

	(systemFields.fields as FieldMeta[]).forEach((field, index) => {
		systemFieldRows.push(
			merge({ system: true }, defaults, field, {
				collection: systemFields.table,
				sort: index + 1,
			})
		);
	});
}
36 api/src/database/system-data/fields/permissions.yaml Normal file
@@ -0,0 +1,36 @@
|
||||
# directus_permissions isn't surfaced in the app
|
||||
table: directus_permissions
|
||||
|
||||
fields:
|
||||
- field: permissions
|
||||
hidden: true
|
||||
special: json
|
||||
width: half
|
||||
|
||||
- field: presets
|
||||
hidden: true
|
||||
special: json
|
||||
width: half
|
||||
|
||||
- field: role
|
||||
width: half
|
||||
|
||||
- field: limit
|
||||
width: half
|
||||
|
||||
- field: collection
|
||||
width: half
|
||||
|
||||
- field: id
|
||||
width: half
|
||||
|
||||
- field: fields
|
||||
width: half
|
||||
special: csv
|
||||
|
||||
- field: action
|
||||
width: half
|
||||
|
||||
- field: validation
|
||||
width: half
|
||||
special: json
|
||||
35 api/src/database/system-data/fields/presets.yaml Normal file
@@ -0,0 +1,35 @@
|
||||
table: directus_presets
|
||||
|
||||
fields:
|
||||
- field: filters
|
||||
hidden: true
|
||||
special: json
|
||||
|
||||
- field: layout_query
|
||||
hidden: true
|
||||
special: json
|
||||
|
||||
- field: layout_options
|
||||
hidden: true
|
||||
special: json
|
||||
|
||||
- field: role
|
||||
width: half
|
||||
|
||||
- field: user
|
||||
width: half
|
||||
|
||||
- field: id
|
||||
width: half
|
||||
|
||||
- field: bookmark
|
||||
width: half
|
||||
|
||||
- field: search
|
||||
width: half
|
||||
|
||||
- field: collection
|
||||
width: half
|
||||
|
||||
- field: layout
|
||||
width: half
|
||||
33 api/src/database/system-data/fields/relations.yaml Normal file
@@ -0,0 +1,33 @@
|
||||
table: directus_relations
|
||||
|
||||
fields:
|
||||
- field: id
|
||||
width: half
|
||||
|
||||
- field: many_collection
|
||||
width: half
|
||||
|
||||
- field: many_field
|
||||
width: half
|
||||
|
||||
- field: many_primary
|
||||
width: half
|
||||
|
||||
- field: one_collection
|
||||
width: half
|
||||
|
||||
- field: one_field
|
||||
width: half
|
||||
|
||||
- field: one_primary
|
||||
width: half
|
||||
|
||||
- field: one_collection_field
|
||||
width: half
|
||||
|
||||
- field: one_allowed_collections
|
||||
special: csv
|
||||
width: half
|
||||
|
||||
- field: junction_field
|
||||
width: half
|
||||
25 api/src/database/system-data/fields/revisions.yaml Normal file
@@ -0,0 +1,25 @@
|
||||
table: directus_revisions
|
||||
|
||||
fields:
|
||||
- field: id
|
||||
width: half
|
||||
|
||||
- field: activity
|
||||
width: half
|
||||
|
||||
- field: collection
|
||||
width: half
|
||||
|
||||
- field: item
|
||||
width: half
|
||||
|
||||
- field: data
|
||||
hidden: true
|
||||
special: json
|
||||
|
||||
- field: delta
|
||||
hidden: true
|
||||
special: json
|
||||
|
||||
- field: parent
|
||||
width: half
|
||||
Some files were not shown because too many files have changed in this diff.