Merge branch 'main' into aggregation

rijkvanzanten committed 2021-08-06 16:14:29 -04:00

676 changed files with 15648 additions and 8806 deletions

View File

@@ -1,2 +1,3 @@
node_modules
dist
templates

View File

@@ -38,10 +38,9 @@ module.exports = {
parser: '@typescript-eslint/parser',
},
extends: [
'plugin:vue/vue3-essential',
'plugin:vue/vue3-recommended',
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
'plugin:prettier-vue/recommended',
'prettier',
],
rules: {

View File

@@ -5,12 +5,17 @@ body:
- type: markdown
attributes:
value: Hi, thank you for taking the time to create an issue!
- type: markdown
- type: checkboxes
id: troubleshooting
attributes:
value: 'Before continuing, you must first have completed all [Troubleshooting Steps](https://docs.directus.io/getting-started/support/#troubleshooting-steps)'
- type: markdown
attributes:
value: Please confirm that an issue describing this problem doesn't exist already.
label: Preflight Checklist
options:
- label: I have completed all [Troubleshooting Steps](https://docs.directus.io/getting-started/support/#troubleshooting-steps).
required: true
- label: I'm on [the latest version of Directus](https://github.com/directus/directus/releases).
required: true
- label: There's [no other issue](https://github.com/directus/directus/issues) that already describes my problem.
required: true
- type: textarea
attributes:
label: Describe the Bug

View File

@@ -1,15 +0,0 @@
FROM docker:stable
RUN \
apk update && \
apk upgrade && \
apk add bash
COPY ./rootfs/ /
RUN \
chmod +x /usr/bin/lib/argsf && \
chmod +x /usr/bin/entrypoint && \
chmod +x /usr/bin/semver
ENTRYPOINT ["entrypoint"]

View File

@@ -1,47 +0,0 @@
name: "Build and publish Directus images"
description: "GitHub Action to publish Directus container images."
branding:
icon: archive
color: gray-dark
inputs:
repository:
description: "Repository name"
required: true
registry:
description: "Registry"
required: true
username:
description: "Registry user"
required: true
password:
description: "Registry password"
required: true
version:
description: "Version"
required: true
push:
description: "Push"
required: false
default: "false"
latest:
description: "Latest"
required: false
default: "false"
runs:
using: "docker"
image: "Dockerfile"
args:
- --registry
- ${{ inputs.registry }}
- --repository
- ${{ inputs.repository }}
- --username
- ${{ inputs.username }}
- --password
- ${{ inputs.password }}
- --version
- ${{ inputs.version }}
- --push
- ${{ inputs.push }}
- --latest
- ${{ inputs.latest }}

View File

@@ -1,13 +0,0 @@
root = true
[*]
charset = utf-8
end_of_line = lf
indent_size = 2
indent_style = space
insert_final_newline = true
tab_width = 2
trim_trailing_whitespace = true
[Makefile]
indent_style = tab

View File

@@ -1,106 +0,0 @@
# Builder image
FROM alpine:latest AS builder
ARG VERSION
ARG REPOSITORY=directus/directus
# Get runtime dependencies from optional dependencies
# defined in package.json of Directus API package
WORKDIR /directus
RUN apk add --no-cache jq \
&& wget -O directus-api-package.json "https://raw.githubusercontent.com/${REPOSITORY}/${VERSION}/api/package.json" \
&& jq '{ \
name: "directus-project", \
version: "1.0.0", \
description: "Directus Project", \
dependencies: .optionalDependencies \
}' \
directus-api-package.json > package.json
# Directus image
FROM node:16-alpine
ARG VERSION
ARG REPOSITORY=directus/directus
LABEL directus.version="${VERSION}"
LABEL org.opencontainers.image.source https://github.com/${REPOSITORY}
# Default environment variables
# (see https://docs.directus.io/reference/environment-variables/)
ENV \
PORT="8055" \
PUBLIC_URL="/" \
DB_CLIENT="sqlite3" \
DB_FILENAME="/directus/database/database.sqlite" \
RATE_LIMITER_ENABLED="false" \
RATE_LIMITER_STORE="memory" \
RATE_LIMITER_POINTS="25" \
RATE_LIMITER_DURATION="1" \
CACHE_ENABLED="false" \
STORAGE_LOCATIONS="local" \
STORAGE_LOCAL_PUBLIC_URL="/uploads" \
STORAGE_LOCAL_DRIVER="local" \
STORAGE_LOCAL_ROOT="/directus/uploads" \
ACCESS_TOKEN_TTL="15m" \
REFRESH_TOKEN_TTL="7d" \
REFRESH_TOKEN_COOKIE_SECURE="false" \
REFRESH_TOKEN_COOKIE_SAME_SITE="lax" \
OAUTH_PROVIDERS="" \
EXTENSIONS_PATH="/directus/extensions" \
EMAIL_FROM="no-reply@directus.io" \
EMAIL_TRANSPORT="sendmail" \
EMAIL_SENDMAIL_NEW_LINE="unix" \
EMAIL_SENDMAIL_PATH="/usr/sbin/sendmail"
RUN \
# Install system dependencies
# - 'bash' for entrypoint script
# - 'ssmtp' to be able to send mails
# - 'util-linux' not sure if this is required
apk upgrade --no-cache && apk add --no-cache \
bash \
ssmtp \
util-linux \
# Install global node dependencies
&& npm install -g \
yargs \
pino \
pino-colada \
# Create directory for Directus with corresponding ownership
# (can be omitted on newer Docker versions since WORKDIR below will do the same)
&& mkdir /directus && chown node:node /directus
# Switch to user 'node' and directory '/directus'
USER node
WORKDIR /directus
# Get package.json from builder image
COPY --from=builder --chown=node:node /directus/package.json .
RUN \
# Install Directus and runtime dependencies
# (retry if it fails for some reason, e.g. release not published yet)
for i in $(seq 10); do npm install "directus@${VERSION}" && break || sleep 30; done && \
npm install \
# Create data directories
&& mkdir -p \
database \
extensions/displays \
extensions/interfaces \
extensions/layouts \
extensions/modules \
uploads
# Expose data directories as volumes
VOLUME \
/directus/database \
/directus/extensions \
/directus/uploads
# Copy rootfs files
COPY ./rootfs /
EXPOSE 8055
SHELL ["/bin/bash", "-c"]
ENTRYPOINT ["entrypoint"]

View File

@@ -1,64 +0,0 @@
#!/usr/bin/env bash
set -e
function bootstrap() {
local warn=false
if [ "${KEY}" == "" ] ; then
export KEY=$(uuidgen)
warn=true
fi
if [ "${SECRET}" == "" ] ; then
export SECRET=$(node -e 'console.log(require("nanoid").nanoid(32))')
warn=true
fi
if [ "${warn}" == "true" ] ; then
print --level=warn --stdin <<WARN
>
> WARNING!
>
> The KEY and SECRET environment variables are not set. Some
> temporary variables were generated to fill the gap, but in
> production this is going to cause problems.
>
> Reference:
> https://docs.directus.io/reference/environment-variables.html
>
>
WARN
fi
# Create folder if using sqlite and file doesn't exist
if [ "${DB_CLIENT}" == "sqlite3" ] ; then
if [ "${DB_FILENAME}" == "" ] ; then
print --level=error "Missing DB_FILENAME environment variable"
exit 1
fi
if [ ! -f "${DB_FILENAME}" ] ; then
mkdir -p $(dirname ${DB_FILENAME})
fi
fi
npx directus bootstrap
}
command=""
if [ $# -eq 0 ] ; then
command="start"
elif [ "${1}" == "bash" ] || [ "${1}" == "shell" ] ; then
shift
exec bash $@
elif [ "${1}" == "command" ] ; then
shift
exec $@
else
command="${1}"
shift
fi
bootstrap
exec npx directus "${command}" $@
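
For reference, a rough sketch of how this (now removed) entrypoint was intended to be invoked; the image name and the CLI subcommands are illustrative assumptions, not taken from this commit:

# No arguments: bootstrap the database, then run `npx directus start`
docker run -p 8055:8055 directus/directus
# "bash" / "shell": exec a shell inside the container (bootstrap is skipped)
docker run -it directus/directus shell
# "command": exec an arbitrary program instead of Directus
docker run directus/directus command ls /directus
# Anything else is passed through as a Directus CLI subcommand after bootstrap
docker run directus/directus database migrate:latest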

View File

@@ -1,48 +0,0 @@
#!/usr/bin/env node
// Workarounds?
process.env.NODE_PATH = "/usr/local/lib/node_modules";
require("module").Module._initPaths();
/**
* Read lines from stdin
*/
async function readlines() {
const chunks = [];
for await (const chunk of process.stdin) {
chunks.push(chunk);
}
const lines = chunks.join("").split("\n");
lines.pop();
return lines;
}
(async function () {
// Logger
const yargs = require("yargs");
const logger = require("pino")({
prettyPrint: process.env.LOG_STYLE !== "raw",
prettifier: require("pino-colada"),
level: process.env.LOG_LEVEL || "info",
});
function write(...message) {
if (level in logger) {
logger[level](...message);
} else {
logger.info(...message);
}
}
const args = yargs.argv;
const level = args.level || "info";
const stdin = args.stdin || false;
if (stdin) {
const lines = await readlines();
lines.forEach((line) => write(line));
} else {
write(...args._);
}
})();

View File

@@ -1,138 +0,0 @@
#!/usr/bin/env bash
set -e
root=$(dirname ${0})
source ${root}/lib/argsf
#
# Makes a set of tags
#
function make_tags() {
local prefix=""
local version=${1}
semver get major ${version} > /dev/null 2>&1
if [ "$?" != "0" ]; then
echo "${version}"
else
if [ "${version:0:1}" == "v" ]; then
prefix="v"
fi
major="$(semver get major ${version})"
minor="${major}.$(semver get minor ${version})"
patch="${minor}.$(semver get patch ${version})"
prerel="$(semver get prerel ${version})"
if [ "${prerel}" == "" ]; then
is_prerel=false
else
is_prerel=true
fi
build="$(semver get build ${version})"
if [ "${build}" == "" ]; then
is_build=false
else
is_build=true
fi
if [ "${is_prerel}" == "true" ]; then
echo "${prefix}${major}-${prerel}"
echo "${prefix}${minor}-${prerel}"
echo "${prefix}${patch}-${prerel}"
if [ "${is_build}" == "true" ]; then
echo "${prefix}${major}-${prerel}-${build}"
fi
else
echo "${prefix}${major}"
echo "${prefix}${minor}"
echo "${prefix}${patch}"
if [ "${is_build}" == "true" ]; then
echo "${prefix}${patch}-${build}"
fi
fi
fi
}
#
# Build script
#
function main() {
username=$(argument username)
password=$(argument password)
push=$(argument push "false")
latest=$(argument latest "false")
registry=$(argument registry "")
registry=$(echo "${registry}" | tr '[:upper:]' '[:lower:]')
repository=$(argument repository "directus/directus")
repository=$(echo "${repository}" | tr '[:upper:]' '[:lower:]')
version=$(argument version "")
context=$(argument context ".")
image="${repository}"
if [ "${registry}" != "" ]; then
image="${registry}/${image}"
fi
# Normalize tag
if [ "${version}" == "" ]; then
version=${GITHUB_REF##*/}
else
version=${version##*/}
fi
if [ "${version}" == "" ]; then
version=$(echo ${GITHUB_SHA:-"000000000000"} | cut -c1-12)
fi
tags=$(make_tags ${version})
echo "Tags = ${tags}"
# build image
docker build \
-t directus:main \
--build-arg VERSION=${version} \
--build-arg REPOSITORY=${repository} \
/directus/images/main
# login into registry
docker login -u "${username}" -p "${password}" "${registry}"
# Push latest
# TODO: check if it's really the latest
if [ "${latest}" == "true" ]; then
fqin="${image}:latest"
echo "Tagging ${fqin}"
docker tag directus:main ${fqin}
if [ "${push}" == "true" ]; then
echo "Pushing tag ${fqin}"
docker push "${fqin}"
fi
fi
# Push tags
for tag in $tags
do
tag=$(echo "${tag}" | tr '[:upper:]' '[:lower:]')
fqin="${image}:${tag}"
echo "Tagging ${fqin}"
docker tag directus:main "${fqin}"
if [ "${push}" == "true" ]; then
echo "Pushing tag ${fqin}"
docker push "${fqin}"
fi
done
echo "Finished."
exit $?
}
main
exit $?

View File

@@ -1,98 +0,0 @@
#
# Arguments and Flags (argsf)
# This is meant to work with bash shell
# To use, source this file into your bash scripts
#
# Implemented by João Biondo <wolfulus@gmail.com>
# https://github.com/WoLfulus/argsf
#
declare _ARGCOUNT=$#
declare _ARGDATA=("$@")
declare -A _ARGMAP
declare -A _FLAGMAP
for ((_arg_index_key=1;_arg_index_key<=$#;_arg_index_key++))
do
_arg_index_value=$(expr $_arg_index_key + 1)
_arg_key=${!_arg_index_key}
_arg_value=${!_arg_index_value}
if [[ $_arg_key == *"--"* ]]; then
if [[ $_arg_key == *" "* ]]; then
continue
fi
_arg_name="${_arg_key:2}"
_FLAGMAP[${_arg_name}]=1
if [[ $_arg_value != *"--"* ]] || [[ $_arg_value == *" "* ]] ; then
_ARGMAP[${_arg_name}]="$_arg_value"
else
_ARGMAP[${_arg_name}]=""
fi
fi
done
function _argument() {
if test "${_ARGMAP[${ARG_NAME}]+isset}" ; then
echo ${_ARGMAP[${ARG_NAME}]}
else
if [ ${ARG_DEFAULT} -eq 0 ]; then
echo "Error: required argument '--${ARG_NAME}' not specified" 1>&2
exit 1
else
echo ${ARG_DEFAULT_VALUE}
fi
fi
}
function argument() {
if [ $# -eq 1 ]; then
ARG_NAME="$1" ARG_DEFAULT=0 ARG_DEFAULT_VALUE= _argument "${_ARGUMENT_DATA}"
elif [ $# -eq 2 ]; then
ARG_NAME="$1" ARG_DEFAULT=1 ARG_DEFAULT_VALUE="$2" _argument "${_ARGUMENT_DATA}"
else
echo "argument: invalid number of arguments" 1>&2
return 1
fi
return 0
}
function flage() {
if [ $# -eq 1 ]; then
if [[ ${_FLAGMAP[$1]} ]] ; then
echo "true"
return 0
elif [[ ${_FLAGMAP[no-$1]} ]] ; then
echo "false"
return 0
else
echo "true"
return 0
fi
else
echo "flag: invalid number of arguments" 1>&2
return 1
fi
}
function flagd() {
if [ $# -eq 1 ]; then
if [[ ${_FLAGMAP[$1]} ]] ; then
echo "true"
return 0
elif [[ ${_FLAGMAP[no-$1]} ]] ; then
echo "false"
return 0
else
echo "false"
return 0
fi
else
echo "flag: invalid number of arguments" 1>&2
return 1
fi
}
function flag() {
flagd $1
return $?
}
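
A minimal usage sketch for the helpers defined above; the script path and option names are illustrative only:

#!/usr/bin/env bash
source ./lib/argsf
# invoked as: ./build --registry ghcr.io --push
registry=$(argument registry "docker.io")  # value of --registry, or the default
version=$(argument version)                # no default: errors out if --version is missing
push=$(flag push)                          # "true" if --push was passed, otherwise "false"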

View File

@@ -1,284 +0,0 @@
#!/usr/bin/env bash
#
# Copyright (c) 2014-2015 François Saint-Jacques <fsaintjacques@gmail.com>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
#
set -o errexit -o nounset -o pipefail
NAT='0|[1-9][0-9]*'
ALPHANUM='[0-9]*[A-Za-z-][0-9A-Za-z-]*'
IDENT="$NAT|$ALPHANUM"
FIELD='[0-9A-Za-z-]+'
SEMVER_REGEX="\
^[vV]?\
($NAT)\\.($NAT)\\.($NAT)\
(\\-(${IDENT})(\\.(${IDENT}))*)?\
(\\+${FIELD}(\\.${FIELD})*)?$"
PROG=semver
PROG_VERSION="3.0.0"
USAGE="\
Usage:
$PROG bump (major|minor|patch|release|prerel <prerel>|build <build>) <version>
$PROG compare <version> <other_version>
$PROG get (major|minor|patch|release|prerel|build) <version>
$PROG --help
$PROG --version
Arguments:
<version> A version must match the following regular expression:
\"${SEMVER_REGEX}\"
In English:
-- The version must match X.Y.Z[-PRERELEASE][+BUILD]
where X, Y and Z are non-negative integers.
-- PRERELEASE is a dot separated sequence of non-negative integers and/or
identifiers composed of alphanumeric characters and hyphens (with
at least one non-digit). Numeric identifiers must not have leading
zeros. A hyphen (\"-\") introduces this optional part.
-- BUILD is a dot separated sequence of identifiers composed of alphanumeric
characters and hyphens. A plus (\"+\") introduces this optional part.
<other_version> See <version> definition.
<prerel> A string as defined by PRERELEASE above.
<build> A string as defined by BUILD above.
Options:
-v, --version Print the version of this tool.
-h, --help Print this help message.
Commands:
bump Bump by one of major, minor, patch; zeroing or removing
subsequent parts. \"bump prerel\" sets the PRERELEASE part and
removes any BUILD part. \"bump build\" sets the BUILD part.
\"bump release\" removes any PRERELEASE or BUILD parts.
The bumped version is written to stdout.
compare Compare <version> with <other_version>, output to stdout the
following values: -1 if <other_version> is newer, 0 if equal, 1 if
older. The BUILD part is not used in comparisons.
get Extract given part of <version>, where part is one of major, minor,
patch, prerel, build, or release.
See also:
https://semver.org -- Semantic Versioning 2.0.0"
function error {
echo -e "$1" >&2
exit 1
}
function usage-help {
error "$USAGE"
}
function usage-version {
echo -e "${PROG}: $PROG_VERSION"
exit 0
}
function validate-version {
local version=$1
if [[ "$version" =~ $SEMVER_REGEX ]]; then
# if a second argument is passed, store the result in var named by $2
if [ "$#" -eq "2" ]; then
local major=${BASH_REMATCH[1]}
local minor=${BASH_REMATCH[2]}
local patch=${BASH_REMATCH[3]}
local prere=${BASH_REMATCH[4]}
local build=${BASH_REMATCH[8]}
eval "$2=(\"$major\" \"$minor\" \"$patch\" \"$prere\" \"$build\")"
else
echo "$version"
fi
else
error "version $version does not match the semver scheme 'X.Y.Z(-PRERELEASE)(+BUILD)'. See help for more information."
fi
}
function is-nat {
[[ "$1" =~ ^($NAT)$ ]]
}
function is-null {
[ -z "$1" ]
}
function order-nat {
[ "$1" -lt "$2" ] && { echo -1 ; return ; }
[ "$1" -gt "$2" ] && { echo 1 ; return ; }
echo 0
}
function order-string {
[[ $1 < $2 ]] && { echo -1 ; return ; }
[[ $1 > $2 ]] && { echo 1 ; return ; }
echo 0
}
# given two (named) arrays containing NAT and/or ALPHANUM fields, compare them
# one by one according to semver 2.0.0 spec. Return -1, 0, 1 if left array ($1)
# is less-than, equal, or greater-than the right array ($2). The longer array
# is considered greater-than the shorter if the shorter is a prefix of the longer.
#
function compare-fields {
local l="$1[@]"
local r="$2[@]"
local leftfield=( "${!l}" )
local rightfield=( "${!r}" )
local left
local right
local i=$(( -1 ))
local order=$(( 0 ))
while true
do
[ $order -ne 0 ] && { echo $order ; return ; }
: $(( i++ ))
left="${leftfield[$i]}"
right="${rightfield[$i]}"
is-null "$left" && is-null "$right" && { echo 0 ; return ; }
is-null "$left" && { echo -1 ; return ; }
is-null "$right" && { echo 1 ; return ; }
is-nat "$left" && is-nat "$right" && { order=$(order-nat "$left" "$right") ; continue ; }
is-nat "$left" && { echo -1 ; return ; }
is-nat "$right" && { echo 1 ; return ; }
{ order=$(order-string "$left" "$right") ; continue ; }
done
}
# shellcheck disable=SC2206 # checked by "validate"; ok to expand prerel id's into array
function compare-version {
local order
validate-version "$1" V
validate-version "$2" V_
# compare major, minor, patch
local left=( "${V[0]}" "${V[1]}" "${V[2]}" )
local right=( "${V_[0]}" "${V_[1]}" "${V_[2]}" )
order=$(compare-fields left right)
[ "$order" -ne 0 ] && { echo "$order" ; return ; }
# compare pre-release ids when M.m.p are equal
local prerel="${V[3]:1}"
local prerel_="${V_[3]:1}"
local left=( ${prerel//./ } )
local right=( ${prerel_//./ } )
# if left and right have no pre-release part, then left equals right
# if only one of left/right has pre-release part, that one is less than simple M.m.p
[ -z "$prerel" ] && [ -z "$prerel_" ] && { echo 0 ; return ; }
[ -z "$prerel" ] && { echo 1 ; return ; }
[ -z "$prerel_" ] && { echo -1 ; return ; }
# otherwise, compare the pre-release id's
compare-fields left right
}
function command-bump {
local new; local version; local sub_version; local command;
case $# in
2) case $1 in
major|minor|patch|release) command=$1; version=$2;;
*) usage-help;;
esac ;;
3) case $1 in
prerel|build) command=$1; sub_version=$2 version=$3 ;;
*) usage-help;;
esac ;;
*) usage-help;;
esac
validate-version "$version" parts
# shellcheck disable=SC2154
local major="${parts[0]}"
local minor="${parts[1]}"
local patch="${parts[2]}"
local prere="${parts[3]}"
local build="${parts[4]}"
case "$command" in
major) new="$((major + 1)).0.0";;
minor) new="${major}.$((minor + 1)).0";;
patch) new="${major}.${minor}.$((patch + 1))";;
release) new="${major}.${minor}.${patch}";;
prerel) new=$(validate-version "${major}.${minor}.${patch}-${sub_version}");;
build) new=$(validate-version "${major}.${minor}.${patch}${prere}+${sub_version}");;
*) usage-help ;;
esac
echo "$new"
exit 0
}
function command-compare {
local v; local v_;
case $# in
2) v=$(validate-version "$1"); v_=$(validate-version "$2") ;;
*) usage-help ;;
esac
set +u # need unset array element to evaluate to null
compare-version "$v" "$v_"
exit 0
}
# shellcheck disable=SC2034
function command-get {
local part version
if [[ "$#" -ne "2" ]] || [[ -z "$1" ]] || [[ -z "$2" ]]; then
usage-help
exit 0
fi
part="$1"
version="$2"
validate-version "$version" parts
local major="${parts[0]}"
local minor="${parts[1]}"
local patch="${parts[2]}"
local prerel="${parts[3]:1}"
local build="${parts[4]:1}"
local release="${major}.${minor}.${patch}"
case "$part" in
major|minor|patch|release|prerel|build) echo "${!part}" ;;
*) usage-help ;;
esac
exit 0
}
case $# in
0) echo "Unknown command: $*"; usage-help;;
esac
case $1 in
--help|-h) echo -e "$USAGE"; exit 0;;
--version|-v) usage-version ;;
bump) shift; command-bump "$@";;
get) shift; command-get "$@";;
compare) shift; command-compare "$@";;
*) echo "Unknown arguments: $*"; usage-help;;
esac
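
A few illustrative invocations of the script above, with expected output shown as comments:

./semver bump minor 9.0.0-rc.88   # -> 9.1.0
./semver get prerel 9.0.0-rc.88   # -> rc.88
./semver compare 1.2.3 1.2.10     # -> -1 (1.2.10 is the newer version)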

View File

@@ -1,39 +0,0 @@
name: build-images
on:
release:
types:
- published
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Sleep for 30 seconds
uses: jakejarvis/wait-action@master
with:
time: '30s'
- name: Checkout
uses: actions/checkout@v2
- name: Build GitHub Container Registry
uses: ./.github/actions/build-images
with:
registry: "ghcr.io"
repository: "${{ github.repository }}"
username: "${{ secrets.REGISTRY_USERNAME }}"
password: "${{ secrets.REGISTRY_PASSWORD }}"
version: "${{ github.ref }}"
latest: "true"
push: "true"
- name: Build Docker Hub
uses: ./.github/actions/build-images
with:
registry: "docker.io"
repository: "${{ github.repository }}"
username: "${{ secrets.DOCKERHUB_USERNAME }}"
password: "${{ secrets.DOCKERHUB_PASSWORD }}"
version: "${{ github.ref }}"
latest: "true"
push: "true"

View File

@@ -1,24 +0,0 @@
name: create-release
on:
push:
tags:
- 'v*'
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Create Release
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.REPOSITORY_TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: ${{ github.ref }}
body: |
Directus ${{ github.ref }}
draft: false
prerelease: false

View File

@@ -1,4 +1,4 @@
name: Run e2e tests
name: E2E
on:
push:
branches:
@@ -10,12 +10,13 @@ jobs:
fail-fast: false
matrix:
db: ['mssql', 'mysql', 'postgres', 'maria', 'sqlite3']
node-version: ['12-alpine', '14-alpine', '16-alpine']
# node-version: ['12-alpine', '14-alpine', '16-alpine']
node-version: ['16-alpine']
env:
CACHED_IMAGE: ghcr.io/directus/directus-e2e-test-cache:${{ matrix.node-version }}
steps:
- name: Cancel Previous Runs
uses: styfle/cancel-workflow-action@0.9.0
uses: styfle/cancel-workflow-action@0.9.1
with:
access_token: ${{ secrets.GITHUB_TOKEN }}
- name: Login to GitHub Container Registry

.github/workflows/e2e.yml (new file, 42 lines)
View File

@@ -0,0 +1,42 @@
name: E2E
on:
pull_request:
branches:
- main
jobs:
tests:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
db: ['postgres']
node-version: ['16-alpine']
env:
CACHED_IMAGE: ghcr.io/directus/directus-e2e-test-cache:${{ matrix.node-version }}
steps:
- name: Cancel Previous Runs
uses: styfle/cancel-workflow-action@0.9.1
with:
access_token: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: '16'
- name: restore node_modules cache
uses: actions/cache@v2
with:
path: |
node_modules
**/node_modules
key: ${{ runner.os }}-${{ hashFiles('**/package-lock.json') }}
- name: Install dependencies
run: |
npm install
- name: Build
run: |
npm run build
- name: Run tests
env:
TEST_NODE_VERSION: ${{ matrix.node-version }}
TEST_DB: ${{ matrix.db }}
run: npm run test:e2e

View File

@@ -14,7 +14,7 @@ jobs:
steps:
- name: Cancel Previous Runs
uses: styfle/cancel-workflow-action@0.9.0
uses: styfle/cancel-workflow-action@0.9.1
with:
access_token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/release.yml (new file, 148 lines)
View File

@@ -0,0 +1,148 @@
name: Release
on:
push:
tags:
- 'v*'
env:
GHCR_IMAGE: ghcr.io/${{ github.repository }}
DOCKERHUB_IMAGE: ${{ github.repository }}
jobs:
create-release:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Create Release
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: ${{ github.ref }}
body: |
Directus ${{ github.ref }}
draft: false
prerelease: false
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js
uses: actions/setup-node@v2
with:
node-version: '16.x'
- uses: c-hive/gha-npm-cache@v1
- run: npm ci
- run: npm run build
- run: node docker/pack
- name: Cache build artifacts
uses: actions/cache@v2
with:
path: '**/dist'
key: build-artifacts-${{ github.sha }}
publish-npm:
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v2
- name: Restore build artifacts
uses: actions/cache@v2
with:
path: '**/dist'
key: build-artifacts-${{ github.sha }}
- name: Use Node.js
uses: actions/setup-node@v2
with:
node-version: '16.x'
registry-url: 'https://registry.npmjs.org'
- run: npm ci
- run: npx lerna publish from-git --no-verify-access --yes
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
build-images:
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v2
- name: Restore build artifacts
uses: actions/cache@v2
with:
path: '**/dist'
key: build-artifacts-${{ github.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Docker meta
id: meta
uses: docker/metadata-action@v3
with:
images: |
${{ env.DOCKERHUB_IMAGE }}
${{ env.GHCR_IMAGE }}
# Remove this once v9 is released
flavor: |
latest=true
tags: |
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
- name: Login to DockerHub
uses: docker/login-action@v1
if: ${{ env.DOCKERHUB_IMAGE }}
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Login to GHCR
uses: docker/login-action@v1
if: ${{ env.GHCR_IMAGE }}
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
file: './docker/Dockerfile'
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64
push: true
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new
# Temp fix
# https://github.com/docker/build-push-action/issues/252
# https://github.com/moby/buildkit/issues/1896
- name: Move cache
run: |
rm -rf /tmp/.buildx-cache
mv /tmp/.buildx-cache-new /tmp/.buildx-cache

View File

@@ -26,32 +26,12 @@ FROM node:${NODE_VERSION}
#ENV TNS_ADMIN /usr/lib/instantclient
#ENV ORACLE_HOME /usr/lib/instantclient
RUN npm i -g lerna
WORKDIR /directus
COPY package*.json ./
COPY lerna.json ./
COPY api/package.json api/
COPY api/cli.js api/
COPY app/package.json app/
COPY docs/package.json docs/
COPY packages/create-directus-project/package.json packages/create-directus-project/
COPY packages/create-directus-project/lib/index.js packages/create-directus-project/lib/
COPY packages/drive/package.json packages/drive/
COPY packages/drive-azure/package.json packages/drive-azure/
COPY packages/drive-gcs/package.json packages/drive-gcs/
COPY packages/drive-s3/package.json packages/drive-s3/
COPY packages/format-title/package.json packages/format-title/
COPY packages/gatsby-source-directus/package.json packages/gatsby-source-directus/
COPY packages/schema/package.json packages/schema/
COPY packages/sdk/package.json packages/sdk/
COPY packages/specs/package.json packages/specs/
RUN npx lerna bootstrap
COPY . .
RUN npm install
WORKDIR /directus/api
CMD ["sh", "-c", "node ./dist/cli/index.js bootstrap; node ./dist/start.js;"]

View File

@@ -103,6 +103,7 @@ ACCESS_TOKEN_TTL="15m"
REFRESH_TOKEN_TTL="7d"
REFRESH_TOKEN_COOKIE_SECURE="false"
REFRESH_TOKEN_COOKIE_SAME_SITE="lax"
REFRESH_TOKEN_COOKIE_NAME="directus_refresh_token"
CORS_ENABLED="true"
CORS_ORIGIN="true"

View File

@@ -1,6 +1,6 @@
{
"name": "directus",
"version": "9.0.0-rc.83",
"version": "9.0.0-rc.88",
"license": "GPL-3.0-only",
"homepage": "https://github.com/directus/directus#readme",
"description": "Directus is a real-time API and App dashboard for managing SQL database content.",
@@ -55,9 +55,8 @@
"prebuild": "npm run cleanup",
"build": "tsc --build && copyfiles \"src/**/*.*\" -e \"src/**/*.ts\" -u 1 dist",
"cleanup": "rimraf dist",
"dev": "cross-env DIRECTUS_DEV=true NODE_ENV=development ts-node-dev --files --transpile-only --respawn --watch \".env\" --inspect --exit-child -- src/start.ts",
"cli": "cross-env DIRECTUS_DEV=true NODE_ENV=development ts-node --script-mode --transpile-only src/cli/index.ts",
"prepublishOnly": "npm run build"
"dev": "cross-env NODE_ENV=development SERVE_APP=false ts-node-dev --files --transpile-only --respawn --watch \".env\" --inspect --exit-child -- src/start.ts",
"cli": "cross-env NODE_ENV=development SERVE_APP=false ts-node --script-mode --transpile-only src/cli/index.ts"
},
"engines": {
"node": ">=12.20.0"
@@ -69,15 +68,15 @@
"example.env"
],
"dependencies": {
"@directus/app": "9.0.0-rc.83",
"@directus/drive": "9.0.0-rc.83",
"@directus/drive-azure": "9.0.0-rc.83",
"@directus/drive-gcs": "9.0.0-rc.83",
"@directus/drive-s3": "9.0.0-rc.83",
"@directus/format-title": "9.0.0-rc.83",
"@directus/schema": "9.0.0-rc.83",
"@directus/shared": "9.0.0-rc.83",
"@directus/specs": "9.0.0-rc.83",
"@directus/app": "9.0.0-rc.88",
"@directus/drive": "9.0.0-rc.88",
"@directus/drive-azure": "9.0.0-rc.88",
"@directus/drive-gcs": "9.0.0-rc.88",
"@directus/drive-s3": "9.0.0-rc.88",
"@directus/format-title": "9.0.0-rc.88",
"@directus/schema": "9.0.0-rc.88",
"@directus/shared": "9.0.0-rc.88",
"@directus/specs": "9.0.0-rc.88",
"@godaddy/terminus": "^4.9.0",
"@rollup/plugin-alias": "^3.1.2",
"@rollup/plugin-virtual": "^2.0.3",
@@ -99,14 +98,13 @@
"dotenv": "^10.0.0",
"eventemitter2": "^6.4.3",
"execa": "^5.1.1",
"exif-reader": "^1.0.3",
"exifr": "^7.1.2",
"express": "^4.17.1",
"express-session": "^1.17.2",
"fs-extra": "^10.0.0",
"grant": "^5.4.14",
"graphql": "^15.5.0",
"graphql-compose": "^9.0.1",
"icc": "^2.0.0",
"inquirer": "^8.1.1",
"joi": "^17.3.0",
"js-yaml": "^4.1.0",
@@ -115,7 +113,7 @@
"jsonwebtoken": "^8.5.1",
"keyv": "^4.0.3",
"knex": "^0.95.6",
"knex-schema-inspector": "^1.5.7",
"knex-schema-inspector": "1.5.13",
"liquidjs": "^9.25.0",
"lodash": "^4.17.21",
"macos-release": "^2.4.1",
@@ -125,12 +123,13 @@
"node-cron": "^3.0.0",
"node-machine-id": "^1.1.12",
"nodemailer": "^6.6.1",
"object-hash": "^2.2.0",
"openapi3-ts": "^2.0.0",
"ora": "^5.4.0",
"otplib": "^12.0.1",
"pino": "^6.11.3",
"pino": "6.13.0",
"pino-colada": "^2.1.0",
"pino-http": "^5.5.0",
"pino-http": "5.6.0",
"prettier": "^2.3.1",
"qs": "^6.9.4",
"rate-limiter-flexible": "^2.2.2",
@@ -138,6 +137,7 @@
"rollup": "^2.52.1",
"sharp": "^0.28.3",
"stream-json": "^1.7.1",
"update-check": "^1.5.4",
"uuid": "^8.3.2",
"uuid-validate": "0.0.3"
},
@@ -151,43 +151,43 @@
"memcached": "^2.2.2",
"mysql": "^2.18.1",
"nodemailer-mailgun-transport": "^2.1.3",
"oracledb": "^5.0.0",
"pg": "^8.6.0",
"sqlite3": "^5.0.2",
"tedious": "^11.0.8"
},
"gitHead": "24621f3934dc77eb23441331040ed13c676ceffd",
"devDependencies": {
"@types/async": "3.2.6",
"@types/async": "3.2.7",
"@types/atob": "2.1.2",
"@types/body-parser": "1.19.0",
"@types/busboy": "0.2.3",
"@types/body-parser": "1.19.1",
"@types/busboy": "0.2.4",
"@types/cookie-parser": "1.4.2",
"@types/cors": "2.8.10",
"@types/cors": "2.8.12",
"@types/destroy": "1.0.0",
"@types/express": "4.17.12",
"@types/express": "4.17.13",
"@types/express-pino-logger": "4.0.2",
"@types/express-session": "1.17.3",
"@types/fs-extra": "9.0.11",
"@types/inquirer": "7.3.2",
"@types/js-yaml": "4.0.1",
"@types/json2csv": "5.0.2",
"@types/jsonwebtoken": "8.5.2",
"@types/keyv": "3.1.1",
"@types/lodash": "4.14.170",
"@types/express-session": "1.17.4",
"@types/fs-extra": "9.0.12",
"@types/inquirer": "7.3.3",
"@types/js-yaml": "4.0.2",
"@types/json2csv": "5.0.3",
"@types/jsonwebtoken": "8.5.4",
"@types/keyv": "3.1.2",
"@types/lodash": "4.14.172",
"@types/mime-types": "2.1.0",
"@types/ms": "0.7.31",
"@types/node": "15.12.2",
"@types/node-cron": "2.0.3",
"@types/nodemailer": "6.4.2",
"@types/qs": "6.9.6",
"@types/sharp": "0.28.3",
"@types/stream-json": "1.7.0",
"@types/uuid": "8.3.0",
"@types/node-cron": "2.0.4",
"@types/nodemailer": "6.4.4",
"@types/object-hash": "2.1.1",
"@types/qs": "6.9.7",
"@types/sharp": "0.28.5",
"@types/stream-json": "1.7.1",
"@types/uuid": "8.3.1",
"@types/uuid-validate": "0.0.1",
"copyfiles": "2.4.1",
"cross-env": "7.0.3",
"ts-node-dev": "1.1.7",
"typescript": "4.3.4"
"ts-node-dev": "1.1.8",
"typescript": "4.3.5"
}
}

View File

@@ -24,7 +24,7 @@ import settingsRouter from './controllers/settings';
import usersRouter from './controllers/users';
import utilsRouter from './controllers/utils';
import webhooksRouter from './controllers/webhooks';
import { isInstalled, validateDBConnection } from './database';
import { isInstalled, validateDBConnection, validateMigrations } from './database';
import { emitAsyncSafe } from './emitter';
import env from './env';
import { InvalidPayloadException } from './exceptions';
@@ -47,6 +47,12 @@ import { session } from './middleware/session';
export default async function createApp(): Promise<express.Application> {
validateEnv(['KEY', 'SECRET']);
try {
new URL(env.PUBLIC_URL);
} catch {
logger.warn('PUBLIC_URL is not a valid URL');
}
await validateDBConnection();
if ((await isInstalled()) === false) {
@@ -54,6 +60,10 @@ export default async function createApp(): Promise<express.Application> {
process.exit(1);
}
if ((await validateMigrations()) === false) {
logger.warn(`Database migrations have not all been run`);
}
await initializeExtensions();
registerExtensionHooks();
@@ -99,7 +109,15 @@ export default async function createApp(): Promise<express.Application> {
app.use(cors);
}
if (!('DIRECTUS_DEV' in process.env)) {
app.get('/', (req, res, next) => {
if (env.ROOT_REDIRECT) {
res.redirect(env.ROOT_REDIRECT);
} else {
next();
}
});
if (env.SERVE_APP) {
const adminPath = require.resolve('@directus/app/dist/index.html');
const publicUrl = env.PUBLIC_URL.endsWith('/') ? env.PUBLIC_URL : env.PUBLIC_URL + '/';
@@ -107,14 +125,6 @@ export default async function createApp(): Promise<express.Application> {
let html = fse.readFileSync(adminPath, 'utf-8');
html = html.replace(/<meta charset="utf-8" \/>/, `<meta charset="utf-8" />\n\t\t<base href="${publicUrl}admin/">`);
app.get('/', (req, res, next) => {
if (env.ROOT_REDIRECT) {
res.redirect(env.ROOT_REDIRECT);
} else {
next();
}
});
app.get('/admin', (req, res) => res.send(html));
app.use('/admin', express.static(path.join(adminPath, '..')));
app.use('/admin/*', (req, res) => {

View File

@@ -12,12 +12,12 @@ export function getCache(): { cache: Keyv | null; schemaCache: Keyv | null } {
if (env.CACHE_ENABLED === true && cache === null) {
validateEnv(['CACHE_NAMESPACE', 'CACHE_TTL', 'CACHE_STORE']);
cache = getKeyvInstance(ms(env.CACHE_TTL as string));
cache.on('error', (err) => logger.error(err));
cache.on('error', (err) => logger.warn(err, `[cache] ${err}`));
}
if (env.CACHE_SCHEMA !== false && schemaCache === null) {
schemaCache = getKeyvInstance(typeof env.CACHE_SCHEMA === 'string' ? ms(env.CACHE_SCHEMA) : undefined);
schemaCache.on('error', (err) => logger.error(err));
schemaCache.on('error', (err) => logger.warn(err, `[cache] ${err}`));
}
return { cache, schemaCache };
@@ -43,7 +43,11 @@ function getConfig(store: 'memory' | 'redis' | 'memcache' = 'memory', ttl: numbe
if (store === 'redis') {
const KeyvRedis = require('@keyv/redis');
config.store = new KeyvRedis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'));
config.store = new KeyvRedis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'), {
commandTimeout: 500,
retryStrategy: false,
});
}
if (store === 'memcache') {

View File

@@ -1,3 +1,4 @@
import { Knex } from 'knex';
import { nanoid } from 'nanoid';
import runMigrations from '../../../database/migrations/run';
import installDatabase from '../../../database/seeds/run';
@@ -5,19 +6,16 @@ import env from '../../../env';
import logger from '../../../logger';
import { getSchema } from '../../../utils/get-schema';
import { RolesService, UsersService, SettingsService } from '../../../services';
import getDatabase, { isInstalled, hasDatabaseConnection } from '../../../database';
import getDatabase, { isInstalled, validateDBConnection, hasDatabaseConnection } from '../../../database';
import { SchemaOverview } from '../../../types';
export default async function bootstrap({ skipAdminInit }: { skipAdminInit?: boolean }): Promise<void> {
logger.info('Initializing bootstrap...');
if ((await isDatabaseAvailable()) === false) {
logger.error(`Can't connect to the database`);
process.exit(1);
}
const database = getDatabase();
await waitForDatabase(database);
if ((await isInstalled()) === false) {
logger.info('Installing Directus system tables...');
@@ -48,19 +46,20 @@ export default async function bootstrap({ skipAdminInit }: { skipAdminInit?: boo
process.exit(0);
}
async function isDatabaseAvailable() {
async function waitForDatabase(database: Knex) {
const tries = 5;
const secondsBetweenTries = 5;
for (let i = 0; i < tries; i++) {
if (await hasDatabaseConnection()) {
if (await hasDatabaseConnection(database)) {
return true;
}
await new Promise((resolve) => setTimeout(resolve, secondsBetweenTries * 1000));
}
return false;
// This will throw and exit the process if the database is not available
await validateDBConnection(database);
}
async function createDefaultAdmin(schema: SchemaOverview) {

View File

@@ -50,6 +50,13 @@ const password = (): Record<string, string> => ({
mask: '*',
});
const encrypt = (): Record<string, string | boolean> => ({
type: 'confirm',
name: 'options__encrypt',
message: 'Encrypt Connection:',
default: false,
});
const ssl = (): Record<string, string | boolean> => ({
type: 'confirm',
name: 'ssl',
@@ -62,5 +69,5 @@ export const databaseQuestions = {
mysql: [host, port, database, user, password],
pg: [host, port, database, user, password, ssl],
oracledb: [host, port, database, user, password],
mssql: [host, port, database, user, password],
mssql: [host, port, database, user, password, encrypt],
};

View File

@@ -9,6 +9,7 @@ export type Credentials = {
user?: string;
password?: string;
ssl?: boolean;
options__encrypt?: boolean;
};
export default function createDBConnection(
client: 'sqlite3' | 'mysql' | 'pg' | 'oracledb' | 'mssql',
@@ -23,26 +24,26 @@ export default function createDBConnection(
filename: filename as string,
};
} else {
if (client !== 'pg') {
const { host, port, database, user, password } = credentials as Credentials;
const { host, port, database, user, password } = credentials as Credentials;
connection = {
host: host,
port: Number(port),
database: database,
user: user,
password: password,
};
} else {
const { host, port, database, user, password, ssl } = credentials as Credentials;
connection = {
host: host,
port: Number(port),
database: database,
user: user,
password: password,
};
connection = {
host: host,
port: Number(port),
database: database,
user: user,
password: password,
ssl: ssl,
if (client === 'pg') {
const { ssl } = credentials as Credentials;
connection['ssl'] = ssl;
}
if (client === 'mssql') {
const { options__encrypt } = credentials as Credentials;
(connection as Knex.MsSqlConnectionConfig)['options'] = {
encrypt: options__encrypt,
};
}
}

View File

@@ -38,6 +38,7 @@ ACCESS_TOKEN_TTL="15m"
REFRESH_TOKEN_TTL="7d"
REFRESH_TOKEN_COOKIE_SECURE=false
REFRESH_TOKEN_COOKIE_SAME_SITE="lax"
REFRESH_TOKEN_COOKIE_NAME="directus_refresh_token"
####################################################################################################
## SSO (OAuth) Providers

View File

@@ -1,42 +1,42 @@
import { Transformation } from './types';
import { TransformationParams } from './types';
export const SYSTEM_ASSET_ALLOW_LIST: Transformation[] = [
export const SYSTEM_ASSET_ALLOW_LIST: TransformationParams[] = [
{
key: 'system-small-cover',
width: 64,
height: 64,
fit: 'cover',
transforms: [['resize', { width: 64, height: 64, fit: 'cover' }]],
},
{
key: 'system-small-contain',
width: 64,
fit: 'contain',
transforms: [['resize', { width: 64, fit: 'contain' }]],
},
{
key: 'system-medium-cover',
width: 300,
height: 300,
fit: 'cover',
transforms: [['resize', { width: 300, height: 300, fit: 'cover' }]],
},
{
key: 'system-medium-contain',
width: 300,
fit: 'contain',
transforms: [['resize', { width: 300, fit: 'contain' }]],
},
{
key: 'system-large-cover',
width: 800,
height: 600,
fit: 'cover',
transforms: [['resize', { width: 800, height: 800, fit: 'cover' }]],
},
{
key: 'system-large-contain',
width: 800,
fit: 'contain',
transforms: [['resize', { width: 800, fit: 'contain' }]],
},
];
export const ASSET_TRANSFORM_QUERY_KEYS = ['key', 'width', 'height', 'fit', 'withoutEnlargement', 'quality'];
export const ASSET_TRANSFORM_QUERY_KEYS = [
'key',
'transforms',
'width',
'height',
'format',
'fit',
'quality',
'withoutEnlargement',
];
export const FILTER_VARIABLES = ['$NOW', '$CURRENT_USER', '$CURRENT_ROLE'];

View File

@@ -10,7 +10,7 @@ import { ForbiddenException, InvalidQueryException, RangeNotSatisfiableException
import useCollection from '../middleware/use-collection';
import { AssetsService, PayloadService } from '../services';
import storage from '../storage';
import { Transformation } from '../types/assets';
import { TransformationParams, TransformationMethods, TransformationPreset } from '../types/assets';
import asyncHandler from '../utils/async-handler';
const router = Router();
@@ -68,26 +68,63 @@ router.get(
if ('key' in transformation && Object.keys(transformation).length > 1) {
throw new InvalidQueryException(`You can't combine the "key" query parameter with any other transformation.`);
}
if ('quality' in transformation && (Number(transformation.quality) < 1 || Number(transformation.quality) > 100)) {
throw new InvalidQueryException(`"quality" Parameter has to between 1 to 100`);
if ('transforms' in transformation) {
let transforms: unknown;
// Try parse the JSON array
try {
transforms = JSON.parse(transformation['transforms'] as string);
} catch {
throw new InvalidQueryException(`"transforms" Parameter needs to be a JSON array of allowed transformations.`);
}
// Check if it is actually an array.
if (!Array.isArray(transforms)) {
throw new InvalidQueryException(`"transforms" Parameter needs to be a JSON array of allowed transformations.`);
}
// Check against ASSETS_TRANSFORM_MAX_OPERATIONS
if (transforms.length > Number(env.ASSETS_TRANSFORM_MAX_OPERATIONS)) {
throw new InvalidQueryException(
`"transforms" Parameter is only allowed ${env.ASSETS_TRANSFORM_MAX_OPERATIONS} transformations.`
);
}
// Check the transformations are valid
transforms.forEach((transform) => {
const name = transform[0];
if (!TransformationMethods.includes(name)) {
throw new InvalidQueryException(`"transforms" Parameter does not allow "${name}" as a transformation.`);
}
});
transformation.transforms = transforms;
}
const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key);
const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key!);
const allKeys: string[] = [
...systemKeys,
...(assetSettings.storage_asset_presets || []).map((transformation: Transformation) => transformation.key),
...(assetSettings.storage_asset_presets || []).map((transformation: TransformationParams) => transformation.key),
];
// For use in the next request handler
res.locals.shortcuts = [...SYSTEM_ASSET_ALLOW_LIST, ...(assetSettings.storage_asset_presets || [])];
res.locals.transformation = transformation;
if (Object.keys(transformation).length === 0) {
if (
Object.keys(transformation).length === 0 ||
('transforms' in transformation && transformation.transforms!.length === 0)
) {
return next();
}
if (assetSettings.storage_asset_transform === 'all') {
if (transformation.key && allKeys.includes(transformation.key as string) === false)
if (transformation.key && allKeys.includes(transformation.key as string) === false) {
throw new InvalidQueryException(`Key "${transformation.key}" isn't configured.`);
}
return next();
} else if (assetSettings.storage_asset_transform === 'presets') {
if (allKeys.includes(transformation.key as string)) return next();
@@ -107,9 +144,9 @@ router.get(
schema: req.schema,
});
const transformation: Transformation = res.locals.transformation.key
? res.locals.shortcuts.find(
(transformation: Transformation) => transformation.key === res.locals.transformation.key
const transformation: TransformationParams | TransformationPreset = res.locals.transformation.key
? (res.locals.shortcuts as TransformationPreset[]).find(
(transformation) => transformation.key === res.locals.transformation.key
)
: res.locals.transformation;
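
For illustration, a request using the new "transforms" parameter might look roughly like the sketch below; the /assets mount path, the file id, and the availability of "resize" and "blur" as allowed methods are assumptions, not taken from this diff. The value is a URL-encoded JSON array of [method, arguments] pairs:

# decoded: [["resize",{"width":300,"fit":"cover"}],["blur",10]]
curl "http://localhost:8055/assets/<file-id>?transforms=%5B%5B%22resize%22%2C%7B%22width%22%3A300%2C%22fit%22%3A%22cover%22%7D%5D%2C%5B%22blur%22%2C10%5D%5D"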

View File

@@ -11,7 +11,8 @@ import { respond } from '../middleware/respond';
import { AuthenticationService, UsersService } from '../services';
import asyncHandler from '../utils/async-handler';
import getEmailFromProfile from '../utils/get-email-from-profile';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
import logger from '../logger';
const router = Router();
@@ -59,7 +60,7 @@ router.post(
}
if (mode === 'cookie') {
res.cookie('directus_refresh_token', refreshToken, {
res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
@@ -88,7 +89,7 @@ router.post(
schema: req.schema,
});
const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;
const currentRefreshToken = req.body.refresh_token || req.cookies[env.REFRESH_TOKEN_COOKIE_NAME];
if (!currentRefreshToken) {
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
@@ -107,7 +108,7 @@ router.post(
}
if (mode === 'cookie') {
res.cookie('directus_refresh_token', refreshToken, {
res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
@@ -136,7 +137,7 @@ router.post(
schema: req.schema,
});
const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;
const currentRefreshToken = req.body.refresh_token || req.cookies[env.REFRESH_TOKEN_COOKIE_NAME];
if (!currentRefreshToken) {
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
@@ -144,8 +145,8 @@ router.post(
await authenticationService.logout(currentRefreshToken);
if (req.cookies.directus_refresh_token) {
res.clearCookie('directus_refresh_token', {
if (req.cookies[env.REFRESH_TOKEN_COOKIE_NAME]) {
res.clearCookie(env.REFRESH_TOKEN_COOKIE_NAME, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
secure: env.REFRESH_TOKEN_COOKIE_SECURE ?? false,
@@ -161,7 +162,7 @@ router.post(
router.post(
'/password/request',
asyncHandler(async (req, res, next) => {
if (!req.body.email) {
if (typeof req.body.email !== 'string') {
throw new InvalidPayloadException(`"email" field is required.`);
}
@@ -180,6 +181,7 @@ router.post(
if (err instanceof InvalidPayloadException) {
throw err;
} else {
logger.warn(err, `[email] ${err}`);
return next();
}
}
@@ -190,11 +192,11 @@ router.post(
router.post(
'/password/reset',
asyncHandler(async (req, res, next) => {
if (!req.body.token) {
if (typeof req.body.token !== 'string') {
throw new InvalidPayloadException(`"token" field is required.`);
}
if (!req.body.password) {
if (typeof req.body.password !== 'string') {
throw new InvalidPayloadException(`"password" field is required.`);
}
@@ -320,6 +322,9 @@ router.get(
});
} catch (error) {
emitStatus('fail');
logger.warn(error);
if (redirect) {
let reason = 'UNKNOWN_EXCEPTION';
@@ -340,7 +345,7 @@ router.get(
emitStatus('success');
if (redirect) {
res.cookie('directus_refresh_token', refreshToken, {
res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),

View File

@@ -3,18 +3,17 @@ import asyncHandler from '../utils/async-handler';
import { RouteNotFoundException } from '../exceptions';
import { listExtensions, getAppExtensionSource } from '../extensions';
import { respond } from '../middleware/respond';
import { depluralize } from '@directus/shared/utils';
import { AppExtensionType, Plural } from '@directus/shared/types';
import { APP_EXTENSION_TYPES } from '@directus/shared/constants';
import { depluralize, isAppExtension } from '@directus/shared/utils';
import { Plural } from '@directus/shared/types';
const router = Router();
router.get(
'/:type',
asyncHandler(async (req, res, next) => {
const type = depluralize(req.params.type as Plural<AppExtensionType>);
const type = depluralize(req.params.type as Plural<string>);
if (APP_EXTENSION_TYPES.includes(type) === false) {
if (!isAppExtension(type)) {
throw new RouteNotFoundException(req.path);
}
@@ -32,9 +31,9 @@ router.get(
router.get(
'/:type/index.js',
asyncHandler(async (req, res) => {
const type = depluralize(req.params.type as Plural<AppExtensionType>);
const type = depluralize(req.params.type as Plural<string>);
if (APP_EXTENSION_TYPES.includes(type) === false) {
if (!isAppExtension(type)) {
throw new RouteNotFoundException(req.path);
}

View File

@@ -6,7 +6,8 @@ import validateCollection from '../middleware/collection-exists';
import { respond } from '../middleware/respond';
import useCollection from '../middleware/use-collection';
import { FieldsService } from '../services/fields';
import { Field, types } from '../types';
import { Field, Type } from '@directus/shared/types';
import { TYPES } from '@directus/shared/constants';
import asyncHandler from '../utils/async-handler';
const router = Router();
@@ -65,7 +66,7 @@ const newFieldSchema = Joi.object({
collection: Joi.string().optional(),
field: Joi.string().required(),
type: Joi.string()
.valid(...types, ...ALIAS_TYPES)
.valid(...TYPES, ...ALIAS_TYPES)
.allow(null)
.optional(),
schema: Joi.object({
@@ -93,7 +94,7 @@ router.post(
throw new InvalidPayloadException(error.message);
}
const field: Partial<Field> & { field: string; type: typeof types[number] | null } = req.body;
const field: Partial<Field> & { field: string; type: Type | null } = req.body;
await service.createField(req.params.collection, field);
@@ -152,7 +153,7 @@ router.patch(
const updateSchema = Joi.object({
type: Joi.string()
.valid(...types, ...ALIAS_TYPES)
.valid(...TYPES, ...ALIAS_TYPES)
.allow(null),
schema: Joi.object({
default_value: Joi.any(),
@@ -183,7 +184,7 @@ router.patch(
throw new InvalidPayloadException(`You need to provide "type" when providing "schema".`);
}
const fieldData: Partial<Field> & { field: string; type: typeof types[number] } = req.body;
const fieldData: Partial<Field> & { field: string; type: Type } = req.body;
if (!fieldData.field) fieldData.field = req.params.field;

View File

@@ -11,7 +11,7 @@ import { validateBatch } from '../middleware/validate-batch';
import { FilesService, MetaService } from '../services';
import { File, PrimaryKey } from '../types';
import asyncHandler from '../utils/async-handler';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
const router = express.Router();

View File

@@ -2,12 +2,13 @@ import argon2 from 'argon2';
import { Router } from 'express';
import Joi from 'joi';
import { nanoid } from 'nanoid';
import { InvalidPayloadException, InvalidQueryException } from '../exceptions';
import { ForbiddenException, InvalidPayloadException, InvalidQueryException } from '../exceptions';
import collectionExists from '../middleware/collection-exists';
import { respond } from '../middleware/respond';
import { RevisionsService, UtilsService, ImportService } from '../services';
import asyncHandler from '../utils/async-handler';
import Busboy from 'busboy';
import { getCache } from '../cache';
const router = Router();
@@ -115,4 +116,20 @@ router.post(
})
);
router.post(
'/cache/clear',
asyncHandler(async (req, res) => {
if (req.accountability?.admin !== true) {
throw new ForbiddenException();
}
const { cache, schemaCache } = getCache();
await cache?.clear();
await schemaCache?.clear();
res.status(200).end();
})
);
export default router;
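
A quick sketch of exercising the new cache-clearing route, assuming this router is mounted under /utils and an admin token is supplied:

curl -X POST "http://localhost:8055/utils/cache/clear" \
  -H "Authorization: Bearer <admin-token>"
# 200 with an empty body; non-admin requests are rejected with a ForbiddenException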

View File

@@ -5,6 +5,9 @@ import env from '../env';
import logger from '../logger';
import { getConfigFromEnv } from '../utils/get-config-from-env';
import { validateEnv } from '../utils/validate-env';
import fse from 'fs-extra';
import path from 'path';
import { merge } from 'lodash';
let database: Knex | null = null;
let inspector: ReturnType<typeof SchemaInspector> | null = null;
@@ -65,6 +68,13 @@ export default function getDatabase(): Knex {
};
}
if (env.DB_CLIENT === 'mssql') {
// This brings MS SQL in line with the other DB vendors. We shouldn't do any automatic
// timezone conversion on the database level, especially not when other database vendors don't
// act the same
merge(knexConfig, { connection: { options: { useUTC: false } } });
}
database = knex(knexConfig);
const times: Record<string, number> = {};
@@ -94,8 +104,8 @@ export function getSchemaInspector(): ReturnType<typeof SchemaInspector> {
return inspector;
}
export async function hasDatabaseConnection(): Promise<boolean> {
const database = getDatabase();
export async function hasDatabaseConnection(database?: Knex): Promise<boolean> {
database = database ?? getDatabase();
try {
if (env.DB_CLIENT === 'oracledb') {
@@ -103,15 +113,22 @@ export async function hasDatabaseConnection(): Promise<boolean> {
} else {
await database.raw('SELECT 1');
}
return true;
} catch {
return false;
}
}
export async function validateDBConnection(): Promise<void> {
export async function validateDBConnection(database?: Knex): Promise<void> {
database = database ?? getDatabase();
try {
await hasDatabaseConnection();
if (env.DB_CLIENT === 'oracledb') {
await database.raw('select 1 from DUAL');
} else {
await database.raw('SELECT 1');
}
} catch (error) {
logger.error(`Can't connect to the database.`);
logger.error(error);
@@ -127,3 +144,35 @@ export async function isInstalled(): Promise<boolean> {
// exists when using the installer CLI.
return await inspector.hasTable('directus_collections');
}
export async function validateMigrations(): Promise<boolean> {
const database = getDatabase();
try {
let migrationFiles = await fse.readdir(path.join(__dirname, 'migrations'));
const customMigrationsPath = path.resolve(env.EXTENSIONS_PATH, 'migrations');
let customMigrationFiles =
((await fse.pathExists(customMigrationsPath)) && (await fse.readdir(customMigrationsPath))) || [];
migrationFiles = migrationFiles.filter(
(file: string) => file.startsWith('run') === false && file.endsWith('.d.ts') === false
);
customMigrationFiles = customMigrationFiles.filter((file: string) => file.endsWith('.js'));
migrationFiles.push(...customMigrationFiles);
const requiredVersions = migrationFiles.map((filePath) => filePath.split('-')[0]);
const completedVersions = (await database.select('version').from('directus_migrations')).map(
({ version }) => version
);
return requiredVersions.every((version) => completedVersions.includes(version));
} catch (error) {
logger.error(`Database migrations cannot be found`);
logger.error(error);
throw process.exit(1);
}
}

View File

@@ -1,7 +1,6 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
import env from '../../env';
async function oracleAlterUrl(knex: Knex, type: string): Promise<void> {
await knex.raw('ALTER TABLE "directus_webhooks" ADD "url__temp" ?', [knex.raw(type)]);
@@ -23,7 +22,7 @@ export async function up(knex: Knex): Promise<void> {
}
export async function down(knex: Knex): Promise<void> {
if (env.DB_CLIENT === 'oracledb') {
if (knex.client instanceof Client_Oracledb) {
await oracleAlterUrl(knex, 'VARCHAR2(255)');
return;
}

View File

@@ -1,7 +1,6 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
import env from '../../env';
async function oracleAlterCollections(knex: Knex, type: string): Promise<void> {
await knex.raw('ALTER TABLE "directus_webhooks" ADD "collections__temp" ?', [knex.raw(type)]);
@@ -23,7 +22,7 @@ export async function up(knex: Knex): Promise<void> {
}
export async function down(knex: Knex): Promise<void> {
if (env.DB_CLIENT === 'oracledb') {
if (knex.client instanceof Client_Oracledb) {
await oracleAlterCollections(knex, 'VARCHAR2(255)');
return;
}

View File

@@ -1,12 +1,22 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
export async function up(knex: Knex): Promise<void> {
if (knex.client instanceof Client_Oracledb) {
return;
}
await knex.schema.alterTable('directus_files', (table) => {
table.bigInteger('filesize').nullable().defaultTo(null).alter();
});
}
export async function down(knex: Knex): Promise<void> {
if (knex.client instanceof Client_Oracledb) {
return;
}
await knex.schema.alterTable('directus_files', (table) => {
table.integer('filesize').nullable().defaultTo(null).alter();
});

View File

@@ -0,0 +1,13 @@
import { Knex } from 'knex';
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_fields', (table) => {
table.json('conditions');
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_fields', (table) => {
table.dropColumn('conditions');
});
}

View File

@@ -0,0 +1,22 @@
import { Knex } from 'knex';
import { getDefaultIndexName } from '../../utils/get-default-index-name';
const indexName = getDefaultIndexName('foreign', 'directus_settings', 'storage_default_folder');
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_settings', (table) => {
table
.uuid('storage_default_folder')
.references('id')
.inTable('directus_folders')
.withKeyName(indexName)
.onDelete('SET NULL');
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_settings', (table) => {
table.dropForeign(['storage_default_folder'], indexName);
table.dropColumn('storage_default_folder');
});
}

View File

@@ -0,0 +1,49 @@
import { Knex } from 'knex';
import logger from '../../logger';
export async function up(knex: Knex): Promise<void> {
const dividerGroups = await knex.select('*').from('directus_fields').where('interface', '=', 'group-divider');
for (const dividerGroup of dividerGroups) {
const newOptions: { showHeader: true; headerIcon?: string; headerColor?: string } = { showHeader: true };
if (dividerGroup.options) {
try {
const options =
typeof dividerGroup.options === 'string' ? JSON.parse(dividerGroup.options) : dividerGroup.options;
if (options.icon) newOptions.headerIcon = options.icon;
if (options.color) newOptions.headerColor = options.color;
} catch (err) {
logger.warn(`Couldn't convert previous options from field ${dividerGroup.collection}.${dividerGroup.field}`);
logger.warn(err);
}
}
try {
await knex('directus_fields')
.update({
interface: 'group-standard',
options: JSON.stringify(newOptions),
})
.where('id', '=', dividerGroup.id);
} catch (err) {
logger.warn(`Couldn't update ${dividerGroup.collection}.${dividerGroup.field} to new group interface`);
logger.warn(err);
}
}
await knex('directus_fields')
.update({
interface: 'group-standard',
})
.where({ interface: 'group-raw' });
}
export async function down(knex: Knex): Promise<void> {
await knex('directus_fields')
.update({
interface: 'group-raw',
})
.where('interface', '=', 'group-standard');
}
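
What the conversion does to a single row is easiest to see with hypothetical values: the divider's icon and color move into the header options of the standard group interface.

// Hypothetical directus_fields row before up():
const before = { interface: 'group-divider', options: { icon: 'folder', color: '#2F80ED' } };

// The same row after up():
const after = {
  interface: 'group-standard',
  options: { showHeader: true, headerIcon: 'folder', headerColor: '#2F80ED' },
};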

View File

@@ -0,0 +1,13 @@
import { Knex } from 'knex';
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_fields', (table) => {
table.boolean('required').defaultTo(false);
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_fields', (table) => {
table.dropColumn('required');
});
}

View File

@@ -0,0 +1,35 @@
import { Knex } from 'knex';
export async function up(knex: Knex): Promise<void> {
const groups = await knex.select('*').from('directus_fields').where({ interface: 'group-standard' });
const raw = [];
const detail = [];
for (const group of groups) {
const options = typeof group.options === 'string' ? JSON.parse(group.options) : group.options || {};
if (options.showHeader === true) {
detail.push(group);
} else {
raw.push(group);
}
}
for (const field of raw) {
await knex('directus_fields').update({ interface: 'group-raw' }).where({ id: field.id });
}
for (const field of detail) {
await knex('directus_fields').update({ interface: 'group-detail' }).where({ id: field.id });
}
}
export async function down(knex: Knex): Promise<void> {
await knex('directus_fields')
.update({
interface: 'group-standard',
})
.where({ interface: 'group-detail' })
.orWhere({ interface: 'group-raw' });
}

View File

@@ -0,0 +1,94 @@
import { Knex } from 'knex';
// Change image metadata structure to match the output from 'exifr'
export async function up(knex: Knex): Promise<void> {
const files = await knex
.select<{ id: number; metadata: string }[]>('id', 'metadata')
.from('directus_files')
.whereNotNull('metadata');
for (const { id, metadata } of files) {
let prevMetadata;
try {
prevMetadata = JSON.parse(metadata);
} catch {
continue;
}
// An update is only required if the metadata has 'exif' data
if (prevMetadata.exif) {
// Get all data from 'exif' and rename the following keys:
// - 'image' to 'ifd0'
// - 'thumbnail' to 'ifd1'
// - 'interoperability' to 'interop'
const newMetadata = prevMetadata.exif;
if (newMetadata.image) {
newMetadata.ifd0 = newMetadata.image;
delete newMetadata.image;
}
if (newMetadata.thumbnail) {
newMetadata.ifd1 = newMetadata.thumbnail;
delete newMetadata.thumbnail;
}
if (newMetadata.interoperability) {
newMetadata.interop = newMetadata.interoperability;
delete newMetadata.interoperability;
}
if (prevMetadata.icc) {
newMetadata.icc = prevMetadata.icc;
}
if (prevMetadata.iptc) {
newMetadata.iptc = prevMetadata.iptc;
}
await knex('directus_files')
.update({ metadata: JSON.stringify(newMetadata) })
.where({ id });
}
}
}
export async function down(knex: Knex): Promise<void> {
const files = await knex
.select<{ id: number; metadata: string }[]>('id', 'metadata')
.from('directus_files')
.whereNotNull('metadata')
.whereNot('metadata', '{}');
for (const { id, metadata } of files) {
const prevMetadata = JSON.parse(metadata);
// An update is only required if the metadata has keys other than 'icc' and 'iptc'
if (Object.keys(prevMetadata).filter((key) => key !== 'icc' && key !== 'iptc').length > 0) {
// Put all data under 'exif' and rename/move keys afterwards
const newMetadata: { exif: Record<string, unknown>; icc?: unknown; iptc?: unknown } = { exif: prevMetadata };
if (newMetadata.exif.ifd0) {
newMetadata.exif.image = newMetadata.exif.ifd0;
delete newMetadata.exif.ifd0;
}
if (newMetadata.exif.ifd1) {
newMetadata.exif.thumbnail = newMetadata.exif.ifd1;
delete newMetadata.exif.ifd1;
}
if (newMetadata.exif.interop) {
newMetadata.exif.interoperability = newMetadata.exif.interop;
delete newMetadata.exif.interop;
}
if (newMetadata.exif.icc) {
newMetadata.icc = newMetadata.exif.icc;
delete newMetadata.exif.icc;
}
if (newMetadata.exif.iptc) {
newMetadata.iptc = newMetadata.exif.iptc;
delete newMetadata.exif.iptc;
}
await knex('directus_files')
.update({ metadata: JSON.stringify(newMetadata) })
.where({ id });
}
}
}
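
The key renames are easier to follow with a concrete shape; hypothetical metadata before and after up() (values are illustrative only):

// Stored by the old exif-reader based pipeline:
const before = {
  exif: {
    image: { Make: 'Canon' },
    thumbnail: { Compression: 6 },
    interoperability: { InteropIndex: 'R98' },
  },
  icc: { description: 'sRGB' },
  iptc: { keywords: ['directus'] },
};

// Equivalent structure after up(), matching exifr's mergeOutput: false layout:
const after = {
  ifd0: { Make: 'Canon' },
  ifd1: { Compression: 6 },
  interop: { InteropIndex: 'R98' },
  icc: { description: 'sRGB' },
  iptc: { keywords: ['directus'] },
};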

View File

@@ -5,12 +5,7 @@ import fse from 'fs-extra';
import { Knex } from 'knex';
import path from 'path';
import env from '../../env';
type Migration = {
version: string;
name: string;
timestamp: Date;
};
import { Migration } from '../../types';
export default async function run(database: Knex, direction: 'up' | 'down' | 'latest'): Promise<void> {
let migrationFiles = await fse.readdir(__dirname);

View File

@@ -7,7 +7,7 @@ import { applyFunctionToColumnName } from '../utils/apply-function-to-column-nam
import applyQuery from '../utils/apply-query';
import { getColumn } from '../utils/get-column';
import { stripFunction } from '../utils/strip-function';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
import getDatabase from './index';
type RunASTOptions = {

View File

@@ -3,13 +3,13 @@ import yaml from 'js-yaml';
import { Knex } from 'knex';
import { isObject } from 'lodash';
import path from 'path';
import { types } from '../../types';
import { Type } from '@directus/shared/types';
type TableSeed = {
table: string;
columns: {
[column: string]: {
type?: typeof types[number];
type?: Type;
primary?: boolean;
nullable?: boolean;
default?: any;
@@ -45,6 +45,8 @@ export default async function runSeed(database: Knex): Promise<void> {
for (const [columnName, columnInfo] of Object.entries(seedData.columns)) {
let column: Knex.ColumnBuilder;
if (columnInfo.type === 'alias' || columnInfo.type === 'unknown') return;
if (columnInfo.type === 'string') {
column = tableBuilder.string(columnName, columnInfo.length);
} else if (columnInfo.increments) {

View File

@@ -8,6 +8,7 @@ defaults:
note: null
translations: null
display_template: null
accountability: 'all'
data:
- collection: directus_activity

View File

@@ -73,3 +73,8 @@ fields:
- collection: directus_fields
field: note
width: half
- collection: directus_fields
field: conditions
hidden: true
special: json

View File

@@ -1,7 +1,7 @@
import fse from 'fs-extra';
import { merge } from 'lodash';
import path from 'path';
import { FieldMeta } from '../../../types';
import { FieldMeta } from '@directus/shared/types';
import { requireYAML } from '../../../utils/require-yaml';
const defaults = requireYAML(require.resolve('./_defaults.yaml'));

View File

@@ -124,7 +124,7 @@ fields:
options:
slug: true
onlyOnCreate: false
width: half
width: full
- field: fit
name: Fit
type: string
@@ -173,6 +173,7 @@ fields:
step: 1
width: half
- field: withoutEnlargement
name: Upscaling
type: boolean
schema:
default_value: false
@@ -181,6 +182,51 @@ fields:
width: half
options:
label: Don't upscale images
- field: format
name: Format
type: string
schema:
is_nullable: false
default_value: ''
meta:
interface: select-dropdown
options:
allowNone: true
choices:
- value: jpeg
text: JPEG
- value: png
text: PNG
- value: webp
text: WebP
- value: tiff
text: Tiff
width: half
- field: transforms
name: Additional Transformations
type: json
schema:
is_nullable: false
default_value: []
meta:
note:
The Sharp method name and its arguments. See https://sharp.pixelplumbing.com/api-constructor for more
information.
interface: json
options:
template: >
[
["blur", 45],
["grayscale"],
["extend", { "right": 500, "background": "rgb(255, 0, 0)" }]
]
placeholder: >
[
["blur", 45],
["grayscale"],
["extend", { "right": 500, "background": "rgb(255, 0, 0)" }]
]
width: full
template: '{{key}}'
special: json
width: full
@@ -197,6 +243,11 @@ fields:
text: Presets Only
width: half
- field: storage_default_folder
interface: system-folder
width: half
note: Default folder where new files are uploaded
- field: overrides_divider
interface: presentation-divider
options:

View File

@@ -8,7 +8,7 @@ import fs from 'fs';
import { clone, toNumber, toString } from 'lodash';
import path from 'path';
import { requireYAML } from './utils/require-yaml';
import { toArray } from './utils/to-array';
import { toArray } from '@directus/shared/utils';
const acceptedEnvTypes = ['string', 'number', 'regex', 'array'];
@@ -16,7 +16,7 @@ const defaults: Record<string, any> = {
CONFIG_PATH: path.resolve(process.cwd(), '.env'),
PORT: 8055,
PUBLIC_URL: 'http://localhost:8055',
PUBLIC_URL: '/',
MAX_PAYLOAD_SIZE: '100kb',
STORAGE_LOCATIONS: 'local',
@@ -34,6 +34,7 @@ const defaults: Record<string, any> = {
REFRESH_TOKEN_TTL: '7d',
REFRESH_TOKEN_COOKIE_SECURE: false,
REFRESH_TOKEN_COOKIE_SAME_SITE: 'lax',
REFRESH_TOKEN_COOKIE_NAME: 'directus_refresh_token',
ROOT_REDIRECT: './admin',
@@ -64,9 +65,12 @@ const defaults: Record<string, any> = {
TELEMETRY: true,
ASSETS_CACHE_TTL: '30m',
ASSETS_CACHE_TTL: '30d',
ASSETS_TRANSFORM_MAX_CONCURRENT: 1,
ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION: 6000,
ASSETS_TRANSFORM_MAX_OPERATIONS: 5,
SERVE_APP: true,
};
// Allows us to force certain environment variable into a type, instead of relying
@@ -170,6 +174,8 @@ function getEnvironmentValueByType(envVariableString: string) {
return new RegExp(envVariableValue);
case 'string':
return envVariableValue;
case 'json':
return tryJSON(envVariableValue);
}
}
@@ -181,14 +187,14 @@ function processValues(env: Record<string, any>) {
// and store it in the variable with the same name but without '_FILE' at the end
let newKey;
if (key.length > 5 && key.endsWith('_FILE')) {
newKey = key.slice(0, -5);
if (newKey in env) {
throw new Error(
`Duplicate environment variable encountered: you can't use "${newKey}" and "${key}" simultaneously.`
);
}
try {
value = fs.readFileSync(value, { encoding: 'utf8' });
newKey = key.slice(0, -5);
if (newKey in env) {
throw new Error(
`Duplicate environment variable encountered: you can't use "${key}" and "${newKey}" simultaneously.`
);
}
key = newKey;
} catch {
throw new Error(`Failed to read value from file "${value}", defined in environment variable "${key}".`);
@@ -214,6 +220,9 @@ function processValues(env: Record<string, any>) {
case 'array':
env[key] = toArray(value);
break;
case 'json':
env[key] = tryJSON(value);
break;
}
continue;
}
@@ -247,6 +256,14 @@ function processValues(env: Record<string, any>) {
continue;
}
if (String(value).includes(',')) {
env[key] = toArray(value);
}
// Try converting the value to a JS object. This allows JSON objects to be passed for nested
// config flags, or custom param names (that aren't camelCased)
env[key] = tryJSON(value);
// If '_FILE' variable hasn't been processed yet, store it as it is (string)
if (newKey) {
env[key] = value;
@@ -255,3 +272,11 @@ function processValues(env: Record<string, any>) {
return env;
}
function tryJSON(value: any) {
try {
return JSON.parse(value);
} catch {
return value;
}
}
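
Taken together, these changes let environment values be provided as JSON (either via the explicit type prefix or as a plain value that happens to parse), and let any variable be read from a file through the `_FILE` suffix. A small sketch with hypothetical variable names and paths, assuming the `type:value` prefix convention parsed by getEnvironmentValueByType():

// Hypothetical values; names and paths are for illustration only.
process.env.MY_FLAGS = 'json:{"featureA":true,"featureB":false}'; // explicit cast -> parsed object
process.env.MY_HEADERS = '{"Cache-Control":"max-age=600"}'; // plain JSON also ends up parsed via tryJSON()
process.env.DB_PASSWORD_FILE = '/run/secrets/db_password'; // file contents are stored under DB_PASSWORD

// tryJSON() never throws: invalid JSON simply stays a string.
// tryJSON('{"a":1}') -> { a: 1 }
// tryJSON('hello')   -> 'hello'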

View File

@@ -1,4 +1,4 @@
import { BaseException } from '../base';
import { BaseException } from '@directus/shared/exceptions';
type Exceptions = {
collection: string;

View File

@@ -46,7 +46,7 @@ async function uniqueViolation(error: MSSQLError) {
* information_schema when this happens
*/
const betweenQuotes = /'([^']+)'/;
const betweenQuotes = /'([^']+)'/g;
const betweenParens = /\(([^)]+)\)/g;
const quoteMatches = error.message.match(betweenQuotes);
@@ -54,21 +54,35 @@ async function uniqueViolation(error: MSSQLError) {
if (!quoteMatches || !parenMatches) return error;
const keyName = quoteMatches[1];
const keyName = quoteMatches[1]?.slice(1, -1);
const database = getDatabase();
let collection = quoteMatches[0]?.slice(1, -1);
let field: string | null = null;
const constraintUsage = await database
.select('*')
.from('INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE')
.where({
CONSTRAINT_NAME: keyName,
})
.first();
if (keyName) {
const database = getDatabase();
const collection = constraintUsage.TABLE_NAME;
const field = constraintUsage.COLUMN_NAME;
const invalid = parenMatches[parenMatches.length - 1].slice(1, -1);
const constraintUsage = await database
.select('sys.columns.name as field', database.raw('OBJECT_NAME(??) as collection', ['sys.columns.object_id']))
.from('sys.indexes')
.innerJoin('sys.index_columns', (join) => {
join
.on('sys.indexes.object_id', '=', 'sys.index_columns.object_id')
.andOn('sys.indexes.index_id', '=', 'sys.index_columns.index_id');
})
.innerJoin('sys.columns', (join) => {
join
.on('sys.index_columns.object_id', '=', 'sys.columns.object_id')
.andOn('sys.index_columns.column_id', '=', 'sys.columns.column_id');
})
.where('sys.indexes.name', '=', keyName)
.first();
collection = constraintUsage?.collection;
field = constraintUsage?.field;
}
const invalid = parenMatches[parenMatches.length - 1]?.slice(1, -1);
return new RecordNotUniqueException(field, {
collection,

View File

@@ -1,4 +1,4 @@
import { BaseException } from '../base';
import { BaseException } from '@directus/shared/exceptions';
type Extensions = {
collection: string;

View File

@@ -1,4 +1,4 @@
import { BaseException } from '../base';
import { BaseException } from '@directus/shared/exceptions';
type Exceptions = {
collection: string;

View File

@@ -1,4 +1,4 @@
import { BaseException } from '../base';
import { BaseException } from '@directus/shared/exceptions';
type Extensions = {
collection: string;

View File

@@ -1,4 +1,4 @@
import { BaseException } from '../base';
import { BaseException } from '@directus/shared/exceptions';
type Exceptions = {
collection: string;

View File

@@ -1,4 +1,4 @@
import { BaseException } from '../base';
import { BaseException } from '@directus/shared/exceptions';
type Extensions = {
collection: string;

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class ForbiddenException extends BaseException {
constructor() {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class GraphQLValidationException extends BaseException {
constructor(extensions: Record<string, any>) {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
type Extensions = {
limit: number;

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class IllegalAssetTransformation extends BaseException {
constructor(message: string) {

View File

@@ -1,5 +1,3 @@
export * from './base';
export * from './failed-validation';
export * from './forbidden';
export * from './graphql-validation';
export * from './hit-rate-limit';

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class InvalidCredentialsException extends BaseException {
constructor(message = 'Invalid user credentials.') {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class InvalidIPException extends BaseException {
constructor(message = 'Invalid IP address.') {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class InvalidOTPException extends BaseException {
constructor(message = 'Invalid user OTP.') {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class InvalidPayloadException extends BaseException {
constructor(message: string, extensions?: Record<string, unknown>) {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class InvalidQueryException extends BaseException {
constructor(message: string) {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
type Extensions = {
allow: string[];

View File

@@ -1,5 +1,5 @@
import { Range } from '@directus/drive';
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class RangeNotSatisfiableException extends BaseException {
constructor(range: Range) {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class RouteNotFoundException extends BaseException {
constructor(path: string) {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
type Extensions = {
service: string;

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class UnprocessableEntityException extends BaseException {
constructor(message: string) {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class UserSuspendedException extends BaseException {
constructor(message = 'User suspended.') {

View File

@@ -7,8 +7,8 @@ import {
getLocalExtensions,
getPackageExtensions,
resolvePackage,
} from '@directus/shared/utils';
import { APP_EXTENSION_TYPES, SHARED_DEPS } from '@directus/shared/constants';
} from '@directus/shared/utils/node';
import { APP_EXTENSION_TYPES, APP_SHARED_DEPS } from '@directus/shared/constants';
import getDatabase from './database';
import emitter from './emitter';
import env from './env';
@@ -31,10 +31,15 @@ let extensions: Extension[] = [];
let extensionBundles: Partial<Record<AppExtensionType, string>> = {};
export async function initializeExtensions(): Promise<void> {
await ensureExtensionDirs(env.EXTENSIONS_PATH);
extensions = await getExtensions();
try {
await ensureExtensionDirs(env.EXTENSIONS_PATH);
extensions = await getExtensions();
} catch (err) {
logger.warn(`Couldn't load extensions`);
logger.warn(err);
}
if (!('DIRECTUS_DEV' in process.env)) {
if (env.SERVE_APP ?? env.NODE_ENV !== 'development') {
extensionBundles = await generateExtensionBundles();
}
@@ -74,7 +79,7 @@ async function getExtensions(): Promise<Extension[]> {
}
async function generateExtensionBundles() {
const sharedDepsMapping = await getSharedDepsMapping(SHARED_DEPS);
const sharedDepsMapping = await getSharedDepsMapping(APP_SHARED_DEPS);
const internalImports = Object.entries(sharedDepsMapping).map(([name, path]) => ({
find: name,
replacement: path,

View File

@@ -3,7 +3,7 @@
*/
import env from './env';
import { toArray } from './utils/to-array';
import { toArray } from '@directus/shared/utils';
import { getConfigFromEnv } from './utils/get-config-from-env';
const enabledProviders = toArray(env.OAUTH_PROVIDERS).map((provider) => provider.toLowerCase());

View File

@@ -7,7 +7,7 @@ import env from './env';
const pinoOptions: LoggerOptions = {
level: env.LOG_LEVEL || 'info',
redact: {
paths: ['req.headers.authorization', 'req.cookies.directus_refresh_token'],
paths: ['req.headers.authorization', `req.cookies.${env.REFRESH_TOKEN_COOKIE_NAME}`],
censor: '--redact--',
},
};
@@ -19,16 +19,20 @@ if (env.LOG_STYLE !== 'raw') {
const logger = pino(pinoOptions);
export const expressLogger = pinoHTTP({
logger,
serializers: {
req(request: Request) {
const output = stdSerializers.req(request);
output.url = redactQuery(output.url);
return output;
},
export const expressLogger = pinoHTTP(
{
logger,
},
}) as RequestHandler;
{
serializers: {
req(request: Request) {
const output = stdSerializers.req(request);
output.url = redactQuery(output.url);
return output;
},
},
}
) as RequestHandler;
export default logger;

View File

@@ -1,6 +1,7 @@
import nodemailer, { Transporter } from 'nodemailer';
import env from './env';
import logger from './logger';
import { getConfigFromEnv } from './utils/get-config-from-env';
let transporter: Transporter;
@@ -23,13 +24,16 @@ export default function getMailer(): Transporter {
};
}
const tls: Record<string, unknown> = getConfigFromEnv('EMAIL_SMTP_TLS_');
transporter = nodemailer.createTransport({
pool: env.EMAIL_SMTP_POOL,
host: env.EMAIL_SMTP_HOST,
port: env.EMAIL_SMTP_PORT,
secure: env.EMAIL_SMTP_SECURE,
ignoreTLS: env.EMAIL_SMTP_IGNORE_TLS,
auth: auth,
auth,
tls,
} as Record<string, unknown>);
} else if (env.EMAIL_TRANSPORT.toLowerCase() === 'mailgun') {
const mg = require('nodemailer-mailgun-transport');
@@ -39,6 +43,7 @@ export default function getMailer(): Transporter {
api_key: env.EMAIL_MAILGUN_API_KEY,
domain: env.EMAIL_MAILGUN_DOMAIN,
},
host: env.EMAIL_MAILGUN_HOST || 'https://api.mailgun.net',
}) as any
);
} else {
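
The new tls object is assembled from any EMAIL_SMTP_TLS_* variables; a minimal sketch, assuming getConfigFromEnv() strips the prefix and camel-cases the remainder (the variable name below is hypothetical):

// Hypothetical environment:
//   EMAIL_SMTP_TLS_REJECT_UNAUTHORIZED=false
//
// getConfigFromEnv('EMAIL_SMTP_TLS_') would then produce:
const tls: Record<string, unknown> = { rejectUnauthorized: false };
// ...and nodemailer receives it unchanged:
// nodemailer.createTransport({ pool, host, port, secure, ignoreTLS, auth, tls });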

View File

@@ -4,6 +4,7 @@ import env from '../env';
import asyncHandler from '../utils/async-handler';
import { getCacheControlHeader } from '../utils/get-cache-headers';
import { getCacheKey } from '../utils/get-cache-key';
import logger from '../logger';
const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next) => {
const { cache } = getCache();
@@ -17,10 +18,26 @@ const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next)
}
const key = getCacheKey(req);
const cachedData = await cache.get(key);
let cachedData;
try {
cachedData = await cache.get(key);
} catch (err) {
logger.warn(err, `[cache] Couldn't read key ${key}. ${err.message}`);
return next();
}
if (cachedData) {
const cacheExpiryDate = (await cache.get(`${key}__expires_at`)) as number | null;
let cacheExpiryDate;
try {
cacheExpiryDate = (await cache.get(`${key}__expires_at`)) as number | null;
} catch (err) {
logger.warn(err, `[cache] Couldn't read key ${`${key}__expires_at`}. ${err.message}`);
return next();
}
const cacheTTL = cacheExpiryDate ? cacheExpiryDate - Date.now() : null;
res.setHeader('Cache-Control', getCacheControlHeader(req, cacheTTL));
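
Both reads follow the same design choice: a failing cache is treated as a cache miss, never as a fatal error. A hypothetical helper (not part of the codebase) that captures the pattern:

import Keyv from 'keyv';
import logger from '../logger'; // path as used elsewhere in the middleware directory

// Hypothetical helper: read from the cache without letting a cache outage break the request.
async function readCacheSafely<T>(cache: Keyv, key: string): Promise<T | undefined> {
  try {
    return (await cache.get(key)) as T | undefined;
  } catch (err) {
    logger.warn(err, `[cache] Couldn't read key ${key}. ${err.message}`);
    return undefined; // treat a failing cache as a miss and fall through to an uncached response
  }
}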

View File

@@ -1,9 +1,10 @@
import { ErrorRequestHandler } from 'express';
import { emitAsyncSafe } from '../emitter';
import env from '../env';
import { BaseException, MethodNotAllowedException } from '../exceptions';
import { MethodNotAllowedException } from '../exceptions';
import { BaseException } from '@directus/shared/exceptions';
import logger from '../logger';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
// Note: keep all 4 parameters here. That's how Express recognizes it's the error handler, even if
// we don't use next

View File

@@ -8,6 +8,7 @@ import asyncHandler from '../utils/async-handler';
import { getCacheKey } from '../utils/get-cache-key';
import { parse as toXML } from 'js2xmlparser';
import { getCacheControlHeader } from '../utils/get-cache-headers';
import logger from '../logger';
export const respond: RequestHandler = asyncHandler(async (req, res) => {
const { cache } = getCache();
@@ -20,8 +21,14 @@ export const respond: RequestHandler = asyncHandler(async (req, res) => {
res.locals.cache !== false
) {
const key = getCacheKey(req);
await cache.set(key, res.locals.payload, ms(env.CACHE_TTL as string));
await cache.set(`${key}__expires_at`, Date.now() + ms(env.CACHE_TTL as string));
try {
await cache.set(key, res.locals.payload, ms(env.CACHE_TTL as string));
await cache.set(`${key}__expires_at`, Date.now() + ms(env.CACHE_TTL as string));
} catch (err) {
logger.warn(err, `[cache] Couldn't set key ${key}. ${err}`);
}
res.setHeader('Cache-Control', getCacheControlHeader(req, ms(env.CACHE_TTL as string)));
res.setHeader('Vary', 'Origin, Cache-Control');
} else {

View File

@@ -1,6 +1,7 @@
import { RequestHandler } from 'express';
import Joi from 'joi';
import { FailedValidationException, InvalidPayloadException } from '../exceptions';
import { InvalidPayloadException } from '../exceptions';
import { FailedValidationException } from '@directus/shared/exceptions';
import asyncHandler from '../utils/async-handler';
import { sanitizeQuery } from '../utils/sanitize-query';

View File

@@ -8,6 +8,7 @@ import url from 'url';
import createApp from './app';
import getDatabase from './database';
import { emitAsyncSafe } from './emitter';
import env from './env';
import logger from './logger';
export default async function createServer(): Promise<http.Server> {
@@ -86,9 +87,7 @@ export default async function createServer(): Promise<http.Server> {
async function beforeShutdown() {
emitAsyncSafe('server.stop.before', { server });
if ('DIRECTUS_DEV' in process.env) {
logger.info('Restarting...');
} else {
if (env.NODE_ENV !== 'development') {
logger.info('Shutting down...');
}
}
@@ -102,7 +101,7 @@ export default async function createServer(): Promise<http.Server> {
async function onShutdown() {
emitAsyncSafe('server.stop');
if (!('DIRECTUS_DEV' in process.env)) {
if (env.NODE_ENV !== 'development') {
logger.info('Directus shut down OK. Bye bye!');
}
}

View File

@@ -1,15 +1,18 @@
import { Range, StatResponse } from '@directus/drive';
import { Knex } from 'knex';
import path from 'path';
import sharp, { ResizeOptions } from 'sharp';
import getDatabase from '../database';
import { RangeNotSatisfiableException, IllegalAssetTransformation } from '../exceptions';
import storage from '../storage';
import { AbstractServiceOptions, Accountability, Transformation } from '../types';
import { AuthorizationService } from './authorization';
import { Semaphore } from 'async-mutex';
import { Knex } from 'knex';
import { contentType } from 'mime-types';
import ObjectHash from 'object-hash';
import path from 'path';
import sharp from 'sharp';
import getDatabase from '../database';
import env from '../env';
import { File } from '../types';
import { IllegalAssetTransformation, RangeNotSatisfiableException } from '../exceptions';
import storage from '../storage';
import { AbstractServiceOptions, File, Transformation, TransformationParams, TransformationPreset } from '../types';
import { Accountability } from '@directus/shared/types';
import { AuthorizationService } from './authorization';
import * as TransformationUtils from '../utils/transformations';
sharp.concurrency(1);
@@ -30,7 +33,7 @@ export class AssetsService {
async getAsset(
id: string,
transformation: Transformation,
transformation: TransformationParams | TransformationPreset,
range?: Range
): Promise<{ stream: NodeJS.ReadableStream; file: any; stat: StatResponse }> {
const publicSettings = await this.knex
@@ -53,18 +56,23 @@ export class AssetsService {
}
const type = file.type;
const transforms = TransformationUtils.resolvePreset(transformation, file);
// We can only transform JPEG, PNG, WebP, and TIFF
if (type && Object.keys(transformation).length > 0 && ['image/jpeg', 'image/png', 'image/webp'].includes(type)) {
const resizeOptions = this.parseTransformation(transformation);
if (type && transforms.length > 0 && ['image/jpeg', 'image/png', 'image/webp', 'image/tiff'].includes(type)) {
const maybeNewFormat = TransformationUtils.maybeExtractFormat(transforms);
const assetFilename =
path.basename(file.filename_disk, path.extname(file.filename_disk)) +
this.getAssetSuffix(transformation) +
path.extname(file.filename_disk);
getAssetSuffix(transforms) +
(maybeNewFormat ? `.${maybeNewFormat}` : path.extname(file.filename_disk));
const { exists } = await storage.disk(file.storage).exists(assetFilename);
if (maybeNewFormat) {
file.type = contentType(assetFilename) || null;
}
if (exists) {
return {
stream: storage.disk(file.storage).getStream(assetFilename, range),
@@ -94,15 +102,9 @@ export class AssetsService {
const transformer = sharp({
limitInputPixels: Math.pow(env.ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION, 2),
sequentialRead: true,
})
.rotate()
.resize(resizeOptions);
}).rotate();
if (transformation.quality) {
transformer.toFormat(type.substring(6) as 'jpeg' | 'png' | 'webp', {
quality: Number(transformation.quality),
});
}
transforms.forEach(([method, ...args]) => (transformer[method] as any).apply(transformer, args));
await storage.disk(file.storage).put(assetFilename, readStream.pipe(transformer), type);
@@ -118,28 +120,9 @@ export class AssetsService {
return { stream: readStream, file, stat };
}
}
private parseTransformation(transformation: Transformation): ResizeOptions {
const resizeOptions: ResizeOptions = {};
if (transformation.width) resizeOptions.width = Number(transformation.width);
if (transformation.height) resizeOptions.height = Number(transformation.height);
if (transformation.fit) resizeOptions.fit = transformation.fit;
if (transformation.withoutEnlargement)
resizeOptions.withoutEnlargement = Boolean(transformation.withoutEnlargement);
return resizeOptions;
}
private getAssetSuffix(transformation: Transformation) {
if (Object.keys(transformation).length === 0) return '';
return (
'__' +
Object.entries(transformation)
.sort((a, b) => (a[0] > b[0] ? 1 : -1))
.map((e) => e.join('_'))
.join(',')
);
}
}
const getAssetSuffix = (transforms: Transformation[]) => {
if (Object.keys(transforms).length === 0) return '';
return `__${ObjectHash.sha1(transforms)}`;
};
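
After this refactor a transformation is just an ordered list of Sharp calls, and the cached rendition's filename is keyed off a hash of that list rather than a serialized key=value string. A minimal sketch with hypothetical transforms, assuming the [method, ...args] tuple shape consumed by the forEach above:

import ObjectHash from 'object-hash';
import sharp from 'sharp';

// Hypothetical resolved transforms, applied to the sharp instance in order.
const transforms: [string, ...unknown[]][] = [
  ['resize', { width: 300, height: 300, fit: 'cover' }],
  ['blur', 45],
  ['toFormat', 'webp', { quality: 80 }],
];

const transformer = sharp({ sequentialRead: true }).rotate();
transforms.forEach(([method, ...args]) => (transformer as any)[method](...args));
// equivalent to: .rotate().resize({ ... }).blur(45).toFormat('webp', { quality: 80 })

// The cached file gets a stable, order-sensitive hash suffix:
const assetFilename = 'some-file-id' + '__' + ObjectHash.sha1(transforms) + '.webp';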

View File

@@ -15,9 +15,12 @@ import {
} from '../exceptions';
import { createRateLimiter } from '../rate-limiter';
import { ActivityService } from '../services/activity';
import { AbstractServiceOptions, Accountability, Action, SchemaOverview, Session } from '../types';
import { AbstractServiceOptions, Action, SchemaOverview, Session } from '../types';
import { Accountability } from '@directus/shared/types';
import { SettingsService } from './settings';
import { merge } from 'lodash';
import { performance } from 'perf_hooks';
import { stall } from '../utils/stall';
type AuthenticateOptions = {
email: string;
@@ -52,6 +55,9 @@ export class AuthenticationService {
async authenticate(
options: AuthenticateOptions
): Promise<{ accessToken: any; refreshToken: any; expires: any; id?: any }> {
const STALL_TIME = 100;
const timeStart = performance.now();
const settingsService = new SettingsService({
knex: this.knex,
schema: this.schema,
@@ -97,8 +103,10 @@ export class AuthenticationService {
emitStatus('fail');
if (user?.status === 'suspended') {
await stall(STALL_TIME, timeStart);
throw new UserSuspendedException();
} else {
await stall(STALL_TIME, timeStart);
throw new InvalidCredentialsException();
}
}
@@ -125,17 +133,20 @@ export class AuthenticationService {
if (password !== undefined) {
if (!user.password) {
emitStatus('fail');
await stall(STALL_TIME, timeStart);
throw new InvalidCredentialsException();
}
if ((await argon2.verify(user.password, password)) === false) {
emitStatus('fail');
await stall(STALL_TIME, timeStart);
throw new InvalidCredentialsException();
}
}
if (user.tfa_secret && !otp) {
emitStatus('fail');
await stall(STALL_TIME, timeStart);
throw new InvalidOTPException(`"otp" is required`);
}
@@ -144,6 +155,7 @@ export class AuthenticationService {
if (otpValid === false) {
emitStatus('fail');
await stall(STALL_TIME, timeStart);
throw new InvalidOTPException(`"otp" is invalid`);
}
}
@@ -193,6 +205,8 @@ export class AuthenticationService {
await loginAttemptsLimiter.set(user.id, 0, 0);
}
await stall(STALL_TIME, timeStart);
return {
accessToken,
refreshToken,
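
The STALL_TIME padding makes failed and successful logins take roughly the same wall-clock time, so response timing reveals less about which check failed. The stall() util itself isn't part of this diff; a minimal sketch of what such a helper could look like, assuming it simply sleeps for whatever portion of the budget is left:

import { performance } from 'perf_hooks';

// Hypothetical implementation: wait until at least `ms` milliseconds have passed since `start`.
export async function stall(ms: number, start: number): Promise<void> {
  const elapsed = performance.now() - start;
  if (elapsed >= ms) return;
  await new Promise((resolve) => setTimeout(resolve, ms - elapsed));
}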

View File

@@ -1,13 +1,14 @@
import { Knex } from 'knex';
import { cloneDeep, flatten, merge, uniq, uniqWith } from 'lodash';
import { cloneDeep, merge, uniq, uniqWith, flatten, isNil } from 'lodash';
import getDatabase from '../database';
import { FailedValidationException, ForbiddenException } from '../exceptions';
import { ForbiddenException } from '../exceptions';
import { FailedValidationException } from '@directus/shared/exceptions';
import { validatePayload, parseFilter } from '@directus/shared/utils';
import { Accountability } from '@directus/shared/types';
import {
AbstractServiceOptions,
Accountability,
AST,
FieldNode,
Filter,
Item,
NestedCollectionNode,
Permission,
@@ -15,9 +16,8 @@ import {
PrimaryKey,
Query,
SchemaOverview,
Aggregate,
} from '../types';
import generateJoi from '../utils/generate-joi';
import { parseFilter } from '../utils/parse-filter';
import { ItemsService } from './items';
import { PayloadService } from './payload';
@@ -71,7 +71,13 @@ export class AuthorizationService {
if (ast.type === 'm2a') {
collections.push(...ast.names.map((name) => ({ collection: name, field: ast.fieldKey })));
/** @TODO add nestedNode */
for (const children of Object.values(ast.children)) {
for (const nestedNode of children) {
if (nestedNode.type !== 'field') {
collections.push(...getCollectionsFromAST(nestedNode));
}
}
}
} else {
collections.push({
collection: ast.name,
@@ -89,17 +95,23 @@ export class AuthorizationService {
}
function validateFields(ast: AST | NestedCollectionNode | FieldNode) {
if (ast.type !== 'field' && ast.type !== 'm2a') {
/** @TODO remove m2a check */
const collection = ast.name;
if (ast.type !== 'field') {
if (ast.type === 'm2a') {
for (const [collection, children] of Object.entries(ast.children)) {
checkFields(collection, children, ast.query?.[collection]?.aggregate);
}
} else {
checkFields(ast.name, ast.children, ast.query?.aggregate);
}
}
function checkFields(collection: string, children: (NestedCollectionNode | FieldNode)[], aggregate?: Aggregate) {
// We check the availability of the permissions in the step before this is run
const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!;
const allowedFields = permissions.fields || [];
if (ast.query.aggregate && allowedFields.includes('*') === false) {
for (const [_operation, aliasMap] of Object.entries(ast.query.aggregate)) {
if (aggregate && allowedFields.includes('*') === false) {
for (const [_operation, aliasMap] of Object.entries(aggregate)) {
if (!aliasMap) continue;
for (const [column, _alias] of Object.entries(aliasMap)) {
@@ -108,7 +120,7 @@ export class AuthorizationService {
}
}
for (const childNode of ast.children) {
for (const childNode of children) {
if (childNode.type !== 'field') {
validateFields(childNode);
continue;
@@ -129,43 +141,61 @@ export class AuthorizationService {
ast: AST | NestedCollectionNode | FieldNode,
accountability: Accountability | null
): AST | NestedCollectionNode | FieldNode {
if (ast.type !== 'field' && ast.type !== 'm2a') {
/** @TODO remove m2a check */
const collection = ast.name;
if (ast.type !== 'field') {
if (ast.type === 'm2a') {
const collections = Object.keys(ast.children);
for (const collection of collections) {
updateFilterQuery(collection, ast.query[collection]);
}
for (const [collection, children] of Object.entries(ast.children)) {
ast.children[collection] = children.map((child) => applyFilters(child, accountability)) as (
| NestedCollectionNode
| FieldNode
)[];
}
} else {
const collection = ast.name;
updateFilterQuery(collection, ast.query);
ast.children = ast.children.map((child) => applyFilters(child, accountability)) as (
| NestedCollectionNode
| FieldNode
)[];
}
}
return ast;
function updateFilterQuery(collection: string, query: Query) {
// We check the availability of the permissions in the step before this is run
const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!;
const parsedPermissions = parseFilter(permissions.permissions, accountability);
if (!ast.query.filter || Object.keys(ast.query.filter).length === 0) {
ast.query.filter = { _and: [] };
if (!query.filter || Object.keys(query.filter).length === 0) {
query.filter = { _and: [] };
} else {
ast.query.filter = { _and: [ast.query.filter] };
query.filter = { _and: [query.filter] };
}
if (parsedPermissions && Object.keys(parsedPermissions).length > 0) {
ast.query.filter._and.push(parsedPermissions);
query.filter._and.push(parsedPermissions);
}
if (ast.query.filter._and.length === 0) delete ast.query.filter._and;
if (query.filter._and.length === 0) delete query.filter._and;
if (permissions.limit && ast.query.limit && ast.query.limit > permissions.limit) {
if (permissions.limit && query.limit && query.limit > permissions.limit) {
throw new ForbiddenException();
}
// Default to the permissions limit if limit hasn't been set
if (permissions.limit && !ast.query.limit) {
ast.query.limit = permissions.limit;
if (permissions.limit && !query.limit) {
query.limit = permissions.limit;
}
ast.children = ast.children.map((child) => applyFilters(child, accountability)) as (
| NestedCollectionNode
| FieldNode
)[];
}
return ast;
}
}
@@ -173,8 +203,6 @@ export class AuthorizationService {
* Checks if the provided payload matches the configured permissions, and adds the presets to the payload.
*/
validatePayload(action: PermissionsAction, collection: string, data: Partial<Item>): Promise<Partial<Item>> {
const validationErrors: FailedValidationException[] = [];
const payload = cloneDeep(data);
let permission: Permission | undefined;
@@ -216,44 +244,57 @@ export class AuthorizationService {
const payloadWithPresets = merge({}, preset, payload);
const requiredColumns: string[] = [];
const hasValidationRules =
isNil(permission.validation) === false && Object.keys(permission.validation ?? {}).length > 0;
for (const [name, field] of Object.entries(this.schema.collections[collection].fields)) {
const requiredColumns: SchemaOverview['collections'][string]['fields'][string][] = [];
for (const field of Object.values(this.schema.collections[collection].fields)) {
const specials = field?.special ?? [];
const hasGenerateSpecial = ['uuid', 'date-created', 'role-created', 'user-created'].some((name) =>
specials.includes(name)
);
const isRequired = field.nullable === false && field.defaultValue === null && hasGenerateSpecial === false;
const notNullable = field.nullable === false && hasGenerateSpecial === false;
if (isRequired) {
requiredColumns.push(name);
if (notNullable) {
requiredColumns.push(field);
}
}
if (hasValidationRules === false && requiredColumns.length === 0) {
return payloadWithPresets;
}
if (requiredColumns.length > 0) {
permission.validation = {
_and: [permission.validation, {}],
};
permission.validation = hasValidationRules ? { _and: [permission.validation] } : { _and: [] };
if (action === 'create') {
for (const name of requiredColumns) {
permission.validation._and[1][name] = {
_submitted: true,
};
for (const field of requiredColumns) {
if (action === 'create' && field.defaultValue === null) {
permission.validation._and.push({
[field.field]: {
_submitted: true,
},
});
}
} else {
for (const name of requiredColumns) {
permission.validation._and[1][name] = {
permission.validation._and.push({
[field.field]: {
_nnull: true,
};
}
},
});
}
}
const validationErrors: FailedValidationException[] = [];
validationErrors.push(
...this.validateJoi(parseFilter(permission.validation || {}, this.accountability), payloadWithPresets)
...flatten(
validatePayload(parseFilter(permission.validation!, this.accountability), payloadWithPresets).map((error) =>
error.details.map((details) => new FailedValidationException(details))
)
)
);
if (validationErrors.length > 0) throw validationErrors;
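
The required-column handling now expresses itself as extra clauses appended to permission.validation, which the shared validatePayload() util then checks against the payload. A sketch of the rule object that results for a hypothetical collection with one pre-existing rule and one non-nullable title field without a default:

// Hypothetical: action === 'create'
const validation = {
  _and: [
    { status: { _eq: 'draft' } }, // pre-existing permission.validation, kept as the first clause
    { title: { _submitted: true } }, // non-nullable column without a default must be submitted on create
  ],
};
// On update the generated clause becomes { title: { _nnull: true } } instead.
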
@@ -261,48 +302,6 @@ export class AuthorizationService {
return payloadWithPresets;
}
validateJoi(validation: Filter, payload: Partial<Item>): FailedValidationException[] {
if (!validation) return [];
const errors: FailedValidationException[] = [];
/**
* Note there can only be a single _and / _or per level
*/
if (Object.keys(validation)[0] === '_and') {
const subValidation = Object.values(validation)[0];
const nestedErrors = flatten<FailedValidationException>(
subValidation.map((subObj: Record<string, any>) => {
return this.validateJoi(subObj, payload);
})
).filter((err?: FailedValidationException) => err);
errors.push(...nestedErrors);
} else if (Object.keys(validation)[0] === '_or') {
const subValidation = Object.values(validation)[0];
const nestedErrors = flatten<FailedValidationException>(
subValidation.map((subObj: Record<string, any>) => this.validateJoi(subObj, payload))
);
const allErrored = subValidation.length === nestedErrors.length;
if (allErrored) {
errors.push(...nestedErrors);
}
} else {
const schema = generateJoi(validation);
const { error } = schema.validate(payload, { abortEarly: false });
if (error) {
errors.push(...error.details.map((details) => new FailedValidationException(details)));
}
}
return errors;
}
async checkAccess(action: PermissionsAction, collection: string, pk: PrimaryKey | PrimaryKey[]): Promise<void> {
if (this.accountability?.admin === true) return;

View File

@@ -10,14 +10,8 @@ import logger from '../logger';
import { FieldsService, RawField } from '../services/fields';
import { ItemsService, MutationOptions } from '../services/items';
import Keyv from 'keyv';
import {
AbstractServiceOptions,
Accountability,
Collection,
CollectionMeta,
FieldMeta,
SchemaOverview,
} from '../types';
import { AbstractServiceOptions, Collection, CollectionMeta, SchemaOverview } from '../types';
import { Accountability, FieldMeta } from '@directus/shared/types';
export type RawCollection = {
collection: string;
@@ -213,6 +207,11 @@ export class CollectionsService {
const collections: Collection[] = [];
/**
* The collections as known in the schema cache.
*/
const knownCollections = Object.keys(this.schema.collections);
for (const table of tablesInDatabase) {
const collection: Collection = {
collection: table.name,
@@ -220,7 +219,12 @@ export class CollectionsService {
schema: table,
};
collections.push(collection);
// By only returning collections that are known in the schema cache, we prevent weird
// situations where the collections endpoint returns different info from every other
// collection
if (knownCollections.includes(table.name)) {
collections.push(collection);
}
}
return collections;
@@ -272,6 +276,8 @@ export class CollectionsService {
const collections: Collection[] = [];
const knownCollections = Object.keys(this.schema.collections);
for (const table of tables) {
const collection: Collection = {
collection: table.name,
@@ -279,7 +285,12 @@ export class CollectionsService {
schema: table,
};
collections.push(collection);
// By only returning collections that are known in the schema cache, we prevent weird
// situations where the collections endpoint returns different info from every other
// collection
if (knownCollections.includes(table.name)) {
collections.push(collection);
}
}
return collections;

View File

@@ -11,16 +11,18 @@ import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { translateDatabaseError } from '../exceptions/database/translate';
import { ItemsService } from '../services/items';
import { PayloadService } from '../services/payload';
import { AbstractServiceOptions, Accountability, FieldMeta, SchemaOverview, types } from '../types';
import { Field } from '../types/field';
import { AbstractServiceOptions, SchemaOverview } from '../types';
import { Accountability } from '@directus/shared/types';
import { Field, FieldMeta, Type } from '@directus/shared/types';
import getDefaultValue from '../utils/get-default-value';
import getLocalType from '../utils/get-local-type';
import { toArray } from '../utils/to-array';
import { isEqual } from 'lodash';
import { toArray } from '@directus/shared/utils';
import { isEqual, isNil } from 'lodash';
import { RelationsService } from './relations';
import Keyv from 'keyv';
import { DeepPartial } from '@directus/shared/types';
export type RawField = DeepPartial<Field> & { field: string; type: typeof types[number] };
export type RawField = DeepPartial<Field> & { field: string; type: Type };
export class FieldsService {
knex: Knex;
@@ -213,15 +215,20 @@ export class FieldsService {
async createField(
collection: string,
field: Partial<Field> & { field: string; type: typeof types[number] | null },
field: Partial<Field> & { field: string; type: Type | null },
table?: Knex.CreateTableBuilder // allows collection creation to
): Promise<void> {
if (this.accountability && this.accountability.admin !== true) {
throw new ForbiddenException();
}
const exists =
field.field in this.schema.collections[collection].fields ||
isNil(await this.knex.select('id').from('directus_fields').where({ collection, field: field.field }).first()) ===
false;
// Check if field already exists, either as a column, or as a row in directus_fields
if (field.field in this.schema.collections[collection].fields) {
if (exists) {
throw new InvalidPayloadException(`Field "${field.field}" already exists in collection "${collection}"`);
}
@@ -313,7 +320,6 @@ export class FieldsService {
return field.field;
}
/** @todo save accountability */
async deleteField(collection: string, field: string): Promise<void> {
if (this.accountability && this.accountability.admin !== true) {
throw new ForbiddenException();
@@ -434,6 +440,9 @@ export class FieldsService {
public addColumnToTable(table: Knex.CreateTableBuilder, field: RawField | Field, alter: Column | null = null): void {
let column: Knex.ColumnBuilder;
// Don't attempt to add a DB column for alias / corrupt fields
if (field.type === 'alias' || field.type === 'unknown') return;
if (field.schema?.has_auto_increment) {
column = table.increments(field.field);
} else if (field.type === 'string') {
@@ -445,6 +454,10 @@ export class FieldsService {
column = table.string(field.field);
} else if (field.type === 'hash') {
column = table.string(field.field, 255);
} else if (field.type === 'dateTime') {
column = table.dateTime(field.field, { useTz: false });
} else if (field.type === 'timestamp') {
column = table.timestamp(field.field, { useTz: true });
} else {
column = table[field.type](field.field);
}
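
The two new branches make the temporal column types explicit about time zones; a small sketch of what Knex creates for each, assuming a PostgreSQL connection:

import { Knex } from 'knex';

// Hypothetical table, mirroring the new addColumnToTable() branches.
function addTemporalColumns(table: Knex.CreateTableBuilder): void {
  table.dateTime('published_local', { useTz: false }); // Postgres: timestamp without time zone
  table.timestamp('published_at', { useTz: true }); // Postgres: timestamp with time zone
}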

View File

@@ -1,7 +1,6 @@
import formatTitle from '@directus/format-title';
import axios, { AxiosResponse } from 'axios';
import parseEXIF from 'exif-reader';
import { parse as parseICC } from 'icc';
import exifr from 'exifr';
import { clone } from 'lodash';
import { extension } from 'mime-types';
import path from 'path';
@@ -13,8 +12,7 @@ import { ForbiddenException, ServiceUnavailableException } from '../exceptions';
import logger from '../logger';
import storage from '../storage';
import { AbstractServiceOptions, File, PrimaryKey } from '../types';
import parseIPTC from '../utils/parse-iptc';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
import { ItemsService, MutationOptions } from './items';
export class FilesService extends ItemsService {
@@ -32,6 +30,14 @@ export class FilesService extends ItemsService {
): Promise<PrimaryKey> {
const payload = clone(data);
if ('folder' in payload === false) {
const settings = await this.knex.select('storage_default_folder').from('directus_settings').first();
if (settings?.storage_default_folder) {
payload.folder = settings.storage_default_folder;
}
}
if (primaryKey !== undefined) {
await this.updateOne(primaryKey, payload, { emitEvents: false });
@@ -46,9 +52,10 @@ export class FilesService extends ItemsService {
primaryKey = await this.createOne(payload, { emitEvents: false });
}
const fileExtension = path.extname(payload.filename_download) || (payload.type && extension(payload.type));
const fileExtension =
path.extname(payload.filename_download) || (payload.type && '.' + extension(payload.type)) || '';
payload.filename_disk = primaryKey + '.' + fileExtension;
payload.filename_disk = primaryKey + (fileExtension || '');
if (!payload.type) {
payload.type = 'application/octet-stream';
@@ -77,37 +84,30 @@ export class FilesService extends ItemsService {
payload.height = meta.height;
}
payload.filesize = meta.size;
payload.metadata = {};
if (meta.icc) {
try {
payload.metadata.icc = parseICC(meta.icc);
} catch (err) {
logger.warn(`Couldn't extract ICC information from file`);
logger.warn(err);
try {
payload.metadata = await exifr.parse(buffer.content, {
icc: true,
iptc: true,
ifd1: true,
interop: true,
translateValues: true,
reviveValues: true,
mergeOutput: false,
});
if (payload.metadata?.iptc?.Headline) {
payload.title = payload.metadata.iptc.Headline;
}
}
if (meta.exif) {
try {
payload.metadata.exif = parseEXIF(meta.exif);
} catch (err) {
logger.warn(`Couldn't extract EXIF information from file`);
logger.warn(err);
if (!payload.description && payload.metadata?.iptc?.Caption) {
payload.description = payload.metadata.iptc.Caption;
}
}
if (meta.iptc) {
try {
payload.metadata.iptc = parseIPTC(meta.iptc);
payload.title = payload.metadata.iptc.headline || payload.title;
payload.description = payload.description || payload.metadata.iptc.caption;
payload.tags = payload.metadata.iptc.keywords;
} catch (err) {
logger.warn(`Couldn't extract IPTC information from file`);
logger.warn(err);
if (payload.metadata?.iptc?.Keywords) {
payload.tags = payload.metadata.iptc.Keywords;
}
} catch (err) {
logger.warn(`Couldn't extract metadata from file`);
logger.warn(err);
}
}

View File

@@ -44,11 +44,14 @@ import {
import { Knex } from 'knex';
import { flatten, get, mapKeys, merge, set, uniq } from 'lodash';
import ms from 'ms';
import { getCache } from '../cache';
import getDatabase from '../database';
import env from '../env';
import { BaseException, GraphQLValidationException, InvalidPayloadException } from '../exceptions';
import { ForbiddenException, GraphQLValidationException, InvalidPayloadException } from '../exceptions';
import { BaseException } from '@directus/shared/exceptions';
import { listExtensions } from '../extensions';
import { AbstractServiceOptions, Accountability, Action, GraphQLParams, Item, Query, SchemaOverview } from '../types';
import { Accountability } from '@directus/shared/types';
import { AbstractServiceOptions, Action, GraphQLParams, Item, Query, SchemaOverview } from '../types';
import { getGraphQLType } from '../utils/get-graphql-type';
import { reduceSchema } from '../utils/reduce-schema';
import { sanitizeQuery } from '../utils/sanitize-query';
@@ -1103,7 +1106,7 @@ export class GraphQLService {
* Select the correct service for the given collection. This allows the individual services to run
* their custom checks (e.g. it allows UsersService to prevent updating TFA secret from outside)
*/
getService(collection: string): RolesService {
getService(collection: string): ItemsService {
const opts = {
knex: this.knex,
accountability: this.accountability,
@@ -1376,7 +1379,7 @@ export class GraphQLService {
userAgent: req?.get('user-agent'),
});
if (args.mode === 'cookie') {
res?.cookie('directus_refresh_token', result.refreshToken, {
res?.cookie(env.REFRESH_TOKEN_COOKIE_NAME, result.refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
@@ -1407,13 +1410,13 @@ export class GraphQLService {
accountability: accountability,
schema: this.schema,
});
const currentRefreshToken = args.refresh_token || req?.cookies.directus_refresh_token;
const currentRefreshToken = args.refresh_token || req?.cookies[env.REFRESH_TOKEN_COOKIE_NAME];
if (!currentRefreshToken) {
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
}
const result = await authenticationService.refresh(currentRefreshToken);
if (args.mode === 'cookie') {
res?.cookie('directus_refresh_token', result.refreshToken, {
res?.cookie(env.REFRESH_TOKEN_COOKIE_NAME, result.refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
@@ -1443,7 +1446,7 @@ export class GraphQLService {
accountability: accountability,
schema: this.schema,
});
const currentRefreshToken = args.refresh_token || req?.cookies.directus_refresh_token;
const currentRefreshToken = args.refresh_token || req?.cookies[env.REFRESH_TOKEN_COOKIE_NAME];
if (!currentRefreshToken) {
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
}
@@ -1609,6 +1612,21 @@ export class GraphQLService {
return true;
},
},
utils_cache_clear: {
type: GraphQLVoid,
resolve: async () => {
if (this.accountability?.admin !== true) {
throw new ForbiddenException();
}
const { cache, schemaCache } = getCache();
await cache?.clear();
await schemaCache?.clear();
return;
},
},
users_invite_accept: {
type: GraphQLBoolean,
args: {

View File

@@ -1,6 +1,7 @@
import { Knex } from 'knex';
import getDatabase from '../database';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types';
import { AbstractServiceOptions, SchemaOverview } from '../types';
import { Accountability } from '@directus/shared/types';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import StreamArray from 'stream-json/streamers/StreamArray';
import { ItemsService } from './items';

View File

@@ -9,10 +9,10 @@ import env from '../env';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { translateDatabaseError } from '../exceptions/database/translate';
import logger from '../logger';
import { Accountability } from '@directus/shared/types';
import {
AbstractService,
AbstractServiceOptions,
Accountability,
Action,
Item as AnyItem,
PermissionsAction,
@@ -21,7 +21,7 @@ import {
SchemaOverview,
} from '../types';
import getASTFromQuery from '../utils/get-ast-from-query';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
import { AuthorizationService } from './authorization';
import { PayloadService } from './payload';
@@ -279,6 +279,17 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
throw new ForbiddenException();
}
emitAsyncSafe(`${this.eventScope}.read`, {
event: `${this.eventScope}.read`,
accountability: this.accountability,
collection: this.collection,
query,
action: 'read',
payload: records,
schema: this.schema,
database: getDatabase(),
});
return records as Item[];
}
@@ -306,17 +317,6 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
throw new ForbiddenException();
}
emitAsyncSafe(`${this.eventScope}.read`, {
event: `${this.eventScope}.read`,
accountability: this.accountability,
collection: this.collection,
item: key,
action: 'read',
payload: results,
schema: this.schema,
database: getDatabase(),
});
return results[0];
}
@@ -344,17 +344,6 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
const results = await this.readByQuery(queryWithKeys, opts);
emitAsyncSafe(`${this.eventScope}.read`, {
event: `${this.eventScope}.read`,
accountability: this.accountability,
collection: this.collection,
item: keys,
action: 'read',
payload: results,
schema: this.schema,
database: getDatabase(),
});
return results;
}

View File

@@ -6,7 +6,8 @@ import getDatabase from '../../database';
import env from '../../env';
import { InvalidPayloadException } from '../../exceptions';
import logger from '../../logger';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../../types';
import { AbstractServiceOptions, SchemaOverview } from '../../types';
import { Accountability } from '@directus/shared/types';
import getMailer from '../../mailer';
import { Transporter, SendMailOptions } from 'nodemailer';
import prettier from 'prettier';

View File

@@ -1,10 +1,11 @@
import { Knex } from 'knex';
import getDatabase from '../database';
import { ForbiddenException } from '../exceptions';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types';
import { AbstractServiceOptions, SchemaOverview } from '../types';
import { Accountability } from '@directus/shared/types';
import { Query } from '../types/query';
import { applyFilter, applySearch } from '../utils/apply-query';
import { parseFilter } from '../utils/parse-filter';
import { parseFilter } from '@directus/shared/utils';
export class MetaService {
knex: Knex;

View File

@@ -1,13 +1,14 @@
import argon2 from 'argon2';
import { format, formatISO, parse, parseISO } from 'date-fns';
import { format, parseISO } from 'date-fns';
import Joi from 'joi';
import { Knex } from 'knex';
import { clone, cloneDeep, isObject, isPlainObject, omit } from 'lodash';
import { v4 as uuidv4 } from 'uuid';
import getDatabase from '../database';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { AbstractServiceOptions, Accountability, Item, PrimaryKey, Query, SchemaOverview } from '../types';
import { toArray } from '../utils/to-array';
import { AbstractServiceOptions, Item, PrimaryKey, Query, SchemaOverview, Alterations } from '../types';
import { Accountability } from '@directus/shared/types';
import { toArray } from '@directus/shared/utils';
import { ItemsService } from './items';
type Action = 'create' | 'read' | 'update';
@@ -21,16 +22,6 @@ type Transformers = {
}) => Promise<any>;
};
type Alterations = {
create: {
[key: string]: any;
}[];
update: {
[key: string]: any;
}[];
delete: (number | string)[];
};
/**
* Process a given payload for a collection to ensure the special fields (hash, uuid, date etc) are
* handled correctly.
@@ -50,13 +41,6 @@ export class PayloadService {
return this;
}
/**
* @todo allow this to be extended
*
* @todo allow these extended special types to have "field dependencies"?
* e.g. the file-links transformer needs the id and filename_download to be fetched from the DB
* in order to work
*/
public transformers: Transformers = {
async hash({ action, value }) {
if (!value) return;
@@ -222,11 +206,15 @@ export class PayloadService {
['dateTime', 'date', 'timestamp'].includes(field.type)
);
if (dateColumns.length === 0) return payloads;
const timeColumns = fieldsInCollection.filter(([_name, field]) => {
return field.type === 'time';
});
if (dateColumns.length === 0 && timeColumns.length === 0) return payloads;
for (const [name, dateColumn] of dateColumns) {
for (const payload of payloads) {
let value = payload[name];
let value: number | string | Date = payload[name];
if (value === null || value === '0000-00-00') {
payload[name] = null;
@@ -236,32 +224,54 @@ export class PayloadService {
if (!value) continue;
if (action === 'read') {
if (typeof value === 'string') value = new Date(value);
if (typeof value === 'number' || typeof value === 'string') {
value = new Date(value);
}
if (dateColumn.type === 'timestamp') {
const newValue = formatISO(value);
const newValue = value.toISOString();
payload[name] = newValue;
}
if (dateColumn.type === 'dateTime') {
// Strip off the Z at the end of a non-timezone datetime value
const newValue = format(value, "yyyy-MM-dd'T'HH:mm:ss");
const year = String(value.getUTCFullYear());
const month = String(value.getUTCMonth() + 1).padStart(2, '0');
const date = String(value.getUTCDate()).padStart(2, '0');
const hours = String(value.getUTCHours()).padStart(2, '0');
const minutes = String(value.getUTCMinutes()).padStart(2, '0');
const seconds = String(value.getUTCSeconds()).padStart(2, '0');
const newValue = `${year}-${month}-${date}T${hours}:${minutes}:${seconds}`;
payload[name] = newValue;
}
if (dateColumn.type === 'date') {
const [year, month, day] = value.toISOString().substr(0, 10).split('-');
// Strip off the time / timezone information from a date-only value
const newValue = format(value, 'yyyy-MM-dd');
const newValue = `${year}-${month}-${day}`;
payload[name] = newValue;
}
} else {
if (value instanceof Date === false) {
if (value instanceof Date === false && typeof value === 'string') {
if (dateColumn.type === 'date') {
const newValue = parse(value, 'yyyy-MM-dd', new Date());
payload[name] = newValue;
const [date] = value.split('T');
const [year, month, day] = date.split('-');
payload[name] = new Date(Date.UTC(Number(year), Number(month) - 1, Number(day)));
}
if (dateColumn.type === 'timestamp' || dateColumn.type === 'dateTime') {
if (dateColumn.type === 'dateTime') {
const [date, time] = value.split('T');
const [year, month, day] = date.split('-');
const [hours, minutes, seconds] = time.substring(0, 8).split(':');
payload[name] = new Date(
Date.UTC(Number(year), Number(month) - 1, Number(day), Number(hours), Number(minutes), Number(seconds))
);
}
if (dateColumn.type === 'timestamp') {
const newValue = parseISO(value);
payload[name] = newValue;
}
@@ -270,6 +280,22 @@ export class PayloadService {
}
}
/**
* Some DB drivers (e.g. MS SQL) return time values as Date objects. For consistency's sake,
* we'll abstract those back to hh:mm:ss.
*/
for (const [name] of timeColumns) {
for (const payload of payloads) {
const value = payload[name];
if (!value) continue;
if (action === 'read') {
if (value instanceof Date) payload[name] = format(value, 'HH:mm:ss');
}
}
}
return payloads;
}
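
The rewritten date handling above drops date-fns in favor of plain UTC getters, so a dateTime is serialized without a trailing Z and a date keeps only its calendar part. A minimal standalone sketch of that formatting — an illustrative helper, not code from the repository:

// Formats a Date to `yyyy-MM-ddTHH:mm:ss` using UTC components, matching the
// manual padding approach in the hunk above.
function formatDateTimeUTC(value: Date): string {
	const pad = (n: number): string => String(n).padStart(2, '0');
	const year = String(value.getUTCFullYear());
	const month = pad(value.getUTCMonth() + 1); // getUTCMonth() is zero-based
	const day = pad(value.getUTCDate());
	const hours = pad(value.getUTCHours());
	const minutes = pad(value.getUTCMinutes());
	const seconds = pad(value.getUTCSeconds());
	return `${year}-${month}-${day}T${hours}:${minutes}:${seconds}`;
}

// Example: formatDateTimeUTC(new Date('2021-08-06T16:14:29.000Z')) === '2021-08-06T16:14:29'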
@@ -318,6 +344,9 @@ export class PayloadService {
const relatedPrimary = this.schema.collections[relatedCollection].primary;
const relatedRecord: Partial<Item> = payload[relation.field];
if (['string', 'number'].includes(typeof relatedRecord)) continue;
const hasPrimaryKey = relatedPrimary in relatedRecord;
let relatedPrimaryKey: PrimaryKey = relatedRecord[relatedPrimary];
@@ -455,7 +484,8 @@ export class PayloadService {
schema: this.schema,
});
const relatedRecords: Partial<Item>[] = [];
const recordsToUpsert: Partial<Item>[] = [];
const savedPrimaryKeys: PrimaryKey[] = [];
// Nested array of individual items
if (Array.isArray(payload[relation.meta!.one_field!])) {
@@ -465,30 +495,46 @@ export class PayloadService {
let record = cloneDeep(relatedRecord);
if (typeof relatedRecord === 'string' || typeof relatedRecord === 'number') {
const exists = !!(await this.knex
.select(relatedPrimaryKeyField)
const existingRecord = await this.knex
.select(relatedPrimaryKeyField, relation.field)
.from(relation.collection)
.where({ [relatedPrimaryKeyField]: record })
.first());
.first();
if (exists === false) {
if (!!existingRecord === false) {
throw new ForbiddenException();
}
// If the related item is already associated with the current item, and there are no
// other updates (indicated by the fact that the payload is just the PK), we can
// skip updating this item. This makes sure we don't trigger any update logic
// for items that aren't actually being updated. NOTE: We use == here, as the
// primary key might be reported as a string instead of a number, coming from the
// HTTP route, and/or as a bigInteger in the DB.
if (
existingRecord[relation.field] == parent ||
existingRecord[relation.field] == payload[currentPrimaryKeyField]
) {
savedPrimaryKeys.push(existingRecord[relatedPrimaryKeyField]);
continue;
}
record = {
[relatedPrimaryKeyField]: relatedRecord,
};
}
relatedRecords.push({
recordsToUpsert.push({
...record,
[relation.field]: parent || payload[currentPrimaryKeyField],
});
}
const savedPrimaryKeys = await itemsService.upsertMany(relatedRecords, {
onRevisionCreate: (id) => revisions.push(id),
});
savedPrimaryKeys.push(
...(await itemsService.upsertMany(recordsToUpsert, {
onRevisionCreate: (id) => revisions.push(id),
}))
);
const query: Query = {
filter: {
@@ -540,7 +586,7 @@ export class PayloadService {
}
if (alterations.update) {
const primaryKeyField = this.schema.collections[this.collection].primary;
const primaryKeyField = this.schema.collections[relation.collection].primary;
for (const item of alterations.update) {
await itemsService.updateOne(
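
A note on the loose equality used in the dedup check above: a primary key posted through the HTTP route arrives as a string, while the database driver may hand the same key back as a number (or bigInteger), so a strict comparison would miss the "already linked, nothing else to update" case. A tiny illustrative example with made-up values:

// Why `==` rather than `===` in the "already associated" check above.
const fromRequest: string | number = '42'; // PK as it arrives in the HTTP payload
const fromDatabase: string | number = 42; // PK as the driver returns it

// eslint-disable-next-line eqeqeq
const alreadyLinked = fromRequest == fromDatabase; // true — treated as saved, skipped
const strictMatch = fromRequest === fromDatabase; // false — would wrongly re-upsert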

View File

@@ -1,8 +1,9 @@
import { Knex } from 'knex';
import { systemRelationRows } from '../database/system-data/relations';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { AbstractServiceOptions, SchemaOverview, Query, Relation, RelationMeta, Accountability } from '../types';
import { toArray } from '../utils/to-array';
import { AbstractServiceOptions, SchemaOverview, Query, Relation, RelationMeta } from '../types';
import { Accountability } from '@directus/shared/types';
import { toArray } from '@directus/shared/utils';
import { ItemsService, QueryOptions } from './items';
import { PermissionsService } from './permissions';
import SchemaInspector from '@directus/schema';

View File

@@ -1,6 +1,6 @@
import { UnprocessableEntityException } from '../exceptions';
import { AbstractServiceOptions, PrimaryKey } from '../types';
import { ItemsService } from './items';
import { ForbiddenException, UnprocessableEntityException } from '../exceptions';
import { AbstractServiceOptions, PrimaryKey, Query, Alterations, Item } from '../types';
import { ItemsService, MutationOptions } from './items';
import { PermissionsService } from './permissions';
import { PresetsService } from './presets';
import { UsersService } from './users';
@@ -10,21 +10,89 @@ export class RolesService extends ItemsService {
super('directus_roles', options);
}
private async checkForOtherAdminRoles(excludeKeys: PrimaryKey[]): Promise<void> {
// Make sure there's at least one admin role left after this deletion is done
const otherAdminRoles = await this.knex
.count('*', { as: 'count' })
.from('directus_roles')
.whereNotIn('id', excludeKeys)
.andWhere({ admin_access: true })
.first();
const otherAdminRolesCount = +(otherAdminRoles?.count || 0);
if (otherAdminRolesCount === 0) throw new UnprocessableEntityException(`You can't delete the last admin role.`);
}
private async checkForOtherAdminUsers(key: PrimaryKey, users: Alterations | Item[]): Promise<void> {
const role = await this.knex.select('admin_access').from('directus_roles').where('id', '=', key).first();
if (!role) throw new ForbiddenException();
// The users that will now be in this new non-admin role
let userKeys: PrimaryKey[] = [];
if (Array.isArray(users)) {
userKeys = users.map((user) => (typeof user === 'string' ? user : user.id)).filter((id) => id);
} else {
userKeys = users.update.map((user) => user.id).filter((id) => id);
}
const usersThatWereInRoleBefore = (await this.knex.select('id').from('directus_users').where('role', '=', key)).map(
(user) => user.id
);
const usersThatAreRemoved = usersThatWereInRoleBefore.filter((id) => userKeys.includes(id) === false);
const usersThatAreAdded = Array.isArray(users) ? users : users.create;
// If the role the users are moved to is an admin role, and there's at least 1 (new)
// admin user, we don't have to check for other admin users
if ((role.admin_access === true || role.admin_access === 1) && usersThatAreAdded.length > 0) return;
const otherAdminUsers = await this.knex
.count('*', { as: 'count' })
.from('directus_users')
.whereNotIn('directus_users.id', [...userKeys, ...usersThatAreRemoved])
.andWhere({ 'directus_roles.admin_access': true })
.leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id')
.first();
const otherAdminUsersCount = +(otherAdminUsers?.count || 0);
if (otherAdminUsersCount === 0) {
throw new UnprocessableEntityException(`You can't remove the last admin user from the admin role.`);
}
return;
}
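
The `users` value handled by checkForOtherAdminUsers above can arrive in two shapes: a flat array of user keys/items, or the Alterations structure ({ create, update, delete }) that this commit moves into the shared types. A hedged sketch of both shapes with made-up values, only to show what the branches above map over:

// Flat array form: existing users referenced by key or by nested object.
const usersAsArray: (string | { id: string })[] = ['user-a', { id: 'user-b' }];

// Alterations form: created, updated, and removed users for this role.
const usersAsAlterations = {
	create: [{ email: 'new.user@example.com' }], // users being added to the role
	update: [{ id: 'user-b' }], // users kept in (or moved into) the role
	delete: ['user-a'], // users being removed from the role
};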
async updateOne(key: PrimaryKey, data: Record<string, any>, opts?: MutationOptions): Promise<PrimaryKey> {
if ('admin_access' in data && data.admin_access === false) {
await this.checkForOtherAdminRoles([key]);
}
if ('users' in data) {
await this.checkForOtherAdminUsers(key, data.users);
}
return super.updateOne(key, data, opts);
}
async updateMany(keys: PrimaryKey[], data: Record<string, any>, opts?: MutationOptions): Promise<PrimaryKey[]> {
if ('admin_access' in data && data.admin_access === false) {
await this.checkForOtherAdminRoles(keys);
}
return super.updateMany(keys, data, opts);
}
async deleteOne(key: PrimaryKey): Promise<PrimaryKey> {
await this.deleteMany([key]);
return key;
}
async deleteMany(keys: PrimaryKey[]): Promise<PrimaryKey[]> {
// Make sure there's at least one admin role left after this deletion is done
const otherAdminRoles = await this.knex
.count('*', { as: 'count' })
.from('directus_roles')
.whereNotIn('id', keys)
.andWhere({ admin_access: true })
.first();
const otherAdminRolesCount = +(otherAdminRoles?.count || 0);
if (otherAdminRolesCount === 0) throw new UnprocessableEntityException(`You can't delete the last admin role.`);
await this.checkForOtherAdminRoles(keys);
await this.knex.transaction(async (trx) => {
const itemsService = new ItemsService('directus_roles', {
@@ -77,6 +145,10 @@ export class RolesService extends ItemsService {
return keys;
}
deleteByQuery(query: Query, opts?: MutationOptions): Promise<PrimaryKey[]> {
return super.deleteByQuery(query, opts);
}
/**
* @deprecated Use `deleteOne` or `deleteMany` instead
*/
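
Both the role-level and user-level safeguards above reduce to the same pattern: count the remaining admin rows while excluding whatever this mutation touches, and refuse the change if the count reaches zero. A minimal sketch of the role-level check, mirroring the knex query in the diff (it assumes a knex instance pointed at the Directus system tables):

import { Knex } from 'knex';
import { PrimaryKey } from '../types';

// Returns true if at least one admin role remains after excluding `excludeKeys`.
async function hasOtherAdminRole(knex: Knex, excludeKeys: PrimaryKey[]): Promise<boolean> {
	const row = await knex
		.count('*', { as: 'count' })
		.from('directus_roles')
		.whereNotIn('id', excludeKeys)
		.andWhere({ admin_access: true })
		.first();

	return +(row?.count || 0) > 0;
}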

View File

@@ -12,8 +12,9 @@ import env from '../env';
import logger from '../logger';
import { rateLimiter } from '../middleware/rate-limiter';
import storage from '../storage';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types';
import { toArray } from '../utils/to-array';
import { AbstractServiceOptions, SchemaOverview } from '../types';
import { Accountability } from '@directus/shared/types';
import { toArray } from '@directus/shared/utils';
import getMailer from '../mailer';
import { SettingsService } from './settings';

View File

@@ -7,16 +7,8 @@ import { OpenAPIObject, OperationObject, PathItemObject, SchemaObject, TagObject
import { version } from '../../package.json';
import getDatabase from '../database';
import env from '../env';
import {
AbstractServiceOptions,
Accountability,
Collection,
Field,
Permission,
Relation,
SchemaOverview,
types,
} from '../types';
import { AbstractServiceOptions, Collection, Permission, Relation, SchemaOverview } from '../types';
import { Accountability, Field, Type } from '@directus/shared/types';
import { getRelationType } from '../utils/get-relation-type';
import { CollectionsService } from './collections';
import { FieldsService } from './fields';
@@ -459,20 +451,33 @@ class OASSpecsService implements SpecificationSubService {
}
private fieldTypes: Record<
typeof types[number],
Type,
{
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'integer' | 'null' | undefined;
format?: string;
items?: any;
}
> = {
alias: {
type: 'string',
},
bigInteger: {
type: 'integer',
format: 'int64',
},
binary: {
type: 'string',
format: 'binary',
},
boolean: {
type: 'boolean',
},
csv: {
type: 'array',
items: {
type: 'string',
},
},
date: {
type: 'string',
format: 'date',
@@ -488,6 +493,9 @@ class OASSpecsService implements SpecificationSubService {
type: 'number',
format: 'float',
},
hash: {
type: 'string',
},
integer: {
type: 'integer',
},
@@ -511,23 +519,13 @@ class OASSpecsService implements SpecificationSubService {
type: 'string',
format: 'timestamp',
},
binary: {
type: 'string',
format: 'binary',
unknown: {
type: undefined,
},
uuid: {
type: 'string',
format: 'uuid',
},
csv: {
type: 'array',
items: {
type: 'string',
},
},
hash: {
type: 'string',
},
};
}
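
The fieldTypes map above now keys off the shared Type union and gains entries such as hash, csv, and unknown; each Directus field type resolves to an OpenAPI type/format pair (or to undefined when nothing sensible maps). A small sketch of how one of these entries could be turned into an OpenAPI property object — the helper and the trimmed-down map are illustrative stand-ins, not the service's actual code:

// Simplified stand-in for the fieldTypes map and its consumption.
type OASType = 'string' | 'number' | 'boolean' | 'object' | 'array' | 'integer' | 'null' | undefined;

const fieldTypes: Record<string, { type: OASType; format?: string; items?: unknown }> = {
	uuid: { type: 'string', format: 'uuid' },
	csv: { type: 'array', items: { type: 'string' } },
	unknown: { type: undefined },
};

function toOASProperty(directusType: string): Record<string, unknown> {
	const mapped = fieldTypes[directusType] ?? { type: undefined };
	const property: Record<string, unknown> = {};
	if (mapped.type) property.type = mapped.type;
	if (mapped.format) property.format = mapped.format;
	if (mapped.items) property.items = mapped.items;
	return property;
}

// toOASProperty('csv') => { type: 'array', items: { type: 'string' } }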

Some files were not shown because too many files have changed in this diff.