diff --git a/.eslintignore b/.eslintignore index f06235c460..ae44c032fc 100644 --- a/.eslintignore +++ b/.eslintignore @@ -1,2 +1,3 @@ node_modules dist +templates diff --git a/.eslintrc.js b/.eslintrc.js index d641f4a30e..e338ba96cf 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -38,10 +38,9 @@ module.exports = { parser: '@typescript-eslint/parser', }, extends: [ - 'plugin:vue/vue3-essential', + 'plugin:vue/vue3-recommended', 'eslint:recommended', 'plugin:@typescript-eslint/recommended', - 'plugin:prettier-vue/recommended', 'prettier', ], rules: { diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e8ea08c548..b6b791dc59 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,10 +2,7 @@ /docs/*.md @benhaynes -/packages/cli @WoLfulus -/packages/sdk @WoLfulus -/packages/gatsby-source-directus @WoLfulus - /packages/shared @nickrum -/packages/extension-sdk @nickrum +/packages/extensions-sdk @nickrum +/packages/create-directus-extension @nickrum /app/vite.config.js @nickrum diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index e181d3dd09..8de2b788b6 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -5,12 +5,17 @@ body: - type: markdown attributes: value: Hi, thank you for taking the time to create an issue! - - type: markdown + - type: checkboxes + id: troubleshooting attributes: - value: 'Before continuing, you must first have completed all [Troubleshooting Steps](https://docs.directus.io/getting-started/support/#troubleshooting-steps)' - - type: markdown - attributes: - value: Please confirm that an issue describing this problem doesn't exist already. + label: Preflight Checklist + options: + - label: I have completed all [Troubleshooting Steps](https://docs.directus.io/getting-started/support/#troubleshooting-steps). + required: true + - label: I'm on [the latest version of Directus](https://github.com/directus/directus/releases). + required: true + - label: There's [no other issue](https://github.com/directus/directus/issues) that already describes my problem. + required: true - type: textarea attributes: label: Describe the Bug diff --git a/.github/actions/build-images/Dockerfile b/.github/actions/build-images/Dockerfile deleted file mode 100644 index b9e58e9666..0000000000 --- a/.github/actions/build-images/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM docker:stable - -RUN \ - apk update && \ - apk upgrade && \ - apk add bash - -COPY ./rootfs/ / - -RUN \ - chmod +x /usr/bin/lib/argsf && \ - chmod +x /usr/bin/entrypoint && \ - chmod +x /usr/bin/semver - -ENTRYPOINT ["entrypoint"] diff --git a/.github/actions/build-images/action.yml b/.github/actions/build-images/action.yml deleted file mode 100644 index b15148ebc1..0000000000 --- a/.github/actions/build-images/action.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: "Build and publish Directus images" -description: "GitHub Action to publish Directus container images." 
-branding: - icon: archive - color: gray-dark -inputs: - repository: - description: "Repository name" - required: true - registry: - description: "Registry" - required: true - username: - description: "Registry user" - required: true - password: - description: "Registry password" - required: true - version: - description: "Version" - required: true - push: - description: "Push" - required: false - default: "false" - latest: - description: "Latest" - required: false - default: "false" -runs: - using: "docker" - image: "Dockerfile" - args: - - --registry - - ${{ inputs.registry }} - - --repository - - ${{ inputs.repository }} - - --username - - ${{ inputs.username }} - - --password - - ${{ inputs.password }} - - --version - - ${{ inputs.version }} - - --push - - ${{ inputs.push }} - - --latest - - ${{ inputs.latest }} diff --git a/.github/actions/build-images/rootfs/directus/images/main/Dockerfile b/.github/actions/build-images/rootfs/directus/images/main/Dockerfile deleted file mode 100644 index 42b6478af6..0000000000 --- a/.github/actions/build-images/rootfs/directus/images/main/Dockerfile +++ /dev/null @@ -1,106 +0,0 @@ -# Builder image -FROM alpine:latest AS builder - -ARG VERSION -ARG REPOSITORY=directus/directus - -# Get runtime dependencies from optional dependencies -# defined in package.json of Directus API package -WORKDIR /directus -RUN apk add --no-cache jq \ - && wget -O directus-api-package.json "https://raw.githubusercontent.com/${REPOSITORY}/${VERSION}/api/package.json" \ - && jq '{ \ - name: "directus-project", \ - version: "1.0.0", \ - description: "Directus Project", \ - dependencies: .optionalDependencies \ - }' \ - directus-api-package.json > package.json - -# Directus image -FROM node:16-alpine - -ARG VERSION -ARG REPOSITORY=directus/directus - -LABEL directus.version="${VERSION}" -LABEL org.opencontainers.image.source https://github.com/${REPOSITORY} - -# Default environment variables -# (see https://docs.directus.io/reference/environment-variables/) -ENV \ - PORT="8055" \ - PUBLIC_URL="/" \ - DB_CLIENT="sqlite3" \ - DB_FILENAME="/directus/database/database.sqlite" \ - RATE_LIMITER_ENABLED="false" \ - RATE_LIMITER_STORE="memory" \ - RATE_LIMITER_POINTS="25" \ - RATE_LIMITER_DURATION="1" \ - CACHE_ENABLED="false" \ - STORAGE_LOCATIONS="local" \ - STORAGE_LOCAL_PUBLIC_URL="/uploads" \ - STORAGE_LOCAL_DRIVER="local" \ - STORAGE_LOCAL_ROOT="/directus/uploads" \ - ACCESS_TOKEN_TTL="15m" \ - REFRESH_TOKEN_TTL="7d" \ - REFRESH_TOKEN_COOKIE_SECURE="false" \ - REFRESH_TOKEN_COOKIE_SAME_SITE="lax" \ - OAUTH_PROVIDERS="" \ - EXTENSIONS_PATH="/directus/extensions" \ - EMAIL_FROM="no-reply@directus.io" \ - EMAIL_TRANSPORT="sendmail" \ - EMAIL_SENDMAIL_NEW_LINE="unix" \ - EMAIL_SENDMAIL_PATH="/usr/sbin/sendmail" - -RUN \ - # Install system dependencies - # - 'bash' for entrypoint script - # - 'ssmtp' to be able to send mails - # - 'util-linux' not sure if this is required - apk upgrade --no-cache && apk add --no-cache \ - bash \ - ssmtp \ - util-linux \ - # Install global node dependencies - && npm install -g \ - yargs \ - pino \ - pino-colada \ - # Create directory for Directus with corresponding ownership - # (can be omitted on newer Docker versions since WORKDIR below will do the same) - && mkdir /directus && chown node:node /directus - -# Switch to user 'node' and directory '/directus' -USER node -WORKDIR /directus - -# Get package.json from builder image -COPY --from=builder --chown=node:node /directus/package.json . 
- -RUN \ - # Install Directus and runtime dependencies - # (retry if it fails for some reason, e.g. release not published yet) - for i in $(seq 10); do npm install "directus@${VERSION}" && break || sleep 30; done && \ - npm install \ - # Create data directories - && mkdir -p \ - database \ - extensions/displays \ - extensions/interfaces \ - extensions/layouts \ - extensions/modules \ - uploads - -# Expose data directories as volumes -VOLUME \ - /directus/database \ - /directus/extensions \ - /directus/uploads - -# Copy rootfs files -COPY ./rootfs / - -EXPOSE 8055 -SHELL ["/bin/bash", "-c"] -ENTRYPOINT ["entrypoint"] diff --git a/.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/local/bin/entrypoint b/.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/local/bin/entrypoint deleted file mode 100755 index 7b2d1a6298..0000000000 --- a/.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/local/bin/entrypoint +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env bash - -set -e - -function bootstrap() { - local warn=false - - if [ "${KEY}" == "" ] ; then - export KEY=$(uuidgen) - warn=true - fi - - if [ "${SECRET}" == "" ] ; then - export SECRET=$(node -e 'console.log(require("nanoid").nanoid(32))') - warn=true - fi - - if [ "${warn}" == "true" ] ; then - print --level=warn --stdin <<WARN -> -> WARNING! -> -> The KEY and SECRET environment variables are not set. Some -> temporary variables were generated to fill the gap, but in -> production this is going to cause problems. -> -> Reference: -> https://docs.directus.io/reference/environment-variables.html -> -> -WARN - fi - - # Create folder if using sqlite and file doesn't exist - if [ "${DB_CLIENT}" == "sqlite3" ] ; then - if [ "${DB_FILENAME}" == "" ] ; then - print --level=error "Missing DB_FILENAME environment variable" - exit 1 - fi - - if [ ! -f "${DB_FILENAME}" ] ; then - mkdir -p $(dirname ${DB_FILENAME}) - fi - fi - - npx directus bootstrap -} - -command="" -if [ $# -eq 0 ] ; then - command="start" -elif [ "${1}" == "bash" ] || [ "${1}" == "shell" ] ; then - shift - exec bash $@ -elif [ "${1}" == "command" ] ; then - shift - exec $@ -else - command="${1}" - shift -fi - -bootstrap -exec npx directus "${command}" $@ diff --git a/.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/local/bin/print b/.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/local/bin/print deleted file mode 100755 index 037ecd8f0f..0000000000 --- a/.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/local/bin/print +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env node - -// Workarounds?
-process.env.NODE_PATH = "/usr/local/lib/node_modules"; -require("module").Module._initPaths(); - -/** - * Read lines from stdin - */ -async function readlines() { - const chunks = []; - for await (const chunk of process.stdin) { - chunks.push(chunk); - } - - const lines = chunks.join("").split("\n"); - lines.pop(); - return lines; -} - -(async function () { - // Logger - const yargs = require("yargs"); - const logger = require("pino")({ - prettyPrint: process.env.LOG_STYLE !== "raw", - prettifier: require("pino-colada"), - level: process.env.LOG_LEVEL || "info", - }); - - function write(...message) { - if (level in logger) { - logger[level](...message); - } else { - logger.info(...message); - } - } - - const args = yargs.argv; - const level = args.level || "info"; - const stdin = args.stdin || false; - - if (stdin) { - const lines = await readlines(); - lines.forEach((line) => write(line)); - } else { - write(...args._); - } -})(); diff --git a/.github/actions/build-images/rootfs/usr/bin/entrypoint b/.github/actions/build-images/rootfs/usr/bin/entrypoint deleted file mode 100644 index d08bc87bb2..0000000000 --- a/.github/actions/build-images/rootfs/usr/bin/entrypoint +++ /dev/null @@ -1,138 +0,0 @@ -#!/usr/bin/env bash - -set -e - -root=$(dirname ${0}) -source ${root}/lib/argsf - -# -# Makes a set of tags -# -function make_tags() { - local prefix="" - local version=${1} - - semver get major ${version} > /dev/null 2>&1 - if [ "$?" != "0" ]; then - echo "${version}" - else - if [ "${version:0:1}" == "v" ]; then - prefix="v" - fi - - major="$(semver get major ${version})" - minor="${major}.$(semver get minor ${version})" - patch="${minor}.$(semver get patch ${version})" - - prerel="$(semver get prerel ${version})" - if [ "${prerel}" == "" ]; then - is_prerel=false - else - is_prerel=true - fi - - build="$(semver get build ${version})" - if [ "${build}" == "" ]; then - is_build=false - else - is_build=true - fi - - if [ "${is_prerel}" == "true" ]; then - echo "${prefix}${major}-${prerel}" - echo "${prefix}${minor}-${prerel}" - echo "${prefix}${patch}-${prerel}" - if [ "${is_build}" == "true" ]; then - echo "${prefix}${major}-${prerel}-${build}" - fi - else - echo "${prefix}${major}" - echo "${prefix}${minor}" - echo "${prefix}${patch}" - if [ "${is_build}" == "true" ]; then - echo "${prefix}${patch}-${build}" - fi - fi - fi -} - -# -# Build script -# -function main() { - username=$(argument username) - password=$(argument password) - - push=$(argument push "false") - latest=$(argument latest "false") - - registry=$(argument registry "") - registry=$(echo "${registry}" | tr '[:upper:]' '[:lower:]') - - repository=$(argument repository "directus/directus") - repository=$(echo "${repository}" | tr '[:upper:]' '[:lower:]') - - version=$(argument version "") - context=$(argument context ".") - - image="${repository}" - if [ "${registry}" != "" ]; then - image="${registry}/${image}" - fi - - # Normalize tag - if [ "${version}" == "" ]; then - version=${GITHUB_REF##*/} - else - version=${version##*/} - fi - - if [ "${version}" == "" ]; then - version=$(echo ${GITHUB_SHA:-"000000000000"} | cut -c1-12) - fi - - tags=$(make_tags ${version}) - echo "Tags = ${tags}" - - # build image - docker build \ - -t directus:main \ - --build-arg VERSION=${version} \ - --build-arg REPOSITORY=${repository} \ - /directus/images/main - - # login into registry - docker login -u "${username}" -p "${password}" "${registry}" - - # Push latest - # TODO: check if it's really the latest - if [ "${latest}" == "true" ]; then - 
fqin="${image}:latest" - echo "Tagging ${fqin}" - docker tag directus:main ${fqin} - if [ "${push}" == "true" ]; then - echo "Pushing tag ${fqin}" - docker push "${fqin}" - fi - fi - - # Push tags - for tag in $tags - do - tag=$(echo "${tag}" | tr '[:upper:]' '[:lower:]') - fqin="${image}:${tag}" - echo "Tagging ${fqin}" - docker tag directus:main "${fqin}" - if [ "${push}" == "true" ]; then - echo "Pushing tag ${fqin}" - docker push "${fqin}" - fi - done - - echo "Finished." - - exit $? -} - -main -exit $? diff --git a/.github/actions/build-images/rootfs/usr/bin/lib/argsf b/.github/actions/build-images/rootfs/usr/bin/lib/argsf deleted file mode 100644 index 0869fa25bd..0000000000 --- a/.github/actions/build-images/rootfs/usr/bin/lib/argsf +++ /dev/null @@ -1,98 +0,0 @@ -# -# Arguments and Flags (argsf) -# This is meant to work with bash shell -# To use, source this file into your bash scripts -# -# Implemented by João Biondo -# https://github.com/WoLfulus/argsf -# - -declare _ARGCOUNT=$# -declare _ARGDATA=("$@") -declare -A _ARGMAP -declare -A _FLAGMAP - -for ((_arg_index_key=1;_arg_index_key<=$#;_arg_index_key++)) -do - _arg_index_value=$(expr $_arg_index_key + 1) - _arg_key=${!_arg_index_key} - _arg_value=${!_arg_index_value} - if [[ $_arg_key == *"--"* ]]; then - if [[ $_arg_key == *" "* ]]; then - continue - fi - _arg_name="${_arg_key:2}" - _FLAGMAP[${_arg_name}]=1 - if [[ $_arg_value != *"--"* ]] || [[ $_arg_value == *" "* ]] ; then - _ARGMAP[${_arg_name}]="$_arg_value" - else - _ARGMAP[${_arg_name}]="" - fi - fi -done - -function _argument() { - if test "${_ARGMAP[${ARG_NAME}]+isset}" ; then - echo ${_ARGMAP[${ARG_NAME}]} - else - if [ ${ARG_DEFAULT} -eq 0 ]; then - echo "Error: required argument '--${ARG_NAME}' not specified" 1>&2 - exit 1 - else - echo ${ARG_DEFAULT_VALUE} - fi - fi -} - -function argument() { - if [ $# -eq 1 ]; then - ARG_NAME="$1" ARG_DEFAULT=0 ARG_DEFAULT_VALUE= _argument "${_ARGUMENT_DATA}" - elif [ $# -eq 2 ]; then - ARG_NAME="$1" ARG_DEFAULT=1 ARG_DEFAULT_VALUE="$2" _argument "${_ARGUMENT_DATA}" - else - echo "argument: invalid number of arguments" 1>&2 - return 1 - fi - return 0 -} - -function flage() { - if [ $# -eq 1 ]; then - if [[ ${_FLAGMAP[$1]} ]] ; then - echo "true" - return 0 - elif [[ ${_FLAGMAP[no-$1]} ]] ; then - echo "false" - return 0 - else - echo "true" - return 0 - fi - else - echo "flag: invalid number of arguments" 1>&2 - return 1 - fi -} - -function flagd() { - if [ $# -eq 1 ]; then - if [[ ${_FLAGMAP[$1]} ]] ; then - echo "true" - return 0 - elif [[ ${_FLAGMAP[no-$1]} ]] ; then - echo "false" - return 0 - else - echo "false" - return 0 - fi - else - echo "flag: invalid number of arguments" 1>&2 - return 1 - fi -} - -function flag() { - flagd $1 - return $? -} diff --git a/.github/actions/build-images/rootfs/usr/bin/semver b/.github/actions/build-images/rootfs/usr/bin/semver deleted file mode 100644 index c3d5075162..0000000000 --- a/.github/actions/build-images/rootfs/usr/bin/semver +++ /dev/null @@ -1,284 +0,0 @@ -#!/usr/bin/env bash - -# -# Copyright (c) 2014-2015 François Saint-Jacques -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 3, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. 
See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program. If not, see <http://www.gnu.org/licenses/>. -# - -set -o errexit -o nounset -o pipefail - -NAT='0|[1-9][0-9]*' -ALPHANUM='[0-9]*[A-Za-z-][0-9A-Za-z-]*' -IDENT="$NAT|$ALPHANUM" -FIELD='[0-9A-Za-z-]+' - -SEMVER_REGEX="\ -^[vV]?\ -($NAT)\\.($NAT)\\.($NAT)\ -(\\-(${IDENT})(\\.(${IDENT}))*)?\ -(\\+${FIELD}(\\.${FIELD})*)?$" - -PROG=semver -PROG_VERSION="3.0.0" - -USAGE="\ -Usage: - $PROG bump (major|minor|patch|release|prerel <prerel>|build <build>) <version> - $PROG compare <version> <other_version> - $PROG get (major|minor|patch|release|prerel|build) <version> - $PROG --help - $PROG --version -Arguments: - <version> A version must match the following regular expression: - \"${SEMVER_REGEX}\" - In English: - -- The version must match X.Y.Z[-PRERELEASE][+BUILD] - where X, Y and Z are non-negative integers. - -- PRERELEASE is a dot separated sequence of non-negative integers and/or - identifiers composed of alphanumeric characters and hyphens (with - at least one non-digit). Numeric identifiers must not have leading - zeros. A hyphen (\"-\") introduces this optional part. - -- BUILD is a dot separated sequence of identifiers composed of alphanumeric - characters and hyphens. A plus (\"+\") introduces this optional part. - See <https://semver.org> definition. - <prerel> A string as defined by PRERELEASE above. - <build> A string as defined by BUILD above. -Options: - -v, --version Print the version of this tool. - -h, --help Print this help message. -Commands: - bump Bump by one of major, minor, patch; zeroing or removing - subsequent parts. \"bump prerel\" sets the PRERELEASE part and - removes any BUILD part. \"bump build\" sets the BUILD part. - \"bump release\" removes any PRERELEASE or BUILD parts. - The bumped version is written to stdout. - compare Compare <version> with <other_version>, output to stdout the - following values: -1 if <other_version> is newer, 0 if equal, 1 if - older. The BUILD part is not used in comparisons. - get Extract given part of <version>, where part is one of major, minor, - patch, prerel, build, or release. -See also: - https://semver.org -- Semantic Versioning 2.0.0" - -function error { - echo -e "$1" >&2 - exit 1 -} - -function usage-help { - error "$USAGE" -} - -function usage-version { - echo -e "${PROG}: $PROG_VERSION" - exit 0 -} - -function validate-version { - local version=$1 - if [[ "$version" =~ $SEMVER_REGEX ]]; then - # if a second argument is passed, store the result in var named by $2 - if [ "$#" -eq "2" ]; then - local major=${BASH_REMATCH[1]} - local minor=${BASH_REMATCH[2]} - local patch=${BASH_REMATCH[3]} - local prere=${BASH_REMATCH[4]} - local build=${BASH_REMATCH[8]} - eval "$2=(\"$major\" \"$minor\" \"$patch\" \"$prere\" \"$build\")" - else - echo "$version" - fi - else - error "version $version does not match the semver scheme 'X.Y.Z(-PRERELEASE)(+BUILD)'. See help for more information." - fi -} - -function is-nat { - [[ "$1" =~ ^($NAT)$ ]] -} - -function is-null { - [ -z "$1" ] -} - -function order-nat { - [ "$1" -lt "$2" ] && { echo -1 ; return ; } - [ "$1" -gt "$2" ] && { echo 1 ; return ; } - echo 0 -} - -function order-string { - [[ $1 < $2 ]] && { echo -1 ; return ; } - [[ $1 > $2 ]] && { echo 1 ; return ; } - echo 0 -} - -# given two (named) arrays containing NAT and/or ALPHANUM fields, compare them -# one by one according to semver 2.0.0 spec. Return -1, 0, 1 if left array ($1) -# is less-than, equal, or greater-than the right array ($2). The longer array -# is considered greater-than the shorter if the shorter is a prefix of the longer.
-# -function compare-fields { - local l="$1[@]" - local r="$2[@]" - local leftfield=( "${!l}" ) - local rightfield=( "${!r}" ) - local left - local right - - local i=$(( -1 )) - local order=$(( 0 )) - - while true - do - [ $order -ne 0 ] && { echo $order ; return ; } - - : $(( i++ )) - left="${leftfield[$i]}" - right="${rightfield[$i]}" - - is-null "$left" && is-null "$right" && { echo 0 ; return ; } - is-null "$left" && { echo -1 ; return ; } - is-null "$right" && { echo 1 ; return ; } - - is-nat "$left" && is-nat "$right" && { order=$(order-nat "$left" "$right") ; continue ; } - is-nat "$left" && { echo -1 ; return ; } - is-nat "$right" && { echo 1 ; return ; } - { order=$(order-string "$left" "$right") ; continue ; } - done -} - -# shellcheck disable=SC2206 # checked by "validate"; ok to expand prerel id's into array -function compare-version { - local order - validate-version "$1" V - validate-version "$2" V_ - - # compare major, minor, patch - - local left=( "${V[0]}" "${V[1]}" "${V[2]}" ) - local right=( "${V_[0]}" "${V_[1]}" "${V_[2]}" ) - - order=$(compare-fields left right) - [ "$order" -ne 0 ] && { echo "$order" ; return ; } - - # compare pre-release ids when M.m.p are equal - - local prerel="${V[3]:1}" - local prerel_="${V_[3]:1}" - local left=( ${prerel//./ } ) - local right=( ${prerel_//./ } ) - - # if left and right have no pre-release part, then left equals right - # if only one of left/right has pre-release part, that one is less than simple M.m.p - - [ -z "$prerel" ] && [ -z "$prerel_" ] && { echo 0 ; return ; } - [ -z "$prerel" ] && { echo 1 ; return ; } - [ -z "$prerel_" ] && { echo -1 ; return ; } - - # otherwise, compare the pre-release id's - - compare-fields left right -} - -function command-bump { - local new; local version; local sub_version; local command; - - case $# in - 2) case $1 in - major|minor|patch|release) command=$1; version=$2;; - *) usage-help;; - esac ;; - 3) case $1 in - prerel|build) command=$1; sub_version=$2 version=$3 ;; - *) usage-help;; - esac ;; - *) usage-help;; - esac - - validate-version "$version" parts - # shellcheck disable=SC2154 - local major="${parts[0]}" - local minor="${parts[1]}" - local patch="${parts[2]}" - local prere="${parts[3]}" - local build="${parts[4]}" - - case "$command" in - major) new="$((major + 1)).0.0";; - minor) new="${major}.$((minor + 1)).0";; - patch) new="${major}.${minor}.$((patch + 1))";; - release) new="${major}.${minor}.${patch}";; - prerel) new=$(validate-version "${major}.${minor}.${patch}-${sub_version}");; - build) new=$(validate-version "${major}.${minor}.${patch}${prere}+${sub_version}");; - *) usage-help ;; - esac - - echo "$new" - exit 0 -} - -function command-compare { - local v; local v_; - - case $# in - 2) v=$(validate-version "$1"); v_=$(validate-version "$2") ;; - *) usage-help ;; - esac - - set +u # need unset array element to evaluate to null - compare-version "$v" "$v_" - exit 0 -} - - -# shellcheck disable=SC2034 -function command-get { - local part version - - if [[ "$#" -ne "2" ]] || [[ -z "$1" ]] || [[ -z "$2" ]]; then - usage-help - exit 0 - fi - - part="$1" - version="$2" - - validate-version "$version" parts - local major="${parts[0]}" - local minor="${parts[1]}" - local patch="${parts[2]}" - local prerel="${parts[3]:1}" - local build="${parts[4]:1}" - local release="${major}.${minor}.${patch}" - - case "$part" in - major|minor|patch|release|prerel|build) echo "${!part}" ;; - *) usage-help ;; - esac - - exit 0 -} - -case $# in - 0) echo "Unknown command: $*"; usage-help;; -esac - 
-case $1 in - --help|-h) echo -e "$USAGE"; exit 0;; - --version|-v) usage-version ;; - bump) shift; command-bump "$@";; - get) shift; command-get "$@";; - compare) shift; command-compare "$@";; - *) echo "Unknown arguments: $*"; usage-help;; -esac diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml deleted file mode 100644 index 4ecc900801..0000000000 --- a/.github/workflows/build-images.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: build-images -on: - release: - types: - - published - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Sleep for 30 seconds - uses: jakejarvis/wait-action@master - with: - time: '30s' - - - name: Checkout - uses: actions/checkout@v2 - - - name: Build GitHub Container Registry - uses: ./.github/actions/build-images - with: - registry: "ghcr.io" - repository: "${{ github.repository }}" - username: "${{ secrets.REGISTRY_USERNAME }}" - password: "${{ secrets.REGISTRY_PASSWORD }}" - version: "${{ github.ref }}" - latest: "true" - push: "true" - - - name: Build Docker Hub - uses: ./.github/actions/build-images - with: - registry: "docker.io" - repository: "${{ github.repository }}" - username: "${{ secrets.DOCKERHUB_USERNAME }}" - password: "${{ secrets.DOCKERHUB_PASSWORD }}" - version: "${{ github.ref }}" - latest: "true" - push: "true" diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml deleted file mode 100644 index 5071a2aa14..0000000000 --- a/.github/workflows/create-release.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: create-release -on: - push: - tags: - - 'v*' -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Create Release - id: create_release - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.REPOSITORY_TOKEN }} - with: - tag_name: ${{ github.ref }} - release_name: ${{ github.ref }} - body: | - Directus ${{ github.ref }} - draft: false - prerelease: false diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-full.yml similarity index 91% rename from .github/workflows/e2e-tests.yml rename to .github/workflows/e2e-full.yml index f4db1a211b..abfe21f7cd 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-full.yml @@ -1,4 +1,4 @@ -name: Run e2e tests +name: E2E on: push: branches: @@ -10,12 +10,13 @@ jobs: fail-fast: false matrix: db: ['mssql', 'mysql', 'postgres', 'maria', 'sqlite3'] - node-version: ['12-alpine', '14-alpine', '16-alpine'] + # node-version: ['12-alpine', '14-alpine', '16-alpine'] + node-version: ['16-alpine'] env: CACHED_IMAGE: ghcr.io/directus/directus-e2e-test-cache:${{ matrix.node-version }} steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.9.0 + uses: styfle/cancel-workflow-action@0.9.1 with: access_token: ${{ secrets.GITHUB_TOKEN }} - name: Login to GitHub Container Registry diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml new file mode 100644 index 0000000000..da384331de --- /dev/null +++ b/.github/workflows/e2e.yml @@ -0,0 +1,42 @@ +name: E2E +on: + pull_request: + branches: + - main +jobs: + tests: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + db: ['postgres'] + node-version: ['16-alpine'] + env: + CACHED_IMAGE: ghcr.io/directus/directus-e2e-test-cache:${{ matrix.node-version }} + steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.9.1 + with: + access_token: ${{ secrets.GITHUB_TOKEN }} + - uses: actions/checkout@v2 + - uses: actions/setup-node@v2 + with: + 
node-version: '16' + - name: restore node_modules cache + uses: actions/cache@v2 + with: + path: | + node_modules + **/node_modules + key: ${{ runner.os }}-${{ hashFiles('**/package-lock.json') }} + - name: Install dependencies + run: | + npm install + - name: Build + run: | + npm run build + - name: Run tests + env: + TEST_NODE_VERSION: ${{ matrix.node-version }} + TEST_DB: ${{ matrix.db }} + run: npm run test:e2e diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index a09b6fa102..2911c4a036 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -14,7 +14,7 @@ jobs: steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.9.0 + uses: styfle/cancel-workflow-action@0.9.1 with: access_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000000..f11d0d85c8 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,154 @@ +name: Release + +on: + push: + tags: + - 'v*' + +env: + GHCR_IMAGE: ghcr.io/${{ github.repository }} + DOCKERHUB_IMAGE: ${{ github.repository }} + +jobs: + create-release: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Create Release + id: create_release + uses: actions/create-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: ${{ github.ref }} + release_name: ${{ github.ref }} + body: | + Directus ${{ github.ref }} + draft: false + prerelease: false + + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Use Node.js + uses: actions/setup-node@v2 + with: + node-version: '16.x' + + # See https://github.com/npm/cli/issues/3637 + - run: npm i -g npm@7.20.2 + + - uses: c-hive/gha-npm-cache@v1 + - run: npm ci + - run: npm run build + - run: node docker/pack + + - name: Cache build artifacts + uses: actions/cache@v2 + with: + path: '**/dist' + key: build-artifacts-${{ github.sha }} + + publish-npm: + runs-on: ubuntu-latest + needs: build + steps: + - uses: actions/checkout@v2 + - name: Restore build artifacts + uses: actions/cache@v2 + with: + path: '**/dist' + key: build-artifacts-${{ github.sha }} + - name: Use Node.js + uses: actions/setup-node@v2 + with: + node-version: '16.x' + registry-url: 'https://registry.npmjs.org' + + # See https://github.com/npm/cli/issues/3637 + - run: npm i -g npm@7.20.2 + + - run: npm ci + + - run: npx lerna publish from-git --no-verify-access --yes + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + + build-images: + runs-on: ubuntu-latest + needs: build + steps: + - uses: actions/checkout@v2 + + - name: Restore build artifacts + uses: actions/cache@v2 + with: + path: '**/dist' + key: build-artifacts-${{ github.sha }} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + + - name: Cache Docker layers + uses: actions/cache@v2 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx- + + - name: Docker meta + id: meta + uses: docker/metadata-action@v3 + with: + images: | + ${{ env.DOCKERHUB_IMAGE }} + ${{ env.GHCR_IMAGE }} + # Remove this once v9 is released + flavor: | + latest=true + tags: | + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + + - name: Login to DockerHub + uses: docker/login-action@v1 + if: ${{ env.DOCKERHUB_IMAGE }} + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + 
password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: Login to GHCR + uses: docker/login-action@v1 + if: ${{ env.GHCR_IMAGE }} + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push + uses: docker/build-push-action@v2 + with: + context: . + file: './docker/Dockerfile' + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + platforms: linux/amd64,linux/arm64 + push: true + cache-from: type=local,src=/tmp/.buildx-cache + cache-to: type=local,dest=/tmp/.buildx-cache-new + + # Temp fix + # https://github.com/docker/build-push-action/issues/252 + # https://github.com/moby/buildkit/issues/1896 + - name: Move cache + run: | + rm -rf /tmp/.buildx-cache + mv /tmp/.buildx-cache-new /tmp/.buildx-cache diff --git a/.github/workflows/sync-dockerhub-readme.yml b/.github/workflows/sync-dockerhub-readme.yml new file mode 100644 index 0000000000..1e8fc57c5e --- /dev/null +++ b/.github/workflows/sync-dockerhub-readme.yml @@ -0,0 +1,24 @@ +name: Sync Readme to Docker Hub + +on: + push: + branches: + - main + paths: # ensures this workflow only runs when the readme.md or this file changes. + - 'readme.md' + - '.github/workflows/sync-dockerhub-readme.yml' + workflow_dispatch: + +jobs: + sync-dockerhub-readme: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Sync Readme to Docker Hub + uses: peter-evans/dockerhub-description@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + readme-filepath: ./readme.md diff --git a/.gitignore b/.gitignore index 2bfd6d11f3..b25c5868be 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,4 @@ dist app/public/img/docs/* *.tsbuildinfo .e2e-containers.json +coverage diff --git a/Dockerfile b/Dockerfile index 1c12ca1d99..870de37f6d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -26,33 +26,13 @@ FROM node:${NODE_VERSION} #ENV TNS_ADMIN /usr/lib/instantclient #ENV ORACLE_HOME /usr/lib/instantclient -RUN npm i -g lerna - WORKDIR /directus -COPY package*.json ./ -COPY lerna.json ./ -COPY api/package.json api/ -COPY api/cli.js api/ -COPY app/package.json app/ -COPY docs/package.json docs/ -COPY packages/create-directus-project/package.json packages/create-directus-project/ -COPY packages/create-directus-project/lib/index.js packages/create-directus-project/lib/ -COPY packages/drive/package.json packages/drive/ -COPY packages/drive-azure/package.json packages/drive-azure/ -COPY packages/drive-gcs/package.json packages/drive-gcs/ -COPY packages/drive-s3/package.json packages/drive-s3/ -COPY packages/format-title/package.json packages/format-title/ -COPY packages/gatsby-source-directus/package.json packages/gatsby-source-directus/ -COPY packages/schema/package.json packages/schema/ -COPY packages/sdk/package.json packages/sdk/ -COPY packages/specs/package.json packages/specs/ - -RUN npx lerna bootstrap - COPY . . 
+RUN npm install + WORKDIR /directus/api -CMD ["sh", "-c", "node ./dist/cli/index.js bootstrap; node ./dist/start.js;"] +CMD ["sh", "-c", "node ./cli.js bootstrap; node ./dist/start.js;"] EXPOSE 8055/tcp diff --git a/api/.gitignore b/api/.gitignore index c813612f69..16f3a346cc 100644 --- a/api/.gitignore +++ b/api/.gitignore @@ -10,4 +10,4 @@ test dist tmp keys.json - +coverage diff --git a/api/cli.js b/api/cli.js index d7ce758fbc..6c1cc9f5c0 100755 --- a/api/cli.js +++ b/api/cli.js @@ -1,2 +1,2 @@ #!/usr/bin/env node -require('./dist/cli/index.js'); +require('./dist/cli/run.js'); diff --git a/api/example.env b/api/example.env index 9249c9483c..25ca618190 100644 --- a/api/example.env +++ b/api/example.env @@ -9,18 +9,58 @@ LOG_STYLE="pretty" #################################################################################################### # Database -## PostgreSQL Example +## These match the databases defined in the docker-compose file in the root of this repo + +## Postgres DB_CLIENT="pg" DB_HOST="localhost" -DB_PORT=5432 +DB_PORT=5100 DB_DATABASE="directus" DB_USER="postgres" -DB_PASSWORD="psql1234" +DB_PASSWORD="secret" + +## MySQL 8 +# DB_CLIENT="mysql" +# DB_HOST="localhost" +# DB_PORT=5101 +# DB_DATABASE="directus" +# DB_USER="root" +# DB_PASSWORD="secret" + +## MariaDB +# DB_CLIENT="mysql" +# DB_HOST="localhost" +# DB_PORT=5102 +# DB_DATABASE="directus" +# DB_USER="root" +# DB_PASSWORD="secret" + +## MS SQL +# DB_CLIENT="mssql" +# DB_HOST="localhost" +# DB_PORT=5103 +# DB_DATABASE="directus" +# DB_USER="sa" +# DB_PASSWORD="Test@123" + +## OracleDB +# DB_CLIENT="oracle" +# DB_CONNECT_STRING="localhost:5104/XE" +# DB_USER="secretsysuser" +# DB_PASSWORD="secretpassword" ## SQLite Example # DB_CLIENT="sqlite3" # DB_FILENAME="./data.db" +## MySQL 5.7 +# DB_CLIENT="mysql" +# DB_HOST="localhost" +# DB_PORT=5102 +# DB_DATABASE="directus" +# DB_USER="root" +# DB_PASSWORD="secret" + #################################################################################################### # Rate Limiting @@ -32,45 +72,25 @@ RATE_LIMITER_DURATION=1 RATE_LIMITER_STORE=memory # memory | redis | memcache -## Redis (see https://github.com/animir/node-rate-limiter-flexible/wiki/Redis and -## https://www.npmjs.com/package/ioredis#connect-to-redis) -# RATE_LIMITER_EXEC_EVENLY=false -# RATE_LIMITER_BLOCK_DURATION=0 -# RATE_LIMITER_KEY_PREFIX=rlflx - -# RATE_LIMITER_REDIS="redis://:authpassword@127.0.0.1:6380/4" -# --OR-- -# RATE_LIMITER_REDIS_HOST="127.0.0.1" -# RATE_LIMITER_REDIS_PORT="127.0.0.1" -# RATE_LIMITER_REDIS_PASSWORD="127.0.0.1" -# RATE_LIMITER_REDIS_DB="127.0.0.1" - -## Memcache (see https://github.com/animir/node-rate-limiter-flexible/wiki/Memcache and -## https://www.npmjs.com/package/memcached) -# RATE_LIMITER_MEMCACHE='localhost:11211' +# RATE_LIMITER_REDIS="redis://@127.0.0.1:5105" +# RATE_LIMITER_MEMCACHE="localhost:5109" #################################################################################################### # Caching CACHE_ENABLED=true CACHE_TTL="30m" CACHE_NAMESPACE="directus-cache" -CACHE_STORE=memory -# memory | redis | memcache CACHE_AUTO_PURGE=true +# memory | redis | memcache +CACHE_STORE=memory + ASSETS_CACHE_TTL="30m" -# CACHE_REDIS="redis://:authpassword@127.0.0.1:6380/4" -# --OR-- -# CACHE_REDIS_HOST="127.0.0.1" -# CACHE_REDIS_PORT="127.0.0.1" -# CACHE_REDIS_PASSWORD="127.0.0.1" -# CACHE_REDIS_DB="127.0.0.1" +# CACHE_REDIS="redis://@127.0.0.1:5105" -## Memcache (see https://github.com/animir/node-rate-limiter-flexible/wiki/Memcache and -## 
https://www.npmjs.com/package/memcached) -# CACHE_MEMCACHE='localhost:11211' +# CACHE_MEMCACHE="localhost:5109" #################################################################################################### # File Storage @@ -103,6 +123,7 @@ ACCESS_TOKEN_TTL="15m" REFRESH_TOKEN_TTL="7d" REFRESH_TOKEN_COOKIE_SECURE="false" REFRESH_TOKEN_COOKIE_SAME_SITE="lax" +REFRESH_TOKEN_COOKIE_NAME="directus_refresh_token" CORS_ENABLED="true" CORS_ORIGIN="true" @@ -112,6 +133,16 @@ CORS_EXPOSED_HEADERS=Content-Range CORS_CREDENTIALS="true" CORS_MAX_AGE=18000 +#################################################################################################### +# Argon2 + +# HASH_MEMORY_COST=81920 +# HASH_HASH_LENGTH=32 +# HASH_TIME_COST=10 +# HASH_PARALLELISM=2 +# HASH_TYPE=2 +# HASH_ASSOCIATED_DATA=foo + #################################################################################################### # SSO (OAuth) Providers diff --git a/api/jest.config.js b/api/jest.config.js new file mode 100644 index 0000000000..86e9e44003 --- /dev/null +++ b/api/jest.config.js @@ -0,0 +1,12 @@ +const base = require('../jest.config.js'); + +require('dotenv').config(); + +module.exports = { + ...base, + roots: ['<rootDir>/src'], + verbose: true, + setupFiles: ['dotenv/config'], + testURL: process.env.TEST_URL || 'http://localhost', + collectCoverageFrom: ['src/**/*.ts'], +}; diff --git a/api/package.json b/api/package.json index 1d28e0ae3e..3ceffbb4d4 100644 --- a/api/package.json +++ b/api/package.json @@ -1,6 +1,6 @@ { "name": "directus", - "version": "9.0.0-rc.83", + "version": "9.0.0-rc.92", "license": "GPL-3.0-only", "homepage": "https://github.com/directus/directus#readme", "description": "Directus is a real-time API and App dashboard for managing SQL database content.", @@ -55,9 +55,10 @@ "prebuild": "npm run cleanup", "build": "tsc --build && copyfiles \"src/**/*.*\" -e \"src/**/*.ts\" -u 1 dist", "cleanup": "rimraf dist", - "dev": "cross-env DIRECTUS_DEV=true NODE_ENV=development ts-node-dev --files --transpile-only --respawn --watch \".env\" --inspect --exit-child -- src/start.ts", - "cli": "cross-env DIRECTUS_DEV=true NODE_ENV=development ts-node --script-mode --transpile-only src/cli/index.ts", - "prepublishOnly": "npm run build" + "dev": "cross-env NODE_ENV=development SERVE_APP=false ts-node-dev --files --transpile-only --respawn --watch \".env\" --inspect --exit-child -- src/start.ts", + "cli": "cross-env NODE_ENV=development SERVE_APP=false ts-node --script-mode --transpile-only src/cli/run.ts", + "test": "jest --coverage", + "test:watch": "jest --watchAll" }, "engines": { "node": ">=12.20.0" }, @@ -69,19 +70,20 @@ "example.env" ], "dependencies": { - "@directus/app": "9.0.0-rc.83", - "@directus/drive": "9.0.0-rc.83", - "@directus/drive-azure": "9.0.0-rc.83", - "@directus/drive-gcs": "9.0.0-rc.83", - "@directus/drive-s3": "9.0.0-rc.83", - "@directus/format-title": "9.0.0-rc.83", - "@directus/schema": "9.0.0-rc.83", - "@directus/shared": "9.0.0-rc.83", - "@directus/specs": "9.0.0-rc.83", + "@directus/app": "9.0.0-rc.92", + "@directus/drive": "9.0.0-rc.92", + "@directus/drive-azure": "9.0.0-rc.92", + "@directus/drive-gcs": "9.0.0-rc.92", + "@directus/drive-s3": "9.0.0-rc.92", + "@directus/extensions-sdk": "9.0.0-rc.92", + "@directus/format-title": "9.0.0-rc.92", + "@directus/schema": "9.0.0-rc.92", + "@directus/shared": "9.0.0-rc.92", + "@directus/specs": "9.0.0-rc.92", "@godaddy/terminus": "^4.9.0", "@rollup/plugin-alias": "^3.1.2", "@rollup/plugin-virtual": "^2.0.3", - "argon2":
"^0.28.2", "async": "^3.2.0", "async-mutex": "^0.3.1", "atob": "^2.1.2", @@ -93,20 +95,20 @@ "cookie-parser": "^1.4.5", "cors": "^2.8.5", "csv-parser": "^3.0.0", - "date-fns": "^2.21.1", + "date-fns": "^2.22.1", "deep-map": "^2.0.0", "destroy": "^1.0.4", "dotenv": "^10.0.0", "eventemitter2": "^6.4.3", "execa": "^5.1.1", - "exif-reader": "^1.0.3", + "exifr": "^7.1.2", "express": "^4.17.1", "express-session": "^1.17.2", + "flat": "^5.0.2", "fs-extra": "^10.0.0", "grant": "^5.4.14", "graphql": "^15.5.0", "graphql-compose": "^9.0.1", - "icc": "^2.0.0", "inquirer": "^8.1.1", "joi": "^17.3.0", "js-yaml": "^4.1.0", @@ -115,7 +117,7 @@ "jsonwebtoken": "^8.5.1", "keyv": "^4.0.3", "knex": "^0.95.6", - "knex-schema-inspector": "^1.5.7", + "knex-schema-inspector": "1.6.0", "liquidjs": "^9.25.0", "lodash": "^4.17.21", "macos-release": "^2.4.1", @@ -125,21 +127,25 @@ "node-cron": "^3.0.0", "node-machine-id": "^1.1.12", "nodemailer": "^6.6.1", + "object-hash": "^2.2.0", "openapi3-ts": "^2.0.0", "ora": "^5.4.0", "otplib": "^12.0.1", - "pino": "^6.11.3", + "pino": "6.13.2", "pino-colada": "^2.1.0", - "pino-http": "^5.5.0", + "pino-http": "5.7.0", "prettier": "^2.3.1", "qs": "^6.9.4", "rate-limiter-flexible": "^2.2.2", "resolve-cwd": "^3.0.0", "rollup": "^2.52.1", - "sharp": "^0.28.3", + "sharp": "^0.29.0", "stream-json": "^1.7.1", + "supertest": "^6.1.6", + "update-check": "^1.5.4", "uuid": "^8.3.2", - "uuid-validate": "0.0.3" + "uuid-validate": "0.0.3", + "wellknown": "^0.5.0" }, "optionalDependencies": { "@keyv/redis": "^2.1.2", @@ -151,43 +157,49 @@ "memcached": "^2.2.2", "mysql": "^2.18.1", "nodemailer-mailgun-transport": "^2.1.3", - "oracledb": "^5.0.0", "pg": "^8.6.0", "sqlite3": "^5.0.2", - "tedious": "^11.0.8" + "tedious": "^12.0.0" }, "gitHead": "24621f3934dc77eb23441331040ed13c676ceffd", "devDependencies": { - "@types/async": "3.2.6", + "@types/async": "3.2.7", "@types/atob": "2.1.2", - "@types/body-parser": "1.19.0", - "@types/busboy": "0.2.3", + "@types/body-parser": "1.19.1", + "@types/busboy": "0.2.4", "@types/cookie-parser": "1.4.2", - "@types/cors": "2.8.10", + "@types/cors": "2.8.12", "@types/destroy": "1.0.0", - "@types/express": "4.17.12", + "@types/express": "4.17.13", "@types/express-pino-logger": "4.0.2", - "@types/express-session": "1.17.3", - "@types/fs-extra": "9.0.11", - "@types/inquirer": "7.3.2", - "@types/js-yaml": "4.0.1", - "@types/json2csv": "5.0.2", - "@types/jsonwebtoken": "8.5.2", - "@types/keyv": "3.1.1", - "@types/lodash": "4.14.170", - "@types/mime-types": "2.1.0", + "@types/express-session": "1.17.4", + "@types/flat": "^5.0.2", + "@types/fs-extra": "9.0.12", + "@types/inquirer": "8.1.1", + "@types/jest": "27.0.1", + "@types/js-yaml": "4.0.3", + "@types/json2csv": "5.0.3", + "@types/jsonwebtoken": "8.5.5", + "@types/keyv": "3.1.3", + "@types/lodash": "4.14.172", + "@types/mime-types": "2.1.1", "@types/ms": "0.7.31", "@types/node": "15.12.2", - "@types/node-cron": "2.0.3", - "@types/nodemailer": "6.4.2", - "@types/qs": "6.9.6", - "@types/sharp": "0.28.3", - "@types/stream-json": "1.7.0", - "@types/uuid": "8.3.0", + "@types/node-cron": "2.0.4", + "@types/nodemailer": "6.4.4", + "@types/object-hash": "2.2.0", + "@types/qs": "6.9.7", + "@types/sharp": "0.29.1", + "@types/stream-json": "1.7.1", + "@types/supertest": "2.0.11", + "@types/uuid": "8.3.1", "@types/uuid-validate": "0.0.1", + "@types/wellknown": "0.5.1", "copyfiles": "2.4.1", "cross-env": "7.0.3", - "ts-node-dev": "1.1.7", - "typescript": "4.3.4" + "jest": "27.2.0", + "ts-jest": "27.0.5", + "ts-node-dev": "1.1.8", 
+ "typescript": "4.4.3" } } diff --git a/api/src/__mocks__/cache.ts b/api/src/__mocks__/cache.ts new file mode 100644 index 0000000000..ff7440d3b1 --- /dev/null +++ b/api/src/__mocks__/cache.ts @@ -0,0 +1,6 @@ +export const cache = { + get: jest.fn().mockResolvedValue(undefined), + set: jest.fn().mockResolvedValue(true), +}; + +export const getCache = jest.fn().mockReturnValue({ cache }); diff --git a/api/src/app.ts b/api/src/app.ts index c5db932afc..6a7cd99c0c 100644 --- a/api/src/app.ts +++ b/api/src/app.ts @@ -4,14 +4,6 @@ import fse from 'fs-extra'; import path from 'path'; import qs from 'qs'; -import { emitAsyncSafe } from './emitter'; -import { initializeExtensions, registerExtensionEndpoints, registerExtensionHooks } from './extensions'; -import { InvalidPayloadException } from './exceptions'; -import { isInstalled, validateDBConnection } from './database'; -import { register as registerWebhooks } from './webhooks'; -import env from './env'; -import logger, { expressLogger } from './logger'; - import activityRouter from './controllers/activity'; import assetsRouter from './controllers/assets'; import authRouter from './controllers/auth'; @@ -35,11 +27,15 @@ import settingsRouter from './controllers/settings'; import usersRouter from './controllers/users'; import utilsRouter from './controllers/utils'; import webhooksRouter from './controllers/webhooks'; - -import { checkIP } from './middleware/check-ip'; -import { session } from './middleware/session'; +import { isInstalled, validateDatabaseConnection, validateDatabaseExtensions, validateMigrations } from './database'; +import { emitAsyncSafe } from './emitter'; +import env from './env'; +import { InvalidPayloadException } from './exceptions'; +import { initializeExtensions, registerExtensionEndpoints, registerExtensionHooks } from './extensions'; +import logger, { expressLogger } from './logger'; import authenticate from './middleware/authenticate'; import cache from './middleware/cache'; +import { checkIP } from './middleware/check-ip'; import cors from './middleware/cors'; import errorHandler from './middleware/error-handler'; import extractToken from './middleware/extract-token'; @@ -49,17 +45,35 @@ import schema from './middleware/schema'; import { track } from './utils/track'; import { validateEnv } from './utils/validate-env'; +import { validateStorage } from './utils/validate-storage'; +import { register as registerWebhooks } from './webhooks'; +import { session } from './middleware/session'; +import { flushCaches } from './cache'; +import { Url } from './utils/url'; export default async function createApp(): Promise<express.Application> { validateEnv(['KEY', 'SECRET']); - await validateDBConnection(); + if (!new Url(env.PUBLIC_URL).isAbsolute()) { + logger.warn('PUBLIC_URL should be a full URL'); + } + + await validateStorage(); + + await validateDatabaseConnection(); + await validateDatabaseExtensions(); if ((await isInstalled()) === false) { logger.error(`Database doesn't have Directus tables installed.`); process.exit(1); } + if ((await validateMigrations()) === false) { + logger.warn(`Database migrations have not all been run`); + } + + await flushCaches(); + await initializeExtensions(); registerExtensionHooks(); @@ -105,21 +119,24 @@ app.use(cors); } - if (!('DIRECTUS_DEV' in process.env)) { + app.get('/', (req, res, next) => { + if (env.ROOT_REDIRECT) { + res.redirect(env.ROOT_REDIRECT); + } else { + next(); + } + }); + + if (env.SERVE_APP) { const adminPath =
require.resolve('@directus/app/dist/index.html'); - const publicUrl = env.PUBLIC_URL.endsWith('/') ? env.PUBLIC_URL : env.PUBLIC_URL + '/'; + const adminUrl = new Url(env.PUBLIC_URL).addPath('admin'); // Set the App's base path according to the APIs public URL let html = fse.readFileSync(adminPath, 'utf-8'); - html = html.replace(//, `\n\t\t`); - - app.get('/', (req, res, next) => { - if (env.ROOT_REDIRECT) { - res.redirect(env.ROOT_REDIRECT); - } else { - next(); - } - }); + html = html.replace( + //, + `\n\t\t` + ); app.get('/admin', (req, res) => res.send(html)); app.use('/admin', express.static(path.join(adminPath, '..'))); @@ -169,12 +186,13 @@ export default async function createApp(): Promise { app.use('/relations', relationsRouter); app.use('/revisions', revisionsRouter); app.use('/roles', rolesRouter); - app.use('/server/', serverRouter); + app.use('/server', serverRouter); app.use('/settings', settingsRouter); app.use('/users', usersRouter); app.use('/utils', utilsRouter); app.use('/webhooks', webhooksRouter); - app.use('/custom', customRouter); + + app.use(customRouter); // Register custom hooks / endpoints await emitAsyncSafe('routes.custom.init.before', { app }); diff --git a/api/src/cache.ts b/api/src/cache.ts index 423a28b323..682bd5cf94 100644 --- a/api/src/cache.ts +++ b/api/src/cache.ts @@ -12,17 +12,23 @@ export function getCache(): { cache: Keyv | null; schemaCache: Keyv | null } { if (env.CACHE_ENABLED === true && cache === null) { validateEnv(['CACHE_NAMESPACE', 'CACHE_TTL', 'CACHE_STORE']); cache = getKeyvInstance(ms(env.CACHE_TTL as string)); - cache.on('error', (err) => logger.error(err)); + cache.on('error', (err) => logger.warn(err, `[cache] ${err}`)); } if (env.CACHE_SCHEMA !== false && schemaCache === null) { schemaCache = getKeyvInstance(typeof env.CACHE_SCHEMA === 'string' ? 
ms(env.CACHE_SCHEMA) : undefined); - schemaCache.on('error', (err) => logger.error(err)); + schemaCache.on('error', (err) => logger.warn(err, `[cache] ${err}`)); } return { cache, schemaCache }; } +export async function flushCaches(): Promise<void> { + const { schemaCache, cache } = getCache(); + await schemaCache?.clear(); + await cache?.clear(); +} + function getKeyvInstance(ttl: number | undefined): Keyv { switch (env.CACHE_STORE) { case 'redis': @@ -43,7 +49,10 @@ function getConfig(store: 'memory' | 'redis' | 'memcache' = 'memory', ttl: numbe if (store === 'redis') { const KeyvRedis = require('@keyv/redis'); - config.store = new KeyvRedis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_')); + + config.store = new KeyvRedis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'), { + commandTimeout: 500, + }); } if (store === 'memcache') { diff --git a/api/src/cli/commands/bootstrap/index.ts b/api/src/cli/commands/bootstrap/index.ts index af305fcc6b..8d30bd1be8 100644 --- a/api/src/cli/commands/bootstrap/index.ts +++ b/api/src/cli/commands/bootstrap/index.ts @@ -1,3 +1,4 @@ +import { Knex } from 'knex'; import { nanoid } from 'nanoid'; import runMigrations from '../../../database/migrations/run'; import installDatabase from '../../../database/seeds/run'; @@ -5,19 +6,16 @@ import env from '../../../env'; import logger from '../../../logger'; import { getSchema } from '../../../utils/get-schema'; import { RolesService, UsersService, SettingsService } from '../../../services'; -import getDatabase, { isInstalled, hasDatabaseConnection } from '../../../database'; +import getDatabase, { isInstalled, validateDatabaseConnection, hasDatabaseConnection } from '../../../database'; import { SchemaOverview } from '../../../types'; export default async function bootstrap({ skipAdminInit }: { skipAdminInit?: boolean }): Promise<void> { logger.info('Initializing bootstrap...'); - if ((await isDatabaseAvailable()) === false) { - logger.error(`Can't connect to the database`); - process.exit(1); - } - const database = getDatabase(); + await waitForDatabase(database); + if ((await isInstalled()) === false) { logger.info('Installing Directus system tables...'); @@ -48,19 +46,20 @@ process.exit(0); } -async function isDatabaseAvailable() { +async function waitForDatabase(database: Knex) { const tries = 5; const secondsBetweenTries = 5; for (let i = 0; i < tries; i++) { - if (await hasDatabaseConnection()) { + if (await hasDatabaseConnection(database)) { return true; } await new Promise((resolve) => setTimeout(resolve, secondsBetweenTries * 1000)); } - return false; + // This will throw and exit the process if the database is not available + await validateDatabaseConnection(database); } async function createDefaultAdmin(schema: SchemaOverview) { diff --git a/api/src/cli/commands/count/index.ts b/api/src/cli/commands/count/index.ts index 62a7021eef..3ee4fed229 100644 --- a/api/src/cli/commands/count/index.ts +++ b/api/src/cli/commands/count/index.ts @@ -1,12 +1,11 @@ -/* eslint-disable no-console */ - import getDatabase from '../../../database'; +import logger from '../../../logger'; export default async function count(collection: string): Promise<void> { const database = getDatabase(); if (!collection) { - console.error('Collection is required'); + logger.error('Collection is required'); process.exit(1); } try { const records = await database(collection).count('*', {
as: 'count' }); const count = Number(records[0].count); - console.log(count); + process.stdout.write(`${count}\n`); database.destroy(); process.exit(0); - } catch (err) { - console.error(err); + } catch (err: any) { + logger.error(err); database.destroy(); process.exit(1); } diff --git a/api/src/cli/commands/database/install.ts b/api/src/cli/commands/database/install.ts index 48fbc1c105..ea4d367c41 100644 --- a/api/src/cli/commands/database/install.ts +++ b/api/src/cli/commands/database/install.ts @@ -1,8 +1,7 @@ -/* eslint-disable no-console */ - import runMigrations from '../../../database/migrations/run'; import installSeeds from '../../../database/seeds/run'; import getDatabase from '../../../database'; +import logger from '../../../logger'; export default async function start(): Promise<void> { const database = getDatabase(); @@ -12,8 +11,8 @@ await runMigrations(database, 'latest'); database.destroy(); process.exit(0); - } catch (err) { - console.log(err); + } catch (err: any) { + logger.error(err); database.destroy(); process.exit(1); } diff --git a/api/src/cli/commands/database/migrate.ts b/api/src/cli/commands/database/migrate.ts index 8c5b32a17f..caae05408d 100644 --- a/api/src/cli/commands/database/migrate.ts +++ b/api/src/cli/commands/database/migrate.ts @@ -1,25 +1,24 @@ -/* eslint-disable no-console */ - import run from '../../../database/migrations/run'; import getDatabase from '../../../database'; +import logger from '../../../logger'; export default async function migrate(direction: 'latest' | 'up' | 'down'): Promise<void> { const database = getDatabase(); try { - console.log('✨ Running migrations...'); + logger.info('Running migrations...'); await run(database, direction); if (direction === 'down') { - console.log('✨ Downgrade successful'); + logger.info('Downgrade successful'); } else { - console.log('✨ Database up to date'); + logger.info('Database up to date'); } database.destroy(); process.exit(); - } catch (err) { - console.log(err); + } catch (err: any) { + logger.error(err); database.destroy(); process.exit(1); } diff --git a/api/src/cli/commands/init/index.ts b/api/src/cli/commands/init/index.ts index ed5aca4dad..a20fdc82e4 100644 --- a/api/src/cli/commands/init/index.ts +++ b/api/src/cli/commands/init/index.ts @@ -1,6 +1,3 @@ -/* eslint-disable no-console */ - -import argon2 from 'argon2'; import chalk from 'chalk'; import execa from 'execa'; import inquirer from 'inquirer'; @@ -13,6 +10,7 @@ import createDBConnection, { Credentials } from '../../utils/create-db-connection'; import createEnv from '../../utils/create-env'; import { drivers, getDriverForClient } from '../../utils/drivers'; import { databaseQuestions } from './questions'; +import { generateHash } from '../../../utils/generate-hash'; export default async function init(): Promise<void> { const rootPath = process.cwd(); @@ -48,20 +46,17 @@ try { await runSeed(db); await runMigrations(db, 'latest'); - } catch (err) { - console.log(); - console.log('Something went wrong while seeding the database:'); - console.log(); - console.log(`${chalk.red(`[${err.code || 'Error'}]`)} ${err.message}`); - console.log(); - console.log('Please try again'); - console.log(); + } catch (err: any) { + process.stdout.write('\nSomething went wrong while seeding the database:\n'); + process.stdout.write(`\n${chalk.red(`[${err.code || 'Error'}]`)} ${err.message}\n`); + process.stdout.write('\nPlease try again\n\n'); + attemptsRemaining--; if
(attemptsRemaining > 0) { return await trySeed(); } else { - console.log(`Couldn't seed the database. Exiting.`); + process.stdout.write("Couldn't seed the database. Exiting.\n"); process.exit(1); } } @@ -71,10 +66,7 @@ await createEnv(dbClient, credentials!, rootPath); - console.log(); - console.log(); - - console.log(`Create your first admin user:`); + process.stdout.write('\nCreate your first admin user:\n\n'); const firstUser = await inquirer.prompt([ { @@ -95,7 +87,7 @@ }, ]); - firstUser.password = await argon2.hash(firstUser.password); + firstUser.password = await generateHash(firstUser.password); const userID = uuidV4(); const roleID = uuidV4(); @@ -120,15 +112,11 @@ await db.destroy(); - console.log(` -Your project has been created at ${chalk.green(rootPath)}. - -The configuration can be found in ${chalk.green(rootPath + '/.env')} - -Start Directus by running: - ${chalk.blue('cd')} ${rootPath} - ${chalk.blue('npx directus')} start -`); + process.stdout.write(`\nYour project has been created at ${chalk.green(rootPath)}.\n`); + process.stdout.write(`\nThe configuration can be found in ${chalk.green(rootPath + '/.env')}\n`); + process.stdout.write(`\nStart Directus by running:\n`); + process.stdout.write(` ${chalk.blue('cd')} ${rootPath}\n`); + process.stdout.write(` ${chalk.blue('npx directus')} start\n`); process.exit(0); } diff --git a/api/src/cli/commands/init/questions.ts b/api/src/cli/commands/init/questions.ts index 26516b8667..3ab6468786 100644 --- a/api/src/cli/commands/init/questions.ts +++ b/api/src/cli/commands/init/questions.ts @@ -50,6 +50,13 @@ const password = (): Record<string, any> => ({ mask: '*', }); +const encrypt = (): Record<string, any> => ({ + type: 'confirm', + name: 'options__encrypt', + message: 'Encrypt Connection:', + default: false, +}); + const ssl = (): Record<string, any> => ({ type: 'confirm', name: 'ssl', @@ -62,5 +69,5 @@ export const databaseQuestions = { mysql: [host, port, database, user, password], pg: [host, port, database, user, password, ssl], oracledb: [host, port, database, user, password], - mssql: [host, port, database, user, password], + mssql: [host, port, database, user, password, encrypt], }; diff --git a/api/src/cli/commands/roles/create.ts b/api/src/cli/commands/roles/create.ts index ab60884feb..acf6e0ddff 100644 --- a/api/src/cli/commands/roles/create.ts +++ b/api/src/cli/commands/roles/create.ts @@ -1,14 +1,13 @@ -/* eslint-disable no-console */ - import { getSchema } from '../../../utils/get-schema'; import { RolesService } from '../../../services'; import getDatabase from '../../../database'; +import logger from '../../../logger'; export default async function rolesCreate({ role: name, admin }: { role: string; admin: boolean }): Promise<void> { const database = getDatabase(); if (!name) { - console.error('Name is required'); + logger.error('Name is required'); process.exit(1); } try { const schema = await getSchema(); const service = new RolesService({ schema: schema, knex: database }); const id = await service.createOne({ name, admin_access: admin }); - console.log(id); + process.stdout.write(`${String(id)}\n`); database.destroy(); process.exit(0); - } catch (err) { - console.error(err); + } catch (err: any) { + logger.error(err); process.exit(1); } } diff --git a/api/src/cli/commands/users/create.ts b/api/src/cli/commands/users/create.ts index 348eca565e..b07d4080c6
diff --git a/api/src/cli/commands/users/create.ts b/api/src/cli/commands/users/create.ts index 348eca565e..b07d4080c6 100644 --- a/api/src/cli/commands/users/create.ts +++ b/api/src/cli/commands/users/create.ts @@ -1,8 +1,7 @@ -/* eslint-disable no-console */ - import { getSchema } from '../../../utils/get-schema'; import { UsersService } from '../../../services'; import getDatabase from '../../../database'; +import logger from '../../../logger'; export default async function usersCreate({ email, @@ -16,7 +15,7 @@ export default async function usersCreate({ const database = getDatabase(); if (!email || !password || !role) { - console.error('Email, password, role are required'); + logger.error('Email, password, role are required'); process.exit(1); } @@ -25,11 +24,11 @@ export default async function usersCreate({ const service = new UsersService({ schema, knex: database }); const id = await service.createOne({ email, password, role, status: 'active' }); - console.log(id); + process.stdout.write(`${String(id)}\n`); database.destroy(); process.exit(0); - } catch (err) { - console.error(err); + } catch (err: any) { + logger.error(err); process.exit(1); } } diff --git a/api/src/cli/commands/users/passwd.ts b/api/src/cli/commands/users/passwd.ts index ca40cbfba5..25867d55a9 100644 --- a/api/src/cli/commands/users/passwd.ts +++ b/api/src/cli/commands/users/passwd.ts @@ -1,35 +1,34 @@ -/* eslint-disable no-console */ - -import argon2 from 'argon2'; import { getSchema } from '../../../utils/get-schema'; +import { generateHash } from '../../../utils/generate-hash'; import { UsersService } from '../../../services'; import getDatabase from '../../../database'; +import logger from '../../../logger'; export default async function usersPasswd({ email, password }: { email?: string; password?: string }): Promise<void> { const database = getDatabase(); if (!email || !password) { - console.error('Email and password are required'); + logger.error('Email and password are required'); process.exit(1); } try { - const passwordHashed = await argon2.hash(password); + const passwordHashed = await generateHash(password); const schema = await getSchema(); const service = new UsersService({ schema, knex: database }); const user = await service.knex.select('id').from('directus_users').where({ email }).first(); if (user) { await service.knex('directus_users').update({ password: passwordHashed }).where({ id: user.id }); - console.log(`Password is updated for user ${user.id}`); + logger.info(`Password is updated for user ${user.id}`); } else { - console.log('No such user by this email'); + logger.error('No such user by this email'); } await database.destroy(); process.exit(user ? 0 : 1); - } catch (err) { - console.error(err); + } catch (err: any) { + logger.error(err); process.exit(1); } }
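The generateHash utility replacing the direct argon2.hash calls is not itself part of this diff; a plausible minimal sketch, assuming it simply centralizes the argon2 invocation so hashing options live in one place:

// Hypothetical sketch of api/src/utils/generate-hash.ts (not shown in this diff).
import argon2 from 'argon2';

export function generateHash(stringToHash: string): Promise<string> {
	// One call site for argon2 means hash options (memory cost, iterations, ...)
	// can later be made configurable without touching every caller.
	return argon2.hash(stringToHash);
}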
diff --git a/api/src/cli/index.test.ts b/api/src/cli/index.test.ts new file mode 100644 index 0000000000..d1af25ed8a --- /dev/null +++ b/api/src/cli/index.test.ts @@ -0,0 +1,62 @@ +import { Command } from 'commander'; +import { Extension } from '@directus/shared/types'; +import { createCli } from '.'; + +jest.mock('../env', () => ({ + ...jest.requireActual('../env').default, + LOG_LEVEL: 'silent', + EXTENSIONS_PATH: '', + SERVE_APP: false, +})); + +jest.mock('@directus/shared/utils/node/get-extensions', () => ({ + getPackageExtensions: jest.fn(() => Promise.resolve([])), + getLocalExtensions: jest.fn(() => Promise.resolve([customCliExtension])), +})); + +jest.mock(`/hooks/custom-cli/index.js`, () => () => customCliHook, { virtual: true }); + +const customCliExtension: Extension = { + path: `/hooks/custom-cli`, + name: 'custom-cli', + type: 'hook', + entrypoint: 'index.js', + local: true, + root: true, +}; + +const beforeHook = jest.fn(); +const afterAction = jest.fn(); +const afterHook = jest.fn(({ program }: { program: Command }) => program.command('custom').action(afterAction)); +const customCliHook = { 'cli.init.before': beforeHook, 'cli.init.after': afterHook }; + +const writeOut = jest.fn(); +const writeErr = jest.fn(); + +const setup = async () => { + const program = await createCli(); + program.exitOverride(); + program.configureOutput({ writeOut, writeErr }); + return program; +}; + +beforeEach(jest.clearAllMocks); + +describe('cli hooks', () => { + test('should call hooks before and after creating the cli', async () => { + const program = await setup(); + + expect(beforeHook).toHaveBeenCalledTimes(1); + expect(beforeHook).toHaveBeenCalledWith({ program }); + + expect(afterHook).toHaveBeenCalledTimes(1); + expect(afterHook).toHaveBeenCalledWith({ program }); + }); + + test('should be able to add a custom cli command', async () => { + const program = await setup(); + program.parseAsync(['custom'], { from: 'user' }); + + expect(afterAction).toHaveBeenCalledTimes(1); + }); }); diff --git a/api/src/cli/index.ts b/api/src/cli/index.ts index 6e657020ef..b09e1ec57a 100644 --- a/api/src/cli/index.ts +++ b/api/src/cli/index.ts @@ -1,9 +1,7 @@ -#!/usr/bin/env node - -/* eslint-disable no-console */ - -import { program } from 'commander'; +import { Command } from 'commander'; import start from '../start'; +import { emitAsyncSafe } from '../emitter'; +import { initializeExtensions, registerExtensionHooks } from '../extensions'; import bootstrap from './commands/bootstrap'; import count from './commands/count'; import dbInstall from './commands/database/install'; @@ -15,61 +13,69 @@ import usersPasswd from './commands/users/passwd'; const pkg = require('../../package.json'); -program.name('directus').usage('[command] [options]'); -program.version(pkg.version, '-v, --version'); +export async function createCli(): Promise<Command> { + const program = new Command(); -program.command('start').description('Start the Directus API').action(start); -program.command('init').description('Create a new Directus Project').action(init); + await initializeExtensions(); + registerExtensionHooks(); -const dbCommand = program.command('database'); -dbCommand.command('install').description('Install the database').action(dbInstall); -dbCommand - .command('migrate:latest') - .description('Upgrade the database') - .action(() => dbMigrate('latest')); -dbCommand - .command('migrate:up') - .description('Upgrade the
database') - .action(() => dbMigrate('up')); -dbCommand - .command('migrate:down') - .description('Downgrade the database') - .action(() => dbMigrate('down')); + await emitAsyncSafe('cli.init.before', { program }); -const usersCommand = program.command('users'); + program.name('directus').usage('[command] [options]'); + program.version(pkg.version, '-v, --version'); -usersCommand - .command('create') - .description('Create a new user') - .option('--email <value>', `user's email`) - .option('--password <value>', `user's password`) - .option('--role <value>', `user's role`) - .action(usersCreate); + program.command('start').description('Start the Directus API').action(start); + program.command('init').description('Create a new Directus Project').action(init); -usersCommand - .command('passwd') - .description('Set user password') - .option('--email <value>', `user's email`) - .option('--password <value>', `user's new password`) - .action(usersPasswd); + const dbCommand = program.command('database'); + dbCommand.command('install').description('Install the database').action(dbInstall); + dbCommand + .command('migrate:latest') + .description('Upgrade the database') + .action(() => dbMigrate('latest')); + dbCommand + .command('migrate:up') + .description('Upgrade the database') + .action(() => dbMigrate('up')); + dbCommand + .command('migrate:down') + .description('Downgrade the database') + .action(() => dbMigrate('down')); -const rolesCommand = program.command('roles'); -rolesCommand - .command('create') - .description('Create a new role') - .option('--role <value>', `name for the role`) - .option('--admin', `whether or not the role has admin access`) - .action(rolesCreate); + const usersCommand = program.command('users'); -program.command('count <collection>').description('Count the amount of items in a given collection').action(count); + usersCommand + .command('create') + .description('Create a new user') + .option('--email <value>', `user's email`) + .option('--password <value>', `user's password`) + .option('--role <value>', `user's role`) + .action(usersCreate); -program - .command('bootstrap') - .description('Initialize or update the database') - .option('--skipAdminInit', 'Skips the creation of the default Admin Role and User') - .action(bootstrap); + usersCommand + .command('passwd') + .description('Set user password') + .option('--email <value>', `user's email`) + .option('--password <value>', `user's new password`) + .action(usersPasswd); -program.parseAsync(process.argv).catch((err) => { - console.error(err); - process.exit(1); -}); + const rolesCommand = program.command('roles'); + rolesCommand + .command('create') + .description('Create a new role') + .option('--role <value>', `name for the role`) + .option('--admin', `whether or not the role has admin access`) + .action(rolesCreate); + + program.command('count <collection>').description('Count the amount of items in a given collection').action(count); + + program + .command('bootstrap') + .description('Initialize or update the database') + .option('--skipAdminInit', 'Skips the creation of the default Admin Role and User') + .action(bootstrap); + + await emitAsyncSafe('cli.init.after', { program }); + + return program; +} diff --git a/api/src/cli/run.ts b/api/src/cli/run.ts new file mode 100644 index 0000000000..5bbf0fd1ea --- /dev/null +++ b/api/src/cli/run.ts @@ -0,0 +1,9 @@ +import { createCli } from './index'; + +createCli() + .then((program) => program.parseAsync(process.argv)) + .catch((err) => { + // eslint-disable-next-line no-console + console.error(err); + process.exit(1); + });
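The new cli.init.before / cli.init.after events let hook extensions extend the CLI, which is what index.test.ts above exercises. A sketch of such a hook (the file path and command name are hypothetical):

// Hypothetical extensions/hooks/custom-cli/index.js, mirroring the shape mocked in index.test.ts.
module.exports = function registerHook() {
	return {
		'cli.init.after': ({ program }) => {
			program
				.command('hello')
				.description('Example command contributed by a hook extension')
				.action(() => process.stdout.write('Hello from a hook\n'));
		},
	};
};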
diff --git a/api/src/cli/utils/create-db-connection.ts b/api/src/cli/utils/create-db-connection.ts index 13d6273381..2eac7fc6df 100644 --- a/api/src/cli/utils/create-db-connection.ts +++ b/api/src/cli/utils/create-db-connection.ts @@ -9,6 +9,7 @@ export type Credentials = { user?: string; password?: string; ssl?: boolean; + options__encrypt?: boolean; }; export default function createDBConnection( client: 'sqlite3' | 'mysql' | 'pg' | 'oracledb' | 'mssql', @@ -23,26 +24,26 @@ export default function createDBConnection( filename: filename as string, }; } else { - if (client !== 'pg') { - const { host, port, database, user, password } = credentials as Credentials; + const { host, port, database, user, password } = credentials as Credentials; - connection = { - host: host, - port: Number(port), - database: database, - user: user, - password: password, - }; - } else { - const { host, port, database, user, password, ssl } = credentials as Credentials; + connection = { + host: host, + port: Number(port), + database: database, + user: user, + password: password, + }; - connection = { - host: host, - port: Number(port), - database: database, - user: user, - password: password, - ssl: ssl, + if (client === 'pg') { + const { ssl } = credentials as Credentials; + connection['ssl'] = ssl; + } + + if (client === 'mssql') { + const { options__encrypt } = credentials as Credentials; + + (connection as Knex.MsSqlConnectionConfig)['options'] = { + encrypt: options__encrypt, }; } } diff --git a/api/src/cli/utils/create-env/env-stub.liquid b/api/src/cli/utils/create-env/env-stub.liquid index 21ca7e197b..d381a60185 100644 --- a/api/src/cli/utils/create-env/env-stub.liquid +++ b/api/src/cli/utils/create-env/env-stub.liquid @@ -38,6 +38,7 @@ ACCESS_TOKEN_TTL="15m" REFRESH_TOKEN_TTL="7d" REFRESH_TOKEN_COOKIE_SECURE=false REFRESH_TOKEN_COOKIE_SAME_SITE="lax" +REFRESH_TOKEN_COOKIE_NAME="directus_refresh_token" #################################################################################################### ## SSO (OAuth) Providers diff --git a/api/src/constants.ts b/api/src/constants.ts index 2fdbd23fb0..3bbf2d5e45 100644 --- a/api/src/constants.ts +++ b/api/src/constants.ts @@ -1,47 +1,45 @@ -import { Transformation } from './types'; +import { TransformationParams } from './types'; -export const SYSTEM_ASSET_ALLOW_LIST: Transformation[] = [ +export const SYSTEM_ASSET_ALLOW_LIST: TransformationParams[] = [ { key: 'system-small-cover', - width: 64, - height: 64, - fit: 'cover', + transforms: [['resize', { width: 64, height: 64, fit: 'cover' }]], }, { key: 'system-small-contain', - width: 64, - fit: 'contain', + transforms: [['resize', { width: 64, fit: 'contain' }]], }, { key: 'system-medium-cover', - width: 300, - height: 300, - fit: 'cover', + transforms: [['resize', { width: 300, height: 300, fit: 'cover' }]], }, { key: 'system-medium-contain', - width: 300, - fit: 'contain', + transforms: [['resize', { width: 300, fit: 'contain' }]], }, { key: 'system-large-cover', - width: 800, - height: 600, - fit: 'cover', + transforms: [['resize', { width: 800, height: 800, fit: 'cover' }]], }, { key: 'system-large-contain', - width: 800, - fit: 'contain', + transforms: [['resize', { width: 800, fit: 'contain' }]], }, ]; -export const ASSET_TRANSFORM_QUERY_KEYS = ['key', 'width', 'height', 'fit', 'withoutEnlargement', 'quality']; +export const ASSET_TRANSFORM_QUERY_KEYS = [ + 'key', + 'transforms', + 'width', + 'height', + 'format', + 'fit', + 'quality', + 'withoutEnlargement', +]; export const FILTER_VARIABLES = ['$NOW',
'$CURRENT_USER', '$CURRENT_ROLE']; export const ALIAS_TYPES = ['alias', 'o2m', 'm2m', 'm2a', 'files', 'files', 'translations']; export const COLUMN_TRANSFORMS = ['year', 'month', 'day', 'weekday', 'hour', 'minute', 'second']; - -export const REGEX_BETWEEN_PARENS = /\(([^)]+)\)/; diff --git a/api/src/controllers/activity.ts b/api/src/controllers/activity.ts index e5fe4d1f30..398e883889 100644 --- a/api/src/controllers/activity.ts +++ b/api/src/controllers/activity.ts @@ -99,7 +99,7 @@ router.post( res.locals.payload = { data: record || null, }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -138,7 +138,7 @@ router.patch( res.locals.payload = { data: record || null, }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/controllers/assets.ts b/api/src/controllers/assets.ts index b64775af98..b48e939aa8 100644 --- a/api/src/controllers/assets.ts +++ b/api/src/controllers/assets.ts @@ -10,7 +10,7 @@ import { ForbiddenException, InvalidQueryException, RangeNotSatisfiableException import useCollection from '../middleware/use-collection'; import { AssetsService, PayloadService } from '../services'; import storage from '../storage'; -import { Transformation } from '../types/assets'; +import { TransformationParams, TransformationMethods, TransformationPreset } from '../types/assets'; import asyncHandler from '../utils/async-handler'; const router = Router(); @@ -68,26 +68,63 @@ router.get( if ('key' in transformation && Object.keys(transformation).length > 1) { throw new InvalidQueryException(`You can't combine the "key" query parameter with any other transformation.`); } - if ('quality' in transformation && (Number(transformation.quality) < 1 || Number(transformation.quality) > 100)) { - throw new InvalidQueryException(`"quality" Parameter has to between 1 to 100`); + + if ('transforms' in transformation) { + let transforms: unknown; + + // Try parse the JSON array + try { + transforms = JSON.parse(transformation['transforms'] as string); + } catch { + throw new InvalidQueryException(`"transforms" Parameter needs to be a JSON array of allowed transformations.`); + } + + // Check if it is actually an array. 
+ if (!Array.isArray(transforms)) { + throw new InvalidQueryException(`"transforms" Parameter needs to be a JSON array of allowed transformations.`); + } + + // Check against ASSETS_TRANSFORM_MAX_OPERATIONS + if (transforms.length > Number(env.ASSETS_TRANSFORM_MAX_OPERATIONS)) { + throw new InvalidQueryException( + `"transforms" Parameter is only allowed ${env.ASSETS_TRANSFORM_MAX_OPERATIONS} transformations.` + ); + } + + // Check the transformations are valid + transforms.forEach((transform) => { + const name = transform[0]; + + if (!TransformationMethods.includes(name)) { + throw new InvalidQueryException(`"transforms" Parameter does not allow "${name}" as a transformation.`); + } + }); + + transformation.transforms = transforms; } - const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key); + const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key!); const allKeys: string[] = [ ...systemKeys, - ...(assetSettings.storage_asset_presets || []).map((transformation: Transformation) => transformation.key), + ...(assetSettings.storage_asset_presets || []).map((transformation: TransformationParams) => transformation.key), ]; // For use in the next request handler res.locals.shortcuts = [...SYSTEM_ASSET_ALLOW_LIST, ...(assetSettings.storage_asset_presets || [])]; res.locals.transformation = transformation; - if (Object.keys(transformation).length === 0) { + if ( + Object.keys(transformation).length === 0 || + ('transforms' in transformation && transformation.transforms!.length === 0) + ) { return next(); } + if (assetSettings.storage_asset_transform === 'all') { - if (transformation.key && allKeys.includes(transformation.key as string) === false) + if (transformation.key && allKeys.includes(transformation.key as string) === false) { throw new InvalidQueryException(`Key "${transformation.key}" isn't configured.`); + } + return next(); } else if (assetSettings.storage_asset_transform === 'presets') { if (allKeys.includes(transformation.key as string)) return next(); @@ -107,9 +144,9 @@ router.get( schema: req.schema, }); - const transformation: Transformation = res.locals.transformation.key - ? res.locals.shortcuts.find( - (transformation: Transformation) => transformation.key === res.locals.transformation.key + const transformation: TransformationParams | TransformationPreset = res.locals.transformation.key + ? 
(res.locals.shortcuts as TransformationPreset[]).find( + (transformation) => transformation.key === res.locals.transformation.key ) : res.locals.transformation;
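For reference, the transforms parameter validated above expects a JSON-encoded array of [method, ...args] entries. A hedged usage sketch (the file id and operation list are placeholders; it assumes 'resize' and 'blur' are among the allowed TransformationMethods):

// Hypothetical client-side construction of an asset URL.
const fileId = '4f4b14fa-a43a-46d0-b7ad-90af5919bebb'; // placeholder UUID
const transforms = JSON.stringify([
	['resize', { width: 300, height: 300, fit: 'cover' }],
	['blur', 10],
]);

// Results in GET /assets/:id?transforms=[["resize",{...}],["blur",10]], URL-encoded.
const url = `/assets/${fileId}?transforms=${encodeURIComponent(transforms)}`;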
diff --git a/api/src/controllers/auth.ts b/api/src/controllers/auth.ts index 2f6229a9fd..ab41d3729a 100644 --- a/api/src/controllers/auth.ts +++ b/api/src/controllers/auth.ts @@ -11,7 +11,8 @@ import { respond } from '../middleware/respond'; import { AuthenticationService, UsersService } from '../services'; import asyncHandler from '../utils/async-handler'; import getEmailFromProfile from '../utils/get-email-from-profile'; -import { toArray } from '../utils/to-array'; +import { toArray } from '@directus/shared/utils'; +import logger from '../logger'; const router = Router(); @@ -59,7 +60,7 @@ router.post( } if (mode === 'cookie') { - res.cookie('directus_refresh_token', refreshToken, { + res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, { httpOnly: true, domain: env.REFRESH_TOKEN_COOKIE_DOMAIN, maxAge: ms(env.REFRESH_TOKEN_TTL as string), @@ -88,7 +89,7 @@ router.post( schema: req.schema, }); - const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token; + const currentRefreshToken = req.body.refresh_token || req.cookies[env.REFRESH_TOKEN_COOKIE_NAME]; if (!currentRefreshToken) { throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`); @@ -107,7 +108,7 @@ router.post( } if (mode === 'cookie') { - res.cookie('directus_refresh_token', refreshToken, { + res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, { httpOnly: true, domain: env.REFRESH_TOKEN_COOKIE_DOMAIN, maxAge: ms(env.REFRESH_TOKEN_TTL as string), @@ -136,7 +137,7 @@ router.post( schema: req.schema, }); - const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token; + const currentRefreshToken = req.body.refresh_token || req.cookies[env.REFRESH_TOKEN_COOKIE_NAME]; if (!currentRefreshToken) { throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`); @@ -144,8 +145,8 @@ router.post( await authenticationService.logout(currentRefreshToken); - if (req.cookies.directus_refresh_token) { - res.clearCookie('directus_refresh_token', { + if (req.cookies[env.REFRESH_TOKEN_COOKIE_NAME]) { + res.clearCookie(env.REFRESH_TOKEN_COOKIE_NAME, { httpOnly: true, domain: env.REFRESH_TOKEN_COOKIE_DOMAIN, secure: env.REFRESH_TOKEN_COOKIE_SECURE ?? false, @@ -161,7 +162,7 @@ router.post( router.post( '/password/request', asyncHandler(async (req, res, next) => { - if (!req.body.email) { + if (typeof req.body.email !== 'string') { throw new InvalidPayloadException(`"email" field is required.`); } @@ -176,10 +177,11 @@ router.post( try { await service.requestPasswordReset(req.body.email, req.body.reset_url || null); return next(); - } catch (err) { + } catch (err: any) { if (err instanceof InvalidPayloadException) { throw err; } else { + logger.warn(err, `[email] ${err}`); return next(); } } @@ -190,11 +192,11 @@ router.post( router.post( '/password/reset', asyncHandler(async (req, res, next) => { - if (!req.body.token) { + if (typeof req.body.token !== 'string') { throw new InvalidPayloadException(`"token" field is required.`); } - if (!req.body.password) { + if (typeof req.body.password !== 'string') { throw new InvalidPayloadException(`"password" field is required.`); } @@ -318,8 +320,11 @@ router.get( authResponse = await authenticationService.authenticate({ email, }); - } catch (error) { + } catch (error: any) { emitStatus('fail'); + + logger.warn(error); + if (redirect) { let reason = 'UNKNOWN_EXCEPTION'; @@ -340,7 +345,7 @@ router.get( emitStatus('success'); if (redirect) { - res.cookie('directus_refresh_token', refreshToken, { + res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, { httpOnly: true, domain: env.REFRESH_TOKEN_COOKIE_DOMAIN, maxAge: ms(env.REFRESH_TOKEN_TTL as string), diff --git a/api/src/controllers/collections.ts b/api/src/controllers/collections.ts index 03d4731556..1868f4e56f 100644 --- a/api/src/controllers/collections.ts +++ b/api/src/controllers/collections.ts @@ -88,7 +88,7 @@ router.patch( try { const collection = await collectionsService.readOne(req.params.collection); res.locals.payload = { data: collection || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/controllers/extensions.ts b/api/src/controllers/extensions.ts index 7bfa287604..72a1708815 100644 --- a/api/src/controllers/extensions.ts +++ b/api/src/controllers/extensions.ts @@ -3,18 +3,17 @@ import asyncHandler from '../utils/async-handler'; import { RouteNotFoundException } from '../exceptions'; import { listExtensions, getAppExtensionSource } from '../extensions'; import { respond } from '../middleware/respond'; -import { depluralize } from '@directus/shared/utils'; -import { AppExtensionType, Plural } from '@directus/shared/types'; -import { APP_EXTENSION_TYPES } from '@directus/shared/constants'; +import { depluralize, isAppExtension } from '@directus/shared/utils'; +import { Plural } from '@directus/shared/types'; const router = Router(); router.get( '/:type', asyncHandler(async (req, res, next) => { - const type = depluralize(req.params.type as Plural<AppExtensionType>); + const type = depluralize(req.params.type as Plural<string>); - if (APP_EXTENSION_TYPES.includes(type) === false) { + if (!isAppExtension(type)) { throw new RouteNotFoundException(req.path); } @@ -32,9 +31,9 @@ router.get( router.get( '/:type/index.js', asyncHandler(async (req, res) => { - const type = depluralize(req.params.type as Plural<AppExtensionType>); + const type = depluralize(req.params.type as Plural<string>); - if (APP_EXTENSION_TYPES.includes(type) === false) { + if (!isAppExtension(type)) { throw new RouteNotFoundException(req.path); }
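isAppExtension replaces the inline APP_EXTENSION_TYPES.includes(...) checks with a shared type guard. Its definition lives in @directus/shared and is not part of this diff; a plausible sketch of its shape:

// Assumed shape of the guard from @directus/shared/utils (sketch, not the verbatim source).
import { APP_EXTENSION_TYPES } from '@directus/shared/constants';
import { AppExtensionType } from '@directus/shared/types';

export function isAppExtension(type: string): type is AppExtensionType {
	// The type predicate lets callers keep the narrowed union instead of a bare boolean.
	return (APP_EXTENSION_TYPES as readonly string[]).includes(type);
}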
diff --git a/api/src/controllers/fields.ts b/api/src/controllers/fields.ts index 2d45235b2b..8409a1c71c 100644 --- a/api/src/controllers/fields.ts +++ b/api/src/controllers/fields.ts @@ -6,7 +6,8 @@ import validateCollection from '../middleware/collection-exists'; import { respond } from '../middleware/respond'; import useCollection from '../middleware/use-collection'; import { FieldsService } from '../services/fields'; -import { Field, types } from '../types'; +import { Field, Type } from '@directus/shared/types'; +import { TYPES } from '@directus/shared/constants'; import asyncHandler from '../utils/async-handler'; const router = Router(); @@ -65,7 +66,7 @@ const newFieldSchema = Joi.object({ collection: Joi.string().optional(), field: Joi.string().required(), type: Joi.string() - .valid(...types, ...ALIAS_TYPES) + .valid(...TYPES, ...ALIAS_TYPES) .allow(null) .optional(), schema: Joi.object({ @@ -93,14 +94,14 @@ router.post( throw new InvalidPayloadException(error.message); } - const field: Partial<Field> & { field: string; type: typeof types[number] | null } = req.body; + const field: Partial<Field> & { field: string; type: Type | null } = req.body; await service.createField(req.params.collection, field); try { const createdField = await service.readOne(req.params.collection, field.field); res.locals.payload = { data: createdField || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -137,7 +138,7 @@ router.patch( results.push(updatedField); res.locals.payload = { data: results || null }; } - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -152,7 +153,7 @@ router.patch( const updateSchema = Joi.object({ type: Joi.string() - .valid(...types, ...ALIAS_TYPES) + .valid(...TYPES, ...ALIAS_TYPES) .allow(null), schema: Joi.object({ default_value: Joi.any(), @@ -183,7 +184,7 @@ router.patch( throw new InvalidPayloadException(`You need to provide "type" when providing "schema".`); } - const fieldData: Partial<Field> & { field: string; type: typeof types[number] } = req.body; + const fieldData: Partial<Field> & { field: string; type: Type } = req.body; if (!fieldData.field) fieldData.field = req.params.field; @@ -192,7 +193,7 @@ router.patch( try { const updatedField = await service.readOne(req.params.collection, req.params.field); res.locals.payload = { data: updatedField || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/controllers/files.ts b/api/src/controllers/files.ts index 88982d5b55..9fe6753ab1 100644 --- a/api/src/controllers/files.ts +++ b/api/src/controllers/files.ts @@ -11,7 +11,7 @@ import { validateBatch } from '../middleware/validate-batch'; import { FilesService, MetaService } from '../services'; import { File, PrimaryKey } from '../types'; import asyncHandler from '../utils/async-handler'; -import { toArray } from '../utils/to-array'; +import { toArray } from '@directus/shared/utils'; const router = express.Router(); @@ -33,7 +33,7 @@ const multipartHandler = asyncHandler(async (req, res, next) => { */ let disk: string = toArray(env.STORAGE_LOCATIONS)[0]; - const payload: Partial<File> = {}; + let payload: Partial<File> = {}; let fileCount = 0; busboy.on('field', (fieldname: keyof File, val) => { @@ -55,10 +55,6 @@ const multipartHandler = asyncHandler(async (req, res, next) => { payload.title = formatTitle(path.parse(filename).name); } - if (req.accountability?.user) { - payload.uploaded_by = req.accountability.user; - } - const payloadWithRequiredFields: Partial<File> & { filename_download: string; type: string;
storage: payload.storage || disk, }; + // Clear the payload for the next to-be-uploaded file + payload = {}; + try { const primaryKey = await service.uploadOne(fileStream, payloadWithRequiredFields, existingPrimaryKey); savedFiles.push(primaryKey); tryDone(); - } catch (error) { + } catch (error: any) { busboy.emit('error', error); } }); @@ -128,7 +127,7 @@ router.post( data: record, }; } - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -165,7 +164,7 @@ router.post( try { const record = await service.readOne(primaryKey, req.sanitizedQuery); res.locals.payload = { data: record || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -243,7 +242,7 @@ router.patch( try { const result = await service.readMany(keys, req.sanitizedQuery); res.locals.payload = { data: result || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -270,7 +269,7 @@ router.patch( try { const record = await service.readOne(req.params.pk, req.sanitizedQuery); res.locals.payload = { data: record || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/controllers/folders.ts b/api/src/controllers/folders.ts index 46c44cf878..049e8afae0 100644 --- a/api/src/controllers/folders.ts +++ b/api/src/controllers/folders.ts @@ -37,7 +37,7 @@ router.post( const record = await service.readOne(savedKeys[0], req.sanitizedQuery); res.locals.payload = { data: record }; } - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -114,7 +114,7 @@ router.patch( try { const result = await service.readMany(keys, req.sanitizedQuery); res.locals.payload = { data: result || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -140,7 +140,7 @@ router.patch( try { const record = await service.readOne(primaryKey, req.sanitizedQuery); res.locals.payload = { data: record || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/controllers/items.ts b/api/src/controllers/items.ts index 36121abd34..c0e1fe8ad4 100644 --- a/api/src/controllers/items.ts +++ b/api/src/controllers/items.ts @@ -42,7 +42,7 @@ router.post( const result = await service.readOne(savedKeys[0], req.sanitizedQuery); res.locals.payload = { data: result || null }; } - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -97,10 +97,6 @@ router.get( asyncHandler(async (req, res, next) => { if (req.params.collection.startsWith('directus_')) throw new ForbiddenException(); - if (req.singleton) { - throw new RouteNotFoundException(req.path); - } - const service = new ItemsService(req.collection, { accountability: req.accountability, schema: req.schema, @@ -111,6 +107,7 @@ router.get( res.locals.payload = { data: result || null, }; + return next(); }), respond @@ -147,7 +144,7 @@ router.patch( try { const result = await service.readMany(keys, req.sanitizedQuery); res.locals.payload = { data: result }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -180,7 +177,7 @@ router.patch( try { const result = await service.readOne(updatedPrimaryKey, req.sanitizedQuery); res.locals.payload = { data: result || null }; - } catch (error) { + } 
catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/controllers/not-found.ts b/api/src/controllers/not-found.ts index 09d94d62a4..9d1d30fd74 100644 --- a/api/src/controllers/not-found.ts +++ b/api/src/controllers/not-found.ts @@ -20,7 +20,7 @@ const notFound: RequestHandler = async (req, res, next) => { return next(); } next(new RouteNotFoundException(req.path)); - } catch (err) { + } catch (err: any) { next(err); } }; diff --git a/api/src/controllers/permissions.ts b/api/src/controllers/permissions.ts index cd4137c4a0..b26b40f389 100644 --- a/api/src/controllers/permissions.ts +++ b/api/src/controllers/permissions.ts @@ -37,7 +37,7 @@ router.post( const item = await service.readOne(savedKeys[0], req.sanitizedQuery); res.locals.payload = { data: item }; } - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -116,7 +116,7 @@ router.patch( try { const result = await service.readMany(keys, req.sanitizedQuery); res.locals.payload = { data: result }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -142,7 +142,7 @@ router.patch( try { const item = await service.readOne(primaryKey, req.sanitizedQuery); res.locals.payload = { data: item || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/controllers/presets.ts b/api/src/controllers/presets.ts index f9cd9a248e..b1cff328a4 100644 --- a/api/src/controllers/presets.ts +++ b/api/src/controllers/presets.ts @@ -37,7 +37,7 @@ router.post( const record = await service.readOne(savedKeys[0], req.sanitizedQuery); res.locals.payload = { data: record }; } - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -115,7 +115,7 @@ router.patch( try { const result = await service.readMany(keys, req.sanitizedQuery); res.locals.payload = { data: result }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -141,7 +141,7 @@ router.patch( try { const record = await service.readOne(primaryKey, req.sanitizedQuery); res.locals.payload = { data: record }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/controllers/relations.ts b/api/src/controllers/relations.ts index 878d422985..a4a2b91efd 100644 --- a/api/src/controllers/relations.ts +++ b/api/src/controllers/relations.ts @@ -90,7 +90,7 @@ router.post( try { const createdRelation = await service.readOne(req.body.collection, req.body.field); res.locals.payload = { data: createdRelation || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -135,7 +135,7 @@ router.patch( try { const updatedField = await service.readOne(req.params.collection, req.params.field); res.locals.payload = { data: updatedField || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/controllers/roles.ts b/api/src/controllers/roles.ts index f852abefa8..8b47ecf05b 100644 --- a/api/src/controllers/roles.ts +++ b/api/src/controllers/roles.ts @@ -37,7 +37,7 @@ router.post( const item = await service.readOne(savedKeys[0], req.sanitizedQuery); res.locals.payload = { data: item }; } - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ 
-106,7 +106,7 @@ router.patch( try { const result = await service.readMany(keys, req.sanitizedQuery); res.locals.payload = { data: result }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -132,7 +132,7 @@ router.patch( try { const item = await service.readOne(primaryKey, req.sanitizedQuery); res.locals.payload = { data: item || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/controllers/settings.ts b/api/src/controllers/settings.ts index 98115b6041..5a22694249 100644 --- a/api/src/controllers/settings.ts +++ b/api/src/controllers/settings.ts @@ -35,7 +35,7 @@ router.patch( try { const record = await service.readSingleton(req.sanitizedQuery); res.locals.payload = { data: record || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/controllers/users.ts b/api/src/controllers/users.ts index b148471f72..241c2135d5 100644 --- a/api/src/controllers/users.ts +++ b/api/src/controllers/users.ts @@ -38,7 +38,7 @@ router.post( const item = await service.readOne(savedKeys[0], req.sanitizedQuery); res.locals.payload = { data: item }; } - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -86,7 +86,7 @@ router.get( try { const item = await service.readOne(req.accountability.user, req.sanitizedQuery); res.locals.payload = { data: item || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { res.locals.payload = { data: { id: req.accountability.user } }; return next(); @@ -177,7 +177,7 @@ router.patch( try { const result = await service.readMany(keys, req.sanitizedQuery); res.locals.payload = { data: result }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -203,7 +203,7 @@ router.patch( try { const item = await service.readOne(primaryKey, req.sanitizedQuery); res.locals.payload = { data: item || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/controllers/utils.ts b/api/src/controllers/utils.ts index 1edf1e0ae7..dbd9d098d4 100644 --- a/api/src/controllers/utils.ts +++ b/api/src/controllers/utils.ts @@ -2,12 +2,14 @@ import argon2 from 'argon2'; import { Router } from 'express'; import Joi from 'joi'; import { nanoid } from 'nanoid'; -import { InvalidPayloadException, InvalidQueryException } from '../exceptions'; +import { ForbiddenException, InvalidPayloadException, InvalidQueryException } from '../exceptions'; import collectionExists from '../middleware/collection-exists'; import { respond } from '../middleware/respond'; import { RevisionsService, UtilsService, ImportService } from '../services'; import asyncHandler from '../utils/async-handler'; import Busboy from 'busboy'; +import { flushCaches } from '../cache'; +import { generateHash } from '../utils/generate-hash'; const router = Router(); @@ -30,7 +32,7 @@ router.post( throw new InvalidPayloadException(`"string" is required`); } - const hash = await argon2.hash(req.body.string); + const hash = await generateHash(req.body.string); return res.json({ data: hash }); }) @@ -102,7 +104,7 @@ router.post( busboy.on('file', async (fieldname, fileStream, filename, encoding, mimetype) => { try { await service.import(req.params.collection, mimetype, fileStream); - } catch (err) { + } catch (err: 
any) { return next(err); } @@ -115,4 +117,17 @@ router.post( }) ); +router.post( + '/cache/clear', + asyncHandler(async (req, res) => { + if (req.accountability?.admin !== true) { + throw new ForbiddenException(); + } + + await flushCaches(); + + res.status(200).end(); + }) +); + export default router; diff --git a/api/src/controllers/webhooks.ts b/api/src/controllers/webhooks.ts index edca81dfeb..9eef455ed7 100644 --- a/api/src/controllers/webhooks.ts +++ b/api/src/controllers/webhooks.ts @@ -37,7 +37,7 @@ router.post( const item = await service.readOne(savedKeys[0], req.sanitizedQuery); res.locals.payload = { data: item }; } - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -106,7 +106,7 @@ router.patch( try { const result = await service.readMany(keys, req.sanitizedQuery); res.locals.payload = { data: result }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } @@ -132,7 +132,7 @@ router.patch( try { const item = await service.readOne(primaryKey, req.sanitizedQuery); res.locals.payload = { data: item || null }; - } catch (error) { + } catch (error: any) { if (error instanceof ForbiddenException) { return next(); } diff --git a/api/src/database/helpers/geometry.ts b/api/src/database/helpers/geometry.ts new file mode 100644 index 0000000000..489fd3166c --- /dev/null +++ b/api/src/database/helpers/geometry.ts @@ -0,0 +1,164 @@ +import { Field, RawField } from '@directus/shared/types'; +import { Knex } from 'knex'; +import { stringify as geojsonToWKT, GeoJSONGeometry } from 'wellknown'; +import getDatabase from '..'; + +let geometryHelper: KnexSpatial | undefined; + +export function getGeometryHelper(): KnexSpatial { + if (!geometryHelper) { + const db = getDatabase(); + const client = db.client.config.client as string; + const constructor = { + mysql: KnexSpatial_MySQL, + mariadb: KnexSpatial_MySQL, + sqlite3: KnexSpatial, + pg: KnexSpatial_PG, + postgres: KnexSpatial_PG, + redshift: KnexSpatial_Redshift, + mssql: KnexSpatial_MSSQL, + oracledb: KnexSpatial_Oracle, + }[client]; + if (!constructor) { + throw new Error(`Geometry helper not implemented on ${client}.`); + } + geometryHelper = new constructor(db); + } + return geometryHelper; +} + +class KnexSpatial { + constructor(protected knex: Knex) {} + isTrue(expression: Knex.Raw) { + return expression; + } + isFalse(expression: Knex.Raw) { + return expression.wrap('NOT ', ''); + } + createColumn(table: Knex.CreateTableBuilder, field: RawField | Field) { + const type = field.schema?.geometry_type ?? 'geometry'; + return table.specificType(field.field, type); + } + asText(table: string, column: string): Knex.Raw { + return this.knex.raw('st_astext(??.??) 
as ??', [table, column, column]); + } + fromText(text: string): Knex.Raw { + return this.knex.raw('st_geomfromtext(?, 4326)', text); + } + fromGeoJSON(geojson: GeoJSONGeometry): Knex.Raw { + return this.fromText(geojsonToWKT(geojson)); + } + _intersects(key: string, geojson: GeoJSONGeometry): Knex.Raw { + const geometry = this.fromGeoJSON(geojson); + return this.knex.raw('st_intersects(??, ?)', [key, geometry]); + } + intersects(key: string, geojson: GeoJSONGeometry): Knex.Raw { + return this.isTrue(this._intersects(key, geojson)); + } + nintersects(key: string, geojson: GeoJSONGeometry): Knex.Raw { + return this.isFalse(this._intersects(key, geojson)); + } + _intersects_bbox(key: string, geojson: GeoJSONGeometry): Knex.Raw { + const geometry = this.fromGeoJSON(geojson); + return this.knex.raw('intersects(??, ?)', [key, geometry]); + } + intersects_bbox(key: string, geojson: GeoJSONGeometry): Knex.Raw { + return this.isTrue(this._intersects_bbox(key, geojson)); + } + nintersects_bbox(key: string, geojson: GeoJSONGeometry): Knex.Raw { + return this.isFalse(this._intersects_bbox(key, geojson)); + } + collect(table: string, column: string): Knex.Raw { + return this.knex.raw('st_astext(st_collect(??.??))', [table, column]); + } +} + +class KnexSpatial_PG extends KnexSpatial { + createColumn(table: Knex.CreateTableBuilder, field: RawField | Field) { + const type = field.schema?.geometry_type ?? 'geometry'; + return table.specificType(field.field, `geometry(${type})`); + } + _intersects_bbox(key: string, geojson: GeoJSONGeometry): Knex.Raw { + const geometry = this.fromGeoJSON(geojson); + return this.knex.raw('?? && ?', [key, geometry]); + } +} + +class KnexSpatial_MySQL extends KnexSpatial { + collect(table: string, column: string): Knex.Raw { + return this.knex.raw( + `concat('geometrycollection(', group_concat(? separator ', '), ')'`, + this.asText(table, column) + ); + } +} + +class KnexSpatial_Redshift extends KnexSpatial { + createColumn(table: Knex.CreateTableBuilder, field: RawField | Field) { + const type = field.schema?.geometry_type ?? 'geometry'; + if (type !== 'geometry') field.meta!.special![1] = type; + return table.specificType(field.field, 'geometry'); + } +} + +class KnexSpatial_MSSQL extends KnexSpatial { + isTrue(expression: Knex.Raw) { + return expression.wrap(``, ` = 1`); + } + isFalse(expression: Knex.Raw) { + return expression.wrap(``, ` = 0`); + } + createColumn(table: Knex.CreateTableBuilder, field: RawField | Field) { + const type = field.schema?.geometry_type ?? 
'geometry'; + if (type !== 'geometry') field.meta!.special![1] = type; + return table.specificType(field.field, 'geometry'); + } + asText(table: string, column: string): Knex.Raw { + return this.knex.raw('??.??.STAsText() as ??', [table, column, column]); + } + fromText(text: string): Knex.Raw { + return this.knex.raw('geometry::STGeomFromText(?, 4326)', text); + } + _intersects(key: string, geojson: GeoJSONGeometry): Knex.Raw { + const geometry = this.fromGeoJSON(geojson); + return this.knex.raw('??.STIntersects(?)', [key, geometry]); + } + _intersects_bbox(key: string, geojson: GeoJSONGeometry): Knex.Raw { + const geometry = this.fromGeoJSON(geojson); + return this.knex.raw('??.STEnvelope().STIntersects(?.STEnvelope())', [key, geometry]); + } + collect(table: string, column: string): Knex.Raw { + return this.knex.raw('geometry::CollectionAggregate(??.??).STAsText()', [table, column]); + } +} + +class KnexSpatial_Oracle extends KnexSpatial { + isTrue(expression: Knex.Raw) { + return expression.wrap(``, ` = 'TRUE'`); + } + isFalse(expression: Knex.Raw) { + return expression.wrap(``, ` = 'FALSE'`); + } + createColumn(table: Knex.CreateTableBuilder, field: RawField | Field) { + const type = field.schema?.geometry_type ?? 'geometry'; + if (type !== 'geometry') field.meta!.special![1] = type; + return table.specificType(field.field, 'sdo_geometry'); + } + asText(table: string, column: string): Knex.Raw { + return this.knex.raw('sdo_util.from_wktgeometry(??.??) as ??', [table, column, column]); + } + fromText(text: string): Knex.Raw { + return this.knex.raw('sdo_geometry(?, 4326)', text); + } + _intersects(key: string, geojson: GeoJSONGeometry): Knex.Raw { + const geometry = this.fromGeoJSON(geojson); + return this.knex.raw(`sdo_overlapbdyintersect(??, ?)`, [key, geometry]); + } + _intersects_bbox(key: string, geojson: GeoJSONGeometry): Knex.Raw { + const geometry = this.fromGeoJSON(geojson); + return this.knex.raw(`sdo_overlapbdyintersect(sdo_geom.sdo_mbr(??), sdo_geom.sdo_mbr(?))`, [key, geometry]); + } + collect(table: string, column: string): Knex.Raw { + return this.knex.raw(`concat('geometrycollection(', listagg(?, ', '), ')'`, this.asText(table, column)); + } +}
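A short sketch of how these dialect-specific helpers are meant to be consumed (the column name, GeoJSON value, and call site are illustrative, not from this diff):

import { Knex } from 'knex';
import { getGeometryHelper } from '../database/helpers/geometry';

// Sketch: turning an `_intersects`-style filter into a dialect-aware WHERE clause.
function whereIntersects(query: Knex.QueryBuilder, column: string): Knex.QueryBuilder {
	const helper = getGeometryHelper(); // resolves to KnexSpatial_PG, _MySQL, ... based on the client
	const point = { type: 'Point' as const, coordinates: [5.12, 52.09] };
	// Yields e.g. st_intersects(??, st_geomfromtext(?, 4326)) on PG/MySQL,
	// or ??.STIntersects(?) = 1 on MS SQL.
	return query.where(helper.intersects(column, point));
}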
diff --git a/api/src/database/index.ts b/api/src/database/index.ts index 61824a64bd..9e952dfb91 100644 --- a/api/src/database/index.ts +++ b/api/src/database/index.ts @@ -5,6 +5,10 @@ import env from '../env'; import logger from '../logger'; import { getConfigFromEnv } from '../utils/get-config-from-env'; import { validateEnv } from '../utils/validate-env'; +import fse from 'fs-extra'; +import path from 'path'; +import { merge } from 'lodash'; +import { promisify } from 'util'; let database: Knex | null = null; let inspector: ReturnType<typeof SchemaInspector> | null = null; @@ -19,6 +23,7 @@ export default function getDatabase(): Knex { 'DB_SEARCH_PATH', 'DB_CONNECTION_STRING', 'DB_POOL', + 'DB_EXCLUDE_TABLES', ]); const poolConfig = getConfigFromEnv('DB_POOL'); @@ -50,7 +55,15 @@ export default function getDatabase(): Knex { searchPath: env.DB_SEARCH_PATH, connection: env.DB_CONNECTION_STRING || connectionConfig, log: { - warn: (msg) => logger.warn(msg), + warn: (msg) => { + // Ignore warnings about returning not being supported in some DBs + if (msg.startsWith('.returning()')) return; + + // Ignore warning about MySQL not supporting TRX for DDL + if (msg.startsWith('Transaction was implicitly committed, do not mix transactions and DDL with MySQL')) return; + + return logger.warn(msg); + }, error: (msg) => logger.error(msg), deprecate: (msg) => logger.info(msg), debug: (msg) => logger.debug(msg), @@ -60,11 +73,24 @@ if (env.DB_CLIENT === 'sqlite3') { knexConfig.useNullAsDefault = true; - poolConfig.afterCreate = (conn: any, cb: any) => { - conn.run('PRAGMA foreign_keys = ON', cb); + + poolConfig.afterCreate = async (conn: any, callback: any) => { + logger.trace('Enabling SQLite Foreign Keys support...'); + + const run = promisify(conn.run.bind(conn)); + await run('PRAGMA foreign_keys = ON'); + + callback(null, conn); }; } + if (env.DB_CLIENT === 'mssql') { + // This brings MS SQL in line with the other DB vendors. We shouldn't do any automatic + // timezone conversion on the database level, especially not when other database vendors don't + // act the same + merge(knexConfig, { connection: { options: { useUTC: false } } }); + } + database = knex(knexConfig); const times: Record<string, number> = {}; @@ -94,36 +120,131 @@ export function getSchemaInspector(): ReturnType<typeof SchemaInspector> { return inspector; } -export async function hasDatabaseConnection(): Promise<boolean> { - const database = getDatabase(); +export async function hasDatabaseConnection(database?: Knex): Promise<boolean> { + database = database ?? getDatabase(); try { - if (env.DB_CLIENT === 'oracledb') { + if (getDatabaseClient(database) === 'oracle') { await database.raw('select 1 from DUAL'); } else { await database.raw('SELECT 1'); } + return true; } catch { return false; } } -export async function validateDBConnection(): Promise<void> { +export async function validateDatabaseConnection(database?: Knex): Promise<void> { + database = database ?? getDatabase(); + try { - await hasDatabaseConnection(); - } catch (error) { + if (getDatabaseClient(database) === 'oracle') { + await database.raw('select 1 from DUAL'); + } else { + await database.raw('SELECT 1'); + } + } catch (error: any) { logger.error(`Can't connect to the database.`); logger.error(error); process.exit(1); } } +export function getDatabaseClient(database?: Knex): 'mysql' | 'postgres' | 'sqlite' | 'oracle' | 'mssql' { + database = database ?? getDatabase(); + + switch (database.client.constructor.name) { + case 'Client_MySQL': + return 'mysql'; + case 'Client_PG': + return 'postgres'; + case 'Client_SQLite3': + return 'sqlite'; + case 'Client_Oracledb': + case 'Client_Oracle': + return 'oracle'; + case 'Client_MSSQL': + return 'mssql'; + } + + throw new Error(`Couldn't extract database client`); +} +
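Because getDatabaseClient inspects the knex client instance rather than env.DB_CLIENT, it also works on injected connections; the migrations above apply the same idea with instanceof Client_Oracledb. An illustrative sketch:

import { Knex } from 'knex';
import { getDatabaseClient } from '../database';

// Sketch: choosing dialect-specific SQL from the connection itself.
async function ping(database: Knex): Promise<void> {
	const sql = getDatabaseClient(database) === 'oracle' ? 'select 1 from DUAL' : 'SELECT 1';
	await database.raw(sql);
}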
export async function isInstalled(): Promise<boolean> { const inspector = getSchemaInspector(); // The existence of a directus_collections table alone isn't a "proper" check to see if everything // is installed correctly of course, but it's safe enough to assume that this collection only - // exists when using the installer CLI. + // exists when Directus is properly installed. return await inspector.hasTable('directus_collections'); } + +export async function validateMigrations(): Promise<boolean> { + const database = getDatabase(); + + try { + let migrationFiles = await fse.readdir(path.join(__dirname, 'migrations')); + + const customMigrationsPath = path.resolve(env.EXTENSIONS_PATH, 'migrations'); + + let customMigrationFiles = + ((await fse.pathExists(customMigrationsPath)) && (await fse.readdir(customMigrationsPath))) || []; + + migrationFiles = migrationFiles.filter( + (file: string) => file.startsWith('run') === false && file.endsWith('.d.ts') === false + ); + + customMigrationFiles = customMigrationFiles.filter((file: string) => file.endsWith('.js')); + + migrationFiles.push(...customMigrationFiles); + + const requiredVersions = migrationFiles.map((filePath) => filePath.split('-')[0]); + const completedVersions = (await database.select('version').from('directus_migrations')).map( + ({ version }) => version + ); + + return requiredVersions.every((version) => completedVersions.includes(version)); + } catch (error: any) { + logger.error(`Database migrations cannot be found`); + logger.error(error); + throw process.exit(1); + } +} + +/** + * These database extensions should be optional, so we don't throw or return any problem states when they don't + */ +export async function validateDatabaseExtensions(): Promise<void> { + const database = getDatabase(); + const databaseClient = getDatabaseClient(database); + + if (databaseClient === 'postgres') { + let available = false; + let installed = false; + + const exists = await database.raw(`SELECT name FROM pg_available_extensions WHERE name = 'postgis';`); + + if (exists.rows.length > 0) { + available = true; + } + + if (available) { + try { + await database.raw(`SELECT PostGIS_version();`); + installed = true; + } catch { + installed = false; + } + } + + if (available === false) { + logger.warn(`PostGIS isn't installed. Geometry type support will be limited.`); + } else if (available === true && installed === false) { + logger.warn( + `PostGIS is installed, but hasn't been activated on this database.
Geometry type support will be limited.` ); } } } diff --git a/api/src/database/migrations/20201105B-change-webhook-url-type.ts b/api/src/database/migrations/20201105B-change-webhook-url-type.ts index 3107d4d9dd..b733d5a21b 100644 --- a/api/src/database/migrations/20201105B-change-webhook-url-type.ts +++ b/api/src/database/migrations/20201105B-change-webhook-url-type.ts @@ -1,7 +1,6 @@ import { Knex } from 'knex'; // @ts-ignore import Client_Oracledb from 'knex/lib/dialects/oracledb'; -import env from '../../env'; async function oracleAlterUrl(knex: Knex, type: string): Promise<void> { await knex.raw('ALTER TABLE "directus_webhooks" ADD "url__temp" ?', [knex.raw(type)]); @@ -23,7 +22,7 @@ export async function up(knex: Knex): Promise<void> { } export async function down(knex: Knex): Promise<void> { - if (env.DB_CLIENT === 'oracledb') { + if (knex.client instanceof Client_Oracledb) { await oracleAlterUrl(knex, 'VARCHAR2(255)'); return; } diff --git a/api/src/database/migrations/20210312A-webhooks-collections-text.ts b/api/src/database/migrations/20210312A-webhooks-collections-text.ts index 489e0253f5..4e93c092d1 100644 --- a/api/src/database/migrations/20210312A-webhooks-collections-text.ts +++ b/api/src/database/migrations/20210312A-webhooks-collections-text.ts @@ -1,7 +1,6 @@ import { Knex } from 'knex'; // @ts-ignore import Client_Oracledb from 'knex/lib/dialects/oracledb'; -import env from '../../env'; async function oracleAlterCollections(knex: Knex, type: string): Promise<void> { await knex.raw('ALTER TABLE "directus_webhooks" ADD "collections__temp" ?', [knex.raw(type)]); @@ -23,7 +22,7 @@ export async function up(knex: Knex): Promise<void> { } export async function down(knex: Knex): Promise<void> { - if (env.DB_CLIENT === 'oracledb') { + if (knex.client instanceof Client_Oracledb) { await oracleAlterCollections(knex, 'VARCHAR2(255)'); return; } diff --git a/api/src/database/migrations/20210518A-add-foreign-key-constraints.ts b/api/src/database/migrations/20210518A-add-foreign-key-constraints.ts index d27efecc2e..60e24386c1 100644 --- a/api/src/database/migrations/20210518A-add-foreign-key-constraints.ts +++ b/api/src/database/migrations/20210518A-add-foreign-key-constraints.ts @@ -68,7 +68,7 @@ export async function up(knex: Knex): Promise<void> { await knex(constraint.many_collection) .update({ [constraint.many_field]: null }) .whereIn(currentPrimaryKeyField, ids); - } catch (err) { + } catch (err: any) { logger.error( `${constraint.many_collection}.${constraint.many_field} contains illegal foreign keys which couldn't be set to NULL.
Please fix these references and rerun this migration to complete the upgrade.` ); @@ -111,7 +111,7 @@ export async function up(knex: Knex): Promise<void> { builder.onDelete('SET NULL'); } }); - } catch (err) { + } catch (err: any) { logger.warn( `Couldn't add foreign key constraint for ${constraint.many_collection}.${constraint.many_field}<->${constraint.one_collection}` ); @@ -140,7 +140,7 @@ export async function down(knex: Knex): Promise<void> { await knex.schema.alterTable(relation.many_collection, (table) => { table.dropForeign([relation.many_field]); }); - } catch (err) { + } catch (err: any) { logger.warn( `Couldn't drop foreign key constraint for ${relation.many_collection}.${relation.many_field}<->${relation.one_collection}` ); diff --git a/api/src/database/migrations/20210519A-add-system-fk-triggers.ts b/api/src/database/migrations/20210519A-add-system-fk-triggers.ts index 2cf58f6a0c..e682eb023e 100644 --- a/api/src/database/migrations/20210519A-add-system-fk-triggers.ts +++ b/api/src/database/migrations/20210519A-add-system-fk-triggers.ts @@ -99,7 +99,7 @@ export async function up(knex: Knex): Promise<void> { await knex.schema.alterTable(update.table, (table) => { table.dropForeign([constraint.column], existingForeignKey?.constraint_name || undefined); }); - } catch (err) { + } catch (err: any) { logger.warn(`Couldn't drop foreign key ${update.table}.${constraint.column}->${constraint.references}`); logger.warn(err); } @@ -114,7 +114,7 @@ export async function up(knex: Knex): Promise<void> { // Knex uses a default convention for index names: `table_column_type` table.dropIndex([constraint.column], `${update.table}_${constraint.column}_foreign`); }); - } catch (err) { + } catch (err: any) { logger.warn( `Couldn't clean up index for foreign key ${update.table}.${constraint.column}->${constraint.references}` ); @@ -126,7 +126,7 @@ export async function up(knex: Knex): Promise<void> { await knex.schema.alterTable(update.table, (table) => { table.foreign(constraint.column).references(constraint.references).onDelete(constraint.on_delete); }); - } catch (err) { + } catch (err: any) { logger.warn(`Couldn't add foreign key to ${update.table}.${constraint.column}->${constraint.references}`); logger.warn(err); } @@ -141,7 +141,7 @@ export async function down(knex: Knex): Promise<void> { await knex.schema.alterTable(update.table, (table) => { table.dropForeign([constraint.column]); }); - } catch (err) { + } catch (err: any) { logger.warn(`Couldn't drop foreign key ${update.table}.${constraint.column}->${constraint.references}`); logger.warn(err); } @@ -156,7 +156,7 @@ export async function down(knex: Knex): Promise<void> { // Knex uses a default convention for index names: `table_column_type` table.dropIndex([constraint.column], `${update.table}_${constraint.column}_foreign`); }); - } catch (err) { + } catch (err: any) { logger.warn( `Couldn't clean up index for foreign key ${update.table}.${constraint.column}->${constraint.references}` ); @@ -168,7 +168,7 @@ export async function down(knex: Knex): Promise<void> { await knex.schema.alterTable(update.table, (table) => { table.foreign(constraint.column).references(constraint.references); }); - } catch (err) { + } catch (err: any) { logger.warn(`Couldn't add foreign key to ${update.table}.${constraint.column}->${constraint.references}`); logger.warn(err); }
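Note that validateMigrations (further up) also picks up custom migrations from <EXTENSIONS_PATH>/migrations, identified by the version prefix before the first dash in the file name. A hypothetical custom migration following that scheme (file name, table, and column are examples only):

// e.g. extensions/migrations/20210901A-add-subtitle.js (hypothetical)
module.exports = {
	async up(knex) {
		await knex.schema.alterTable('articles', (table) => {
			table.string('subtitle');
		});
	},
	async down(knex) {
		await knex.schema.alterTable('articles', (table) => {
			table.dropColumn('subtitle');
		});
	},
};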
diff --git a/api/src/database/migrations/20210626A-change-filesize-bigint.ts b/api/src/database/migrations/20210626A-change-filesize-bigint.ts index 53544c34c5..03b2a8b420 100644 --- a/api/src/database/migrations/20210626A-change-filesize-bigint.ts +++ b/api/src/database/migrations/20210626A-change-filesize-bigint.ts @@ -1,12 +1,22 @@ import { Knex } from 'knex'; +// @ts-ignore +import Client_Oracledb from 'knex/lib/dialects/oracledb'; export async function up(knex: Knex): Promise<void> { + if (knex.client instanceof Client_Oracledb) { + return; + } + await knex.schema.alterTable('directus_files', (table) => { table.bigInteger('filesize').nullable().defaultTo(null).alter(); }); } export async function down(knex: Knex): Promise<void> { + if (knex.client instanceof Client_Oracledb) { + return; + } + await knex.schema.alterTable('directus_files', (table) => { table.integer('filesize').nullable().defaultTo(null).alter(); }); diff --git a/api/src/database/migrations/20210716A-add-conditions-to-fields.ts b/api/src/database/migrations/20210716A-add-conditions-to-fields.ts new file mode 100644 index 0000000000..efead1ddf5 --- /dev/null +++ b/api/src/database/migrations/20210716A-add-conditions-to-fields.ts @@ -0,0 +1,13 @@ +import { Knex } from 'knex'; + +export async function up(knex: Knex): Promise<void> { + await knex.schema.alterTable('directus_fields', (table) => { + table.json('conditions'); + }); +} + +export async function down(knex: Knex): Promise<void> { + await knex.schema.alterTable('directus_fields', (table) => { + table.dropColumn('conditions'); + }); +} diff --git a/api/src/database/migrations/20210721A-add-default-folder.ts b/api/src/database/migrations/20210721A-add-default-folder.ts new file mode 100644 index 0000000000..5bf17ef17a --- /dev/null +++ b/api/src/database/migrations/20210721A-add-default-folder.ts @@ -0,0 +1,22 @@ +import { Knex } from 'knex'; +import { getDefaultIndexName } from '../../utils/get-default-index-name'; + +const indexName = getDefaultIndexName('foreign', 'directus_settings', 'storage_default_folder'); + +export async function up(knex: Knex): Promise<void> { + await knex.schema.alterTable('directus_settings', (table) => { + table + .uuid('storage_default_folder') + .references('id') + .inTable('directus_folders') + .withKeyName(indexName) + .onDelete('SET NULL'); + }); +} + +export async function down(knex: Knex): Promise<void> { + await knex.schema.alterTable('directus_settings', (table) => { + table.dropForeign(['storage_default_folder'], indexName); + table.dropColumn('storage_default_folder'); + }); +}
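Because the foreign key is created with an explicit key name via `withKeyName`, `down()` can pass the identical name to `dropForeign` instead of guessing what the database generated. Illustratively (assuming `getDefaultIndexName` follows the Knex `table_column_type` convention noted elsewhere in these migrations):

// getDefaultIndexName('foreign', 'directus_settings', 'storage_default_folder')
// -> 'directus_settings_storage_default_folder_foreign'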
diff --git a/api/src/database/migrations/20210802A-replace-groups.ts b/api/src/database/migrations/20210802A-replace-groups.ts new file mode 100644 index 0000000000..8cd924752a --- /dev/null +++ b/api/src/database/migrations/20210802A-replace-groups.ts @@ -0,0 +1,49 @@ +import { Knex } from 'knex'; +import logger from '../../logger'; + +export async function up(knex: Knex): Promise<void> { + const dividerGroups = await knex.select('*').from('directus_fields').where('interface', '=', 'group-divider'); + + for (const dividerGroup of dividerGroups) { + const newOptions: { showHeader: true; headerIcon?: string; headerColor?: string } = { showHeader: true }; + + if (dividerGroup.options) { + try { + const options = + typeof dividerGroup.options === 'string' ? JSON.parse(dividerGroup.options) : dividerGroup.options; + + if (options.icon) newOptions.headerIcon = options.icon; + if (options.color) newOptions.headerColor = options.color; + } catch (err: any) { + logger.warn(`Couldn't convert previous options from field ${dividerGroup.collection}.${dividerGroup.field}`); + logger.warn(err); + } + } + + try { + await knex('directus_fields') + .update({ + interface: 'group-standard', + options: JSON.stringify(newOptions), + }) + .where('id', '=', dividerGroup.id); + } catch (err: any) { + logger.warn(`Couldn't update ${dividerGroup.collection}.${dividerGroup.field} to new group interface`); + logger.warn(err); + } + } + + await knex('directus_fields') + .update({ + interface: 'group-standard', + }) + .where({ interface: 'group-raw' }); +} + +export async function down(knex: Knex): Promise<void> { + await knex('directus_fields') + .update({ + interface: 'group-raw', + }) + .where('interface', '=', 'group-standard'); +} diff --git a/api/src/database/migrations/20210803A-add-required-to-fields.ts b/api/src/database/migrations/20210803A-add-required-to-fields.ts new file mode 100644 index 0000000000..352496d07a --- /dev/null +++ b/api/src/database/migrations/20210803A-add-required-to-fields.ts @@ -0,0 +1,13 @@ +import { Knex } from 'knex'; + +export async function up(knex: Knex): Promise<void> { + await knex.schema.alterTable('directus_fields', (table) => { + table.boolean('required').defaultTo(false); + }); +} + +export async function down(knex: Knex): Promise<void> { + await knex.schema.alterTable('directus_fields', (table) => { + table.dropColumn('required'); + }); +} diff --git a/api/src/database/migrations/20210805A-update-groups.ts b/api/src/database/migrations/20210805A-update-groups.ts new file mode 100644 index 0000000000..350268a52f --- /dev/null +++ b/api/src/database/migrations/20210805A-update-groups.ts @@ -0,0 +1,35 @@ +import { Knex } from 'knex'; + +export async function up(knex: Knex): Promise<void> { + const groups = await knex.select('*').from('directus_fields').where({ interface: 'group-standard' }); + + const raw = []; + const detail = []; + + for (const group of groups) { + const options = typeof group.options === 'string' ? JSON.parse(group.options) : group.options || {}; + + if (options.showHeader === true) { + detail.push(group); + } else { + raw.push(group); + } + } + + for (const field of raw) { + await knex('directus_fields').update({ interface: 'group-raw' }).where({ id: field.id }); + } + + for (const field of detail) { + await knex('directus_fields').update({ interface: 'group-detail' }).where({ id: field.id }); + } +} + +export async function down(knex: Knex): Promise<void> { + await knex('directus_fields') + .update({ + interface: 'group-standard', + }) + .where({ interface: 'group-detail' }) + .orWhere({ interface: 'group-raw' }); +}
diff --git a/api/src/database/migrations/20210805B-change-image-metadata-structure.ts b/api/src/database/migrations/20210805B-change-image-metadata-structure.ts new file mode 100644 index 0000000000..952b4db42b --- /dev/null +++ b/api/src/database/migrations/20210805B-change-image-metadata-structure.ts @@ -0,0 +1,94 @@ +import { Knex } from 'knex'; + +// Change image metadata structure to match the output from 'exifr' +export async function up(knex: Knex): Promise<void> { + const files = await knex + .select<{ id: number; metadata: string }[]>('id', 'metadata') + .from('directus_files') + .whereNotNull('metadata'); + + for (const { id, metadata } of files) { + let prevMetadata; + + try { + prevMetadata = JSON.parse(metadata); + } catch { + continue; + } + + // Update only required if metadata has 'exif' data + if (prevMetadata.exif) { + // Get all data from 'exif' and rename the following keys: + // - 'image' to 'ifd0' + // - 'thumbnail' to 'ifd1' + // - 'interoperability' to 'interop' + const newMetadata = prevMetadata.exif; + + if (newMetadata.image) { + newMetadata.ifd0 = newMetadata.image; + delete newMetadata.image; + } + if (newMetadata.thumbnail) { + newMetadata.ifd1 = newMetadata.thumbnail; + delete newMetadata.thumbnail; + } + if (newMetadata.interoperability) { + newMetadata.interop = newMetadata.interoperability; + delete newMetadata.interoperability; + } + if (prevMetadata.icc) { + newMetadata.icc = prevMetadata.icc; + } + if (prevMetadata.iptc) { + newMetadata.iptc = prevMetadata.iptc; + } + + await knex('directus_files') + .update({ metadata: JSON.stringify(newMetadata) }) + .where({ id }); + } + } +} + +export async function down(knex: Knex): Promise<void> { + const files = await knex + .select<{ id: number; metadata: string }[]>('id', 'metadata') + .from('directus_files') + .whereNotNull('metadata') + .whereNot('metadata', '{}'); + + for (const { id, metadata } of files) { + const prevMetadata = JSON.parse(metadata); + + // Update only required if metadata has keys other than 'icc' and 'iptc' + if (Object.keys(prevMetadata).filter((key) => key !== 'icc' && key !== 'iptc').length > 0) { + // Put all data under 'exif' and rename/move keys afterwards + const newMetadata: { exif: Record<string, unknown>; icc?: unknown; iptc?: unknown } = { exif: prevMetadata }; + + if (newMetadata.exif.ifd0) { + newMetadata.exif.image = newMetadata.exif.ifd0; + delete newMetadata.exif.ifd0; + } + if (newMetadata.exif.ifd1) { + newMetadata.exif.thumbnail = newMetadata.exif.ifd1; + delete newMetadata.exif.ifd1; + } + if (newMetadata.exif.interop) { + newMetadata.exif.interoperability = newMetadata.exif.interop; + delete newMetadata.exif.interop; + } + if (newMetadata.exif.icc) { + newMetadata.icc = newMetadata.exif.icc; + delete newMetadata.exif.icc; + } + if (newMetadata.exif.iptc) { + newMetadata.iptc = newMetadata.exif.iptc; + delete newMetadata.exif.iptc; + } + + await knex('directus_files') + .update({ metadata: JSON.stringify(newMetadata) }) + .where({ id }); + } + } +}
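To make the key renames concrete, here is how a stored `directus_files.metadata` value changes shape when `up()` runs (sample EXIF values, not taken from any real record):

Before:
{ "exif": { "image": { "Make": "Canon" }, "thumbnail": { "Compression": 6 }, "interoperability": { "InteropIndex": "R98" } }, "icc": { "description": "sRGB" }, "iptc": { "Keywords": ["demo"] } }

After:
{ "ifd0": { "Make": "Canon" }, "ifd1": { "Compression": 6 }, "interop": { "InteropIndex": "R98" }, "icc": { "description": "sRGB" }, "iptc": { "Keywords": ["demo"] } }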
diff --git a/api/src/database/migrations/20210811A-add-geometry-config.ts b/api/src/database/migrations/20210811A-add-geometry-config.ts new file mode 100644 index 0000000000..dc86a4cb8a --- /dev/null +++ b/api/src/database/migrations/20210811A-add-geometry-config.ts @@ -0,0 +1,15 @@ +import { Knex } from 'knex'; + +export async function up(knex: Knex): Promise<void> { + await knex.schema.alterTable('directus_settings', (table) => { + table.json('basemaps'); + table.string('mapbox_key'); + }); +} + +export async function down(knex: Knex): Promise<void> { + await knex.schema.alterTable('directus_settings', (table) => { + table.dropColumn('basemaps'); + table.dropColumn('mapbox_key'); + }); +} diff --git a/api/src/database/migrations/20210831A-remove-limit-column.ts b/api/src/database/migrations/20210831A-remove-limit-column.ts new file mode 100644 index 0000000000..3a9c1c84db --- /dev/null +++ b/api/src/database/migrations/20210831A-remove-limit-column.ts @@ -0,0 +1,13 @@ +import { Knex } from 'knex'; + +export async function up(knex: Knex): Promise<void> { + await knex.schema.alterTable('directus_permissions', (table) => { + table.dropColumn('limit'); + }); +} + +export async function down(knex: Knex): Promise<void> { + await knex.schema.alterTable('directus_permissions', (table) => { + table.integer('limit').unsigned(); + }); +} diff --git a/api/src/database/migrations/20210907A-webhooks-collections-not-null.ts b/api/src/database/migrations/20210907A-webhooks-collections-not-null.ts new file mode 100644 index 0000000000..208b07e752 --- /dev/null +++ b/api/src/database/migrations/20210907A-webhooks-collections-not-null.ts @@ -0,0 +1,13 @@ +import { Knex } from 'knex'; + +export async function up(knex: Knex): Promise<void> { + await knex.schema.alterTable('directus_webhooks', (table) => { + table.text('collections').notNullable().alter(); + }); +} + +export async function down(knex: Knex): Promise<void> { + await knex.schema.alterTable('directus_webhooks', (table) => { + table.text('collections').alter(); + }); +} diff --git a/api/src/database/migrations/run.ts b/api/src/database/migrations/run.ts index 5b3566fd1d..f7c9eb77b0 100644 --- a/api/src/database/migrations/run.ts +++ b/api/src/database/migrations/run.ts @@ -1,16 +1,10 @@ -/* eslint-disable no-console */ - import formatTitle from '@directus/format-title'; import fse from 'fs-extra'; import { Knex } from 'knex'; import path from 'path'; import env from '../../env'; - -type Migration = { - version: string; - name: string; - timestamp: Date; -}; +import logger from '../../logger'; +import { Migration } from '../../types'; export default async function run(database: Knex, direction: 'up' | 'down' | 'latest'): Promise<void> { let migrationFiles = await fse.readdir(__dirname); @@ -67,7 +61,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la const { up } = require(nextVersion.file); - console.log(`✨ Applying ${nextVersion.name}...`); + logger.info(`Applying ${nextVersion.name}...`); await up(database); await database.insert({ version: nextVersion.version, name: nextVersion.name }).into('directus_migrations'); @@ -88,7 +82,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la const { down } = require(migration.file); - console.log(`✨ Undoing ${migration.name}...`); + logger.info(`Undoing ${migration.name}...`); await down(database); await database('directus_migrations').delete().where({ version: migration.version }); @@ -99,7 +93,7 @@ export
default async function run(database: Knex, direction: 'up' | 'down' | 'la if (migration.completed === false) { const { up } = require(migration.file); - console.log(`✨ Applying ${migration.name}...`); + logger.info(`Applying ${migration.name}...`); await up(database); await database.insert({ version: migration.version, name: migration.name }).into('directus_migrations'); diff --git a/api/src/database/run-ast.ts b/api/src/database/run-ast.ts index 783da3e613..76cd2ec17b 100644 --- a/api/src/database/run-ast.ts +++ b/api/src/database/run-ast.ts @@ -2,13 +2,15 @@ import { Knex } from 'knex'; import { clone, cloneDeep, pick, uniq } from 'lodash'; import { PayloadService } from '../services/payload'; import { Item, Query, SchemaOverview } from '../types'; -import { AST, FieldNode, NestedCollectionNode } from '../types/ast'; +import { AST, FieldNode, NestedCollectionNode, M2ONode } from '../types/ast'; import { applyFunctionToColumnName } from '../utils/apply-function-to-column-name'; import applyQuery from '../utils/apply-query'; import { getColumn } from '../utils/get-column'; import { stripFunction } from '../utils/strip-function'; -import { toArray } from '../utils/to-array'; +import { toArray } from '@directus/shared/utils'; import getDatabase from './index'; +import { isNativeGeometry } from '../utils/geometry'; +import { getGeometryHelper } from '../database/helpers/geometry'; type RunASTOptions = { /** @@ -58,7 +60,7 @@ export default async function runAST( async function run(collection: string, children: (NestedCollectionNode | FieldNode)[], query: Query) { // Retrieve the database columns to select in the current AST - const { columnsToSelect, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel( + const { fieldNodes, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel( schema, collection, children, @@ -66,7 +68,7 @@ export default async function runAST( ); // The actual knex query builder instance. This is a promise that resolves with the raw items from the db - const dbQuery = await getDBQuery(schema, knex, collection, columnsToSelect, query, options?.nested); + const dbQuery = await getDBQuery(schema, knex, collection, fieldNodes, query, options?.nested); const rawItems: Item | Item[] = await dbQuery; @@ -117,8 +119,17 @@ async function parseCurrentLevel( for (const child of children) { if (child.type === 'field') { const fieldKey = stripFunction(child.name); + if (columnsInCollection.includes(fieldKey) || fieldKey === '*') { columnsToSelectInternal.push(child.name); // maintain original name here (includes functions) + + if (query.alias) { + columnsToSelectInternal.push( + ...Object.entries(query.alias) + .filter(([_key, value]) => value === child.name) + .map(([key]) => key) + ); + } } continue; @@ -127,7 +138,7 @@ async function parseCurrentLevel( if (!child.relation) continue; if (child.type === 'm2o') { - columnsToSelectInternal.push(child.relation.field); + columnsToSelectInternal.push(child.fieldKey); } if (child.type === 'm2a') { @@ -138,31 +149,62 @@ async function parseCurrentLevel( nestedCollectionNodes.push(child); } - /** - * Always fetch primary key in case there's a nested relation that needs it, but only if there's - * no aggregation / grouping going on + const isAggregate = (query.aggregate && Object.keys(query.aggregate).length > 0) ?? false; + + /** Always fetch primary key in case there's a nested relation that needs it. 
Aggregate payloads + * can't have nested relational fields */ - const hasAggregationOrGrouping = 'aggregate' in query || 'group' in query; - if (columnsToSelectInternal.includes(primaryKeyField) === false && hasAggregationOrGrouping === false) { + if (isAggregate === false && columnsToSelectInternal.includes(primaryKeyField) === false) { columnsToSelectInternal.push(primaryKeyField); } /** Make sure select list has unique values */ const columnsToSelect = [...new Set(columnsToSelectInternal)]; - return { columnsToSelect, nestedCollectionNodes, primaryKeyField }; + const fieldNodes = columnsToSelect.map( + (column: string) => + children.find((childNode) => childNode.fieldKey === column) ?? { type: 'field', name: column, fieldKey: column } + ) as FieldNode[]; + + return { fieldNodes, nestedCollectionNodes, primaryKeyField }; +} + +function getColumnPreprocessor(knex: Knex, schema: SchemaOverview, table: string) { + const helper = getGeometryHelper(); + + return function (fieldNode: FieldNode | M2ONode): Knex.Raw { + let field; + + if (fieldNode.type === 'field') { + field = schema.collections[table].fields[stripFunction(fieldNode.name)]; + } else { + field = schema.collections[fieldNode.relation.collection].fields[fieldNode.relation.field]; + } + + let alias = undefined; + + if (fieldNode.name !== fieldNode.fieldKey) { + alias = fieldNode.fieldKey; + } + + if (isNativeGeometry(field)) { + return helper.asText(table, field.field); + } + + return getColumn(knex, table, fieldNode.name, alias); + }; } function getDBQuery( schema: SchemaOverview, knex: Knex, table: string, - columns: string[], + fieldNodes: FieldNode[], query: Query, nested?: boolean ): Knex.QueryBuilder { - const dbQuery = knex.select(columns.map((column) => getColumn(knex, table, column))).from(table); - + const preProcess = getColumnPreprocessor(knex, schema, table); + const dbQuery = knex.select(fieldNodes.map(preProcess)).from(table); const queryCopy = clone(query); queryCopy.limit = typeof queryCopy.limit === 'number' ? 
queryCopy.limit : 100; @@ -205,11 +247,19 @@ function applyParentFilters( }); if (relatedM2OisFetched === false) { - nestedNode.children.push({ type: 'field', name: nestedNode.relation.field }); + nestedNode.children.push({ + type: 'field', + name: nestedNode.relation.field, + fieldKey: nestedNode.relation.field, + }); } if (nestedNode.relation.meta?.sort_field) { - nestedNode.children.push({ type: 'field', name: nestedNode.relation.meta.sort_field }); + nestedNode.children.push({ + type: 'field', + name: nestedNode.relation.meta.sort_field, + fieldKey: nestedNode.relation.meta.sort_field, + }); } nestedNode.query = { @@ -387,10 +437,9 @@ function removeTemporaryFields( const nestedCollectionNodes: NestedCollectionNode[] = []; for (const child of ast.children) { - if (child.type === 'field') { - fields.push(child.name); - } else { - fields.push(child.fieldKey); + fields.push(child.fieldKey); + + if (child.type !== 'field') { nestedCollectionNodes.push(child); } } @@ -402,7 +451,7 @@ function removeTemporaryFields( if (operation === 'count' && aggregateFields.includes('*')) fields.push('count'); - fields.push(...aggregateFields.map((field) => `${field}_${operation}`)); + fields.push(...aggregateFields.map((field) => `${operation}.${field}`)); } } diff --git a/api/src/database/seeds/run.ts b/api/src/database/seeds/run.ts index 2ad2eb0916..6a14b83ff9 100644 --- a/api/src/database/seeds/run.ts +++ b/api/src/database/seeds/run.ts @@ -3,13 +3,14 @@ import yaml from 'js-yaml'; import { Knex } from 'knex'; import { isObject } from 'lodash'; import path from 'path'; -import { types } from '../../types'; +import { Type, Field } from '@directus/shared/types'; +import { getGeometryHelper } from '../helpers/geometry'; type TableSeed = { table: string; columns: { [column: string]: { - type?: typeof types[number]; + type?: Type; primary?: boolean; nullable?: boolean; default?: any; @@ -45,6 +46,8 @@ export default async function runSeed(database: Knex): Promise { for (const [columnName, columnInfo] of Object.entries(seedData.columns)) { let column: Knex.ColumnBuilder; + if (columnInfo.type === 'alias' || columnInfo.type === 'unknown') return; + if (columnInfo.type === 'string') { column = tableBuilder.string(columnName, columnInfo.length); } else if (columnInfo.increments) { @@ -53,6 +56,9 @@ export default async function runSeed(database: Knex): Promise { column = tableBuilder.string(columnName); } else if (columnInfo.type === 'hash') { column = tableBuilder.string(columnName, 255); + } else if (columnInfo.type === 'geometry') { + const helper = getGeometryHelper(); + column = helper.createColumn(tableBuilder, { field: columnName } as Field); } else { column = tableBuilder[columnInfo.type!](columnName); } diff --git a/api/src/database/system-data/app-access-permissions/index.ts b/api/src/database/system-data/app-access-permissions/index.ts index a41dc5d30a..498aa0ea3d 100644 --- a/api/src/database/system-data/app-access-permissions/index.ts +++ b/api/src/database/system-data/app-access-permissions/index.ts @@ -8,7 +8,6 @@ const defaults: Partial = { validation: null, presets: null, fields: ['*'], - limit: null, system: true, }; diff --git a/api/src/database/system-data/collections/collections.yaml b/api/src/database/system-data/collections/collections.yaml index 5086cda422..6ea3ddae42 100644 --- a/api/src/database/system-data/collections/collections.yaml +++ b/api/src/database/system-data/collections/collections.yaml @@ -8,51 +8,52 @@ defaults: note: null translations: null display_template: null + 
accountability: 'all' data: - collection: directus_activity - note: Accountability logs for all events + note: $t:directus_collection.directus_activity - collection: directus_collections icon: list_alt - note: Additional collection configuration and metadata + note: $t:directus_collection.directus_collections - collection: directus_fields icon: input - note: Additional field configuration and metadata + note: $t:directus_collection.directus_fields - collection: directus_files icon: folder - note: Metadata for all managed file assets + note: $t:directus_collection.directus_files display_template: '{{ $thumbnail }} {{ title }}' - collection: directus_folders - note: Provides virtual directories for files + note: $t:directus_collection.directus_folders display_template: '{{ name }}' - collection: directus_migrations - note: What version of the database you're using + note: $t:directus_collection.directus_migrations - collection: directus_permissions icon: admin_panel_settings - note: Access permissions for each role + note: $t:directus_collection.directus_permissions - collection: directus_presets icon: bookmark_border - note: Presets for collection defaults and bookmarks + note: $t:directus_collection.directus_presets accountability: null - collection: directus_relations icon: merge_type - note: Relationship configuration and metadata + note: $t:directus_collection.directus_relations - collection: directus_revisions - note: Data snapshots for all activity + note: $t:directus_collection.directus_revisions - collection: directus_roles icon: supervised_user_circle - note: Permission groups for system users + note: $t:directus_collection.directus_roles - collection: directus_sessions - note: User session information + note: $t:directus_collection.directus_sessions - collection: directus_settings singleton: true - note: Project configuration options + note: $t:directus_collection.directus_settings - collection: directus_users archive_field: status archive_value: archived unarchive_value: draft icon: people_alt - note: System users for the platform + note: $t:directus_collection.directus_users display_template: '{{ first_name }} {{ last_name }}' - collection: directus_webhooks - note: Configuration for event-based HTTP requests + note: $t:directus_collection.directus_webhooks diff --git a/api/src/database/system-data/fields/activity.yaml b/api/src/database/system-data/fields/activity.yaml index 3e759a6ac5..482122e7b8 100644 --- a/api/src/database/system-data/fields/activity.yaml +++ b/api/src/database/system-data/fields/activity.yaml @@ -13,19 +13,19 @@ fields: defaultForeground: 'var(--foreground-normal)' defaultBackground: 'var(--background-normal-alt)' choices: - - text: Create + - text: $t:field_options.directus_activity.create value: create foreground: 'var(--primary)' background: 'var(--primary-25)' - - text: Update + - text: $t:field_options.directus_activity.update value: update foreground: 'var(--blue)' background: 'var(--blue-25)' - - text: Delete + - text: $t:field_options.directus_activity.delete value: delete foreground: 'var(--danger)' background: 'var(--danger-25)' - - text: Login + - text: $t:field_options.directus_activity.login value: authenticate foreground: 'var(--purple)' background: 'var(--purple-25)' diff --git a/api/src/database/system-data/fields/collections.yaml b/api/src/database/system-data/fields/collections.yaml index 4fbf09a4c8..efdd176dc0 100644 --- a/api/src/database/system-data/fields/collections.yaml +++ b/api/src/database/system-data/fields/collections.yaml @@ 
-8,7 +8,7 @@ fields: interface: presentation-divider options: icon: box - title: Collection Setup + title: $t:field_options.directus_collections.collection_setup width: full - field: collection @@ -32,7 +32,7 @@ fields: - field: color interface: select-color options: - placeholder: Choose a color... + placeholder: $t:field_options.directus_collections.note_placeholder width: half - field: display_template @@ -45,7 +45,7 @@ fields: special: boolean interface: boolean options: - label: Hide within the App + label: $t:field_options.directus_collections.hidden_label width: half - field: singleton @@ -102,7 +102,7 @@ fields: interface: presentation-divider options: icon: archive - title: Archive + title: $t:field_options.directus_collections.archive_divider width: full - field: archive_field @@ -110,14 +110,14 @@ fields: options: collectionField: collection allowNone: true - placeholder: Choose a field... + placeholder: $t:field_options.directus_collections.archive_field width: half - field: archive_app_filter interface: boolean special: boolean options: - label: Enable App Archive Filter + label: $t:field_options.directus_collections.archive_app_filter width: half - field: archive_value @@ -125,7 +125,7 @@ fields: options: font: monospace iconRight: archive - placeholder: Value set when archiving... + placeholder: $t:field_options.directus_collections.archive_value width: half - field: unarchive_value @@ -133,7 +133,7 @@ fields: options: font: monospace iconRight: unarchive - placeholder: Value set when unarchiving... + placeholder: $t:field_options.directus_collections.unarchive_value width: half - field: sort_divider @@ -143,14 +143,14 @@ fields: interface: presentation-divider options: icon: sort - title: Sort + title: $t:field_options.directus_collections.divider width: full - field: sort_field interface: system-field options: collectionField: collection - placeholder: Choose a field... + placeholder: $t:field_options.directus_collections.sort_field typeAllowList: - float - decimal @@ -165,7 +165,7 @@ fields: interface: presentation-divider options: icon: admin_panel_settings - title: Accountability + title: $t:field_options.directus_collections.accountability_divider width: full - field: accountability diff --git a/api/src/database/system-data/fields/fields.yaml b/api/src/database/system-data/fields/fields.yaml index 5dfc5fcb25..1c3e3a4c23 100644 --- a/api/src/database/system-data/fields/fields.yaml +++ b/api/src/database/system-data/fields/fields.yaml @@ -52,6 +52,12 @@ fields: special: boolean width: half + - collection: directus_fields + field: required + hidden: true + special: boolean + width: half + - collection: directus_fields field: sort width: half @@ -73,3 +79,8 @@ fields: - collection: directus_fields field: note width: half + + - collection: directus_fields + field: conditions + hidden: true + special: json diff --git a/api/src/database/system-data/fields/files.yaml b/api/src/database/system-data/fields/files.yaml index b0733daaea..8eeb56281c 100644 --- a/api/src/database/system-data/fields/files.yaml +++ b/api/src/database/system-data/fields/files.yaml @@ -10,14 +10,14 @@ fields: interface: input options: iconRight: title - placeholder: A unique title... + placeholder: $t:field_options.directus_files.title width: full - field: description interface: input-multiline width: full options: - placeholder: An optional description... 
+ placeholder: $t:field_options.directus_files.description - field: tags interface: tags @@ -35,7 +35,7 @@ fields: interface: input options: iconRight: place - placeholder: An optional location... + placeholder: $t:field_options.directus_files.location width: half - field: storage @@ -49,7 +49,7 @@ fields: interface: presentation-divider options: icon: insert_drive_file - title: File Naming + title: $t:field_options.directus_files.storage_divider special: - alias - no-data @@ -59,7 +59,7 @@ fields: interface: input options: iconRight: publish - placeholder: Name on disk storage... + placeholder: $t:field_options.directus_files.filename_disk readonly: true width: half @@ -67,7 +67,7 @@ fields: interface: input options: iconRight: get_app - placeholder: Name when downloading... + placeholder: $t:field_options.directus_files.filename_download width: half - field: metadata @@ -106,6 +106,7 @@ fields: display: user width: half hidden: true + special: user-created - field: uploaded_on display: datetime diff --git a/api/src/database/system-data/fields/index.ts b/api/src/database/system-data/fields/index.ts index b085e359e2..0f27da3436 100644 --- a/api/src/database/system-data/fields/index.ts +++ b/api/src/database/system-data/fields/index.ts @@ -1,7 +1,7 @@ import fse from 'fs-extra'; import { merge } from 'lodash'; import path from 'path'; -import { FieldMeta } from '../../../types'; +import { FieldMeta } from '@directus/shared/types'; import { requireYAML } from '../../../utils/require-yaml'; const defaults = requireYAML(require.resolve('./_defaults.yaml')); diff --git a/api/src/database/system-data/fields/permissions.yaml b/api/src/database/system-data/fields/permissions.yaml index 2b564453e4..d9842bb2f7 100644 --- a/api/src/database/system-data/fields/permissions.yaml +++ b/api/src/database/system-data/fields/permissions.yaml @@ -15,9 +15,6 @@ fields: - field: role width: half - - field: limit - width: half - - field: collection width: half diff --git a/api/src/database/system-data/fields/roles.yaml b/api/src/database/system-data/fields/roles.yaml index 664204a226..85880b0956 100644 --- a/api/src/database/system-data/fields/roles.yaml +++ b/api/src/database/system-data/fields/roles.yaml @@ -9,7 +9,7 @@ fields: - field: name interface: input options: - placeholder: The unique name for this role... + placeholder: $t:field_options.directus_roles.name width: half - field: icon @@ -20,7 +20,7 @@ fields: - field: description interface: input options: - placeholder: A description of this role... + placeholder: $t:field_options.directus_roles.description width: full - field: app_access @@ -36,7 +36,7 @@ fields: - field: ip_access interface: tags options: - placeholder: Add allowed IP addresses, leave empty to allow all... + placeholder: $t:field_options.directus_roles.ip_access special: csv width: full @@ -60,13 +60,13 @@ fields: template: '{{ name }}' addLabel: Add New Module... fields: - - name: Icon + - name: $t:field_options.directus_roles.fields.icon_name field: icon type: string meta: interface: select-icon width: half - - name: Name + - name: $t:field_options.directus_roles.fields.name_name field: name type: string meta: @@ -74,8 +74,8 @@ fields: width: half options: iconRight: title - placeholder: Enter a title... - - name: Link + placeholder: + - name: $t:field_options.directus_roles.fields.link_name field: link type: string meta: @@ -83,7 +83,7 @@ fields: width: full options: iconRight: link - placeholder: Relative or absolute URL... 
+ placeholder: $t:field_options.directus_roles.fields.link_placeholder special: json width: full @@ -91,9 +91,9 @@ fields: interface: list options: template: '{{ group_name }}' - addLabel: Add New Group... + addLabel: $t:field_options.directus_roles.collection_list.group_name_addLabel fields: - - name: Group Name + - name: $t:field_options.directus_roles.collection_list.fields.group_name field: group_name type: string meta: @@ -101,10 +101,10 @@ fields: interface: input options: iconRight: title - placeholder: Label this group... + placeholder: $t:field_options.directus_roles.collection_list.fields.group_placeholder schema: is_nullable: false - - name: Type + - name: $t:field_options.directus_roles.collection_list.fields.type_name field: accordion type: string schema: @@ -115,21 +115,21 @@ fields: options: choices: - value: always_open - text: Always Open + text: $t:field_options.directus_roles.collection_list.fields.choices_always - value: start_open - text: Start Open + text: $t:field_options.directus_roles.collection_list.fields.choices_start_open - value: start_collapsed - text: Start Collapsed - - name: Collections + text: $t:field_options.directus_roles.collection_list.fields.choices_start_collapsed + - name: $t:field_options.directus_roles.collections_name field: collections type: JSON meta: interface: list options: - addLabel: Add New Collection... + addLabel: $t:field_options.directus_roles.collections_addLabel template: '{{ collection }}' fields: - - name: Collection + - name: $t:field_options.directus_roles.collections_name field: collection type: string meta: diff --git a/api/src/database/system-data/fields/settings.yaml b/api/src/database/system-data/fields/settings.yaml index 12f5f99944..442cf0df05 100644 --- a/api/src/database/system-data/fields/settings.yaml +++ b/api/src/database/system-data/fields/settings.yaml @@ -8,7 +8,7 @@ fields: interface: input options: iconRight: title - placeholder: My project... + placeholder: $t:field_options.directus_settings.project_name_placeholder translations: language: en-US translations: Name @@ -26,7 +26,7 @@ fields: - field: project_color interface: select-color - note: Login & Logo Background + note: $t:field_options.directus_settings.project_logo_note translations: language: en-US translations: Brand Color @@ -44,7 +44,7 @@ fields: interface: presentation-divider options: icon: public - title: Public Pages + title: $t:fields.directus_settings.public_pages special: - alias - no-data @@ -67,14 +67,14 @@ fields: - field: public_note interface: input-multiline options: - placeholder: A short, public message that supports markdown formatting... 
+ placeholder: $t:field_options.directus_settings.public_note_placeholder width: full - field: security_divider interface: presentation-divider options: icon: security - title: Security + title: $t:security special: - alias - no-data @@ -85,11 +85,11 @@ fields: options: choices: - value: null - text: None – Not Recommended + text: $t:field_options.directus_settings.auth_password_policy.none_text - value: '/^.{8,}$/' - text: Weak – Minimum 8 Characters + text: $t:field_options.directus_settings.auth_password_policy.weak_text - value: "/(?=^.{8,}$)(?=.*\\d)(?=.*[a-z])(?=.*[A-Z])(?=.*[!@#$%^&*()_+}{';'?>.<,])(?!.*\\s).*$/" - text: Strong – Upper / Lowercase / Numbers / Special + text: $t:field_options.directus_settings.auth_password_policy.strong_text allowOther: true width: half @@ -104,7 +104,7 @@ fields: interface: presentation-divider options: icon: storage - title: Files & Thumbnails + title: $t:fields.directus_settings.files_and_thumbnails special: - alias - no-data @@ -115,7 +115,7 @@ fields: options: fields: - field: key - name: Key + name: $t:key type: string schema: is_nullable: false @@ -124,7 +124,7 @@ fields: options: slug: true onlyOnCreate: false - width: half + width: full - field: fit name: Fit type: string @@ -135,16 +135,16 @@ fields: options: choices: - value: contain - text: Contain (preserve aspect ratio) + text: $t:field_options.directus_settings.storage_asset_presets.fit.contain_text - value: cover - text: Cover (forces exact size) + text: $t:field_options.directus_settings.storage_asset_presets.fit.cover_text - value: inside - text: Fit inside + text: $t:field_options.directus_settings.storage_asset_presets.fit.fit_text - value: outside - text: Fit outside + text: $t:field_options.directus_settings.storage_asset_presets.fit.outside_text width: half - field: width - name: Width + name: $t:width type: integer schema: is_nullable: false @@ -152,7 +152,7 @@ fields: interface: input width: half - field: height - name: Height + name: $t:height type: integer schema: is_nullable: false @@ -161,7 +161,7 @@ fields: width: half - field: quality type: integer - name: Quality + name: $t:quality schema: default_value: 80 is_nullable: false @@ -173,6 +173,7 @@ fields: step: 1 width: half - field: withoutEnlargement + name: Upscaling type: boolean schema: default_value: false @@ -180,7 +181,51 @@ fields: interface: boolean width: half options: - label: Don't upscale images + label: $t:no_upscale + - field: format + name: Format + type: string + schema: + is_nullable: false + default_value: '' + meta: + interface: select-dropdown + options: + allowNone: true + choices: + - value: jpeg + text: JPEG + - value: png + text: PNG + - value: webp + text: WebP + - value: tiff + text: Tiff + width: half + - field: transforms + name: $t:field_options.directus_settings.additional_transforms + type: json + schema: + is_nullable: false + default_value: [] + meta: + note: $t:field_options.directus_settings.transforms_note + + interface: json + options: + template: > + [ + ["blur", 45], + ["grayscale"], + ["extend", { "right": 500, "background": "rgb(255, 0, 0)" }] + ] + placeholder: > + [ + ["blur", 45], + ["grayscale"], + ["extend", { "right": 500, "background": "rgb(255, 0, 0)" }] + ] + width: full template: '{{key}}' special: json width: full @@ -190,18 +235,23 @@ fields: options: choices: - value: all - text: All + text: $t:all - value: none - text: None + text: $t:none - value: presets - text: Presets Only + text: $t:presets_only width: half + - field: storage_default_folder + interface: 
system-folder + width: half + note: $t:interfaces.system-folder.field_hint + - field: overrides_divider interface: presentation-divider options: icon: brush - title: App Overrides + title: $t:fields.directus_settings.overrides special: - alias - no-data @@ -213,3 +263,72 @@ fields: language: css lineNumber: true width: full + + - field: map_divider + interface: presentation-divider + options: + icon: map + title: $t:maps + special: + - alias + - no-data + width: full + + - field: mapbox_key + interface: input + options: + icon: key + title: $t:field_options.directus_settings.mapbox_key + placeholder: $t:field_options.directus_settings.mapbox_placeholder + iconLeft: vpn_key + font: monospace + width: half + + - field: basemaps + interface: list + special: json + options: + template: '{{name}}' + fields: + - field: name + name: $t:name + schema: + is_nullable: false + meta: + interface: text-input + options: + placeholder: Enter the basemap name... + - field: type + name: $t:type + meta: + interface: select-dropdown + options: + choices: + - value: raster + text: $t:field_options.directus_settings.basemaps_raster + - value: tile + text: $t:field_options.directus_settings.basemaps_tile + - value: style + text: $t:field_options.directus_settings.basemaps_style + - field: url + name: $t:url + schema: + is_nullable: false + meta: + interface: text-input + options: + placeholder: http://{a-c}.tile.openstreetmap.org/{z}/{x}/{y}.png + - field: tileSize + name: $t:tile_size + schema: + is_nullable: true + meta: + interface: input + options: + placeholder: '512' + conditions: + - name: typeNeqRaster + rule: + type: + _neq: 'raster' + hidden: true diff --git a/api/src/database/system-data/fields/users.yaml b/api/src/database/system-data/fields/users.yaml index 9c3760a58e..16b7d17cdd 100644 --- a/api/src/database/system-data/fields/users.yaml +++ b/api/src/database/system-data/fields/users.yaml @@ -64,7 +64,7 @@ fields: interface: presentation-divider options: icon: face - title: User Preferences + title: $t:fields.directus_users.user_preferences special: - alias - no-data @@ -79,11 +79,11 @@ fields: options: choices: - value: auto - text: Automatic (Based on System) + text: $t:fields.directus_users.theme_auto - value: light - text: Light Mode + text: $t:fields.directus_users.theme_light - value: dark - text: Dark Mode + text: $t:fields.directus_users.theme_dark width: half - field: tfa_secret @@ -95,7 +95,7 @@ fields: interface: presentation-divider options: icon: verified_user - title: Admin Options + title: $t:fields.directus_users.admin_options color: '#E35169' special: - alias @@ -106,15 +106,15 @@ fields: interface: select-dropdown options: choices: - - text: Draft + - text: $t:fields.directus_users.status_draft value: draft - - text: Invited + - text: $t:fields.directus_users.status_invited value: invited - - text: Active + - text: $t:fields.directus_users.status_active value: active - - text: Suspended + - text: $t:fields.directus_users.status_suspended value: suspended - - text: Archived + - text: $t:fields.directus_users.status_archived value: archived width: half @@ -132,7 +132,7 @@ fields: interface: token options: iconRight: vpn_key - placeholder: Enter a secure access token... 
+ placeholder: $t:fields.directus_users.token_placeholder width: full - field: id diff --git a/api/src/database/system-data/fields/webhooks.yaml b/api/src/database/system-data/fields/webhooks.yaml index 82ae7c3f27..17e7f4518b 100644 --- a/api/src/database/system-data/fields/webhooks.yaml +++ b/api/src/database/system-data/fields/webhooks.yaml @@ -38,26 +38,26 @@ fields: defaultBackground: 'var(--background-normal-alt)' showAsDot: true choices: - - text: Active + - text: $t:active value: active foreground: 'var(--primary-10)' background: 'var(--primary)' - - text: Inactive + - text: $t:inactive value: inactive foreground: 'var(--foreground-normal)' background: 'var(--background-normal-alt)' options: choices: - - text: Active + - text: $t:active value: active - - text: Inactive + - text: $t:inactive value: inactive width: half - field: data interface: boolean options: - label: Send Event Data + label: $t:fields.directus_webhooks.data_label special: boolean width: half display: boolean @@ -66,7 +66,7 @@ interface: presentation-divider options: icon: api - title: Triggers + title: $t:fields.directus_webhooks.triggers special: - alias - no-data @@ -76,11 +76,11 @@ interface: select-multiple-checkbox options: choices: - - text: Create + - text: $t:create value: create - - text: Update + - text: $t:update value: update - - text: Delete + - text: $t:delete_label value: delete special: csv width: full @@ -89,19 +89,19 @@ defaultForeground: 'var(--foreground-normal)' defaultBackground: 'var(--background-normal-alt)' choices: - - text: Create + - text: $t:create value: create foreground: 'var(--primary)' background: 'var(--primary-25)' - - text: Update + - text: $t:update value: update foreground: 'var(--blue)' background: 'var(--blue-25)' - - text: Delete + - text: $t:delete_label value: delete foreground: 'var(--danger)' background: 'var(--danger-25)' - - text: Login + - text: $t:login value: authenticate foreground: 'var(--purple)' background: 'var(--purple-25)' diff --git a/api/src/emitter.ts b/api/src/emitter.ts index 8329626ffc..79397dee46 100644 --- a/api/src/emitter.ts +++ b/api/src/emitter.ts @@ -18,7 +18,7 @@ export async function emitAsyncSafe(name: string, ...args: any[]): Promise { try { return await emitter.emitAsync(name, ...args); - } catch (err) { + } catch (err: any) { logger.warn(`An error was thrown while executing hook "${name}"`); logger.warn(err); } diff --git a/api/src/env.ts b/api/src/env.ts index 9b50dc1901..c6822fe823 100644 --- a/api/src/env.ts +++ b/api/src/env.ts @@ -8,7 +8,7 @@ import fs from 'fs'; import { clone, toNumber, toString } from 'lodash'; import path from 'path'; import { requireYAML } from './utils/require-yaml'; -import { toArray } from './utils/to-array'; +import { toArray } from '@directus/shared/utils'; const acceptedEnvTypes = ['string', 'number', 'regex', 'array']; @@ -16,9 +16,11 @@ const defaults: Record<string, any> = { CONFIG_PATH: path.resolve(process.cwd(), '.env'), PORT: 8055, - PUBLIC_URL: 'http://localhost:8055', + PUBLIC_URL: '/', MAX_PAYLOAD_SIZE: '100kb', + DB_EXCLUDE_TABLES: 'spatial_ref_sys', + STORAGE_LOCATIONS: 'local', STORAGE_LOCAL_DRIVER: 'local', STORAGE_LOCAL_ROOT: './uploads', @@ -34,6 +36,7 @@ REFRESH_TOKEN_TTL: '7d', REFRESH_TOKEN_COOKIE_SECURE: false, REFRESH_TOKEN_COOKIE_SAME_SITE: 'lax', + REFRESH_TOKEN_COOKIE_NAME: 'directus_refresh_token', ROOT_REDIRECT: './admin', @@ -64,9 +67,12 @@ TELEMETRY: true, - ASSETS_CACHE_TTL: '30m', + ASSETS_CACHE_TTL: '30d',
ASSETS_TRANSFORM_MAX_CONCURRENT: 1, ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION: 6000, + ASSETS_TRANSFORM_MAX_OPERATIONS: 5, + + SERVE_APP: true, }; // Allows us to force certain environment variable into a type, instead of relying @@ -170,6 +176,8 @@ function getEnvironmentValueByType(envVariableString: string) { return new RegExp(envVariableValue); case 'string': return envVariableValue; + case 'json': + return tryJSON(envVariableValue); } } @@ -181,14 +189,14 @@ function processValues(env: Record<string, any>) { // and store it in the variable with the same name but without '_FILE' at the end let newKey; if (key.length > 5 && key.endsWith('_FILE')) { + newKey = key.slice(0, -5); + if (newKey in env) { + throw new Error( + `Duplicate environment variable encountered: you can't use "${newKey}" and "${key}" simultaneously.` + ); + } try { value = fs.readFileSync(value, { encoding: 'utf8' }); - newKey = key.slice(0, -5); - if (newKey in env) { - throw new Error( - `Duplicate environment variable encountered: you can't use "${key}" and "${newKey}" simultaneously.` - ); - } key = newKey; } catch { throw new Error(`Failed to read value from file "${value}", defined in environment variable "${key}".`); @@ -214,6 +222,9 @@ function processValues(env: Record<string, any>) { case 'array': env[key] = toArray(value); break; + case 'json': + env[key] = tryJSON(value); + break; } continue; } @@ -247,6 +258,14 @@ function processValues(env: Record<string, any>) { continue; } + if (String(value).includes(',')) { + env[key] = toArray(value); + } + + // Try converting the value to a JS object. This allows JSON objects to be passed for nested + // config flags, or custom param names (that aren't camelCased) + env[key] = tryJSON(value); + // If '_FILE' variable hasn't been processed yet, store it as it is (string) if (newKey) { env[key] = value; @@ -255,3 +274,11 @@ return env; } + +function tryJSON(value: any) { + try { + return JSON.parse(value); + } catch { + return value; + } +}
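Taken together, these env changes coerce raw values more aggressively: anything that parses as JSON becomes an object through the new `tryJSON`, the new `json` cases force that conversion for explicitly typed variables, and the `_FILE` duplicate check now fires before the file is read. Illustrative inputs and outcomes (hypothetical variable names):

// REDIS='{"host":"cache","port":6379}'       -> env.REDIS becomes the object { host: 'cache', port: 6379 }
// DB_PASSWORD_FILE=/run/secrets/db_password  -> env.DB_PASSWORD is set to the file's contents;
//                                                defining DB_PASSWORD alongside it now throws the
//                                                duplicate-variable error before any file I/O happens
// GREETING='hello'                            -> tryJSON fails to parse and returns the string untouched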
diff --git a/api/src/exceptions/database/contains-null-values.ts b/api/src/exceptions/database/contains-null-values.ts index 67fa622e29..a1456e291e 100644 --- a/api/src/exceptions/database/contains-null-values.ts +++ b/api/src/exceptions/database/contains-null-values.ts @@ -1,4 +1,4 @@ -import { BaseException } from '../base'; +import { BaseException } from '@directus/shared/exceptions'; type Exceptions = { collection: string; diff --git a/api/src/exceptions/database/dialects/mssql.ts b/api/src/exceptions/database/dialects/mssql.ts index dca25c3f05..6c4dc009df 100644 --- a/api/src/exceptions/database/dialects/mssql.ts +++ b/api/src/exceptions/database/dialects/mssql.ts @@ -46,7 +46,7 @@ async function uniqueViolation(error: MSSQLError) { * information_schema when this happens */ - const betweenQuotes = /'([^']+)'/; + const betweenQuotes = /'([^']+)'/g; const betweenParens = /\(([^)]+)\)/g; const quoteMatches = error.message.match(betweenQuotes); @@ -54,21 +54,35 @@ async function uniqueViolation(error: MSSQLError) { if (!quoteMatches || !parenMatches) return error; - const keyName = quoteMatches[1]; + const keyName = quoteMatches[1]?.slice(1, -1); - const database = getDatabase(); + let collection = quoteMatches[0]?.slice(1, -1); + let field: string | null = null; - const constraintUsage = await database - .select('*') - .from('INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE') - .where({ - CONSTRAINT_NAME: keyName, - }) - .first(); + if (keyName) { + const database = getDatabase(); - const collection = constraintUsage.TABLE_NAME; - const field = constraintUsage.COLUMN_NAME; - const invalid = parenMatches[parenMatches.length - 1].slice(1, -1); + const constraintUsage = await database + .select('sys.columns.name as field', database.raw('OBJECT_NAME(??) as collection', ['sys.columns.object_id'])) + .from('sys.indexes') + .innerJoin('sys.index_columns', (join) => { + join + .on('sys.indexes.object_id', '=', 'sys.index_columns.object_id') + .andOn('sys.indexes.index_id', '=', 'sys.index_columns.index_id'); + }) + .innerJoin('sys.columns', (join) => { + join + .on('sys.index_columns.object_id', '=', 'sys.columns.object_id') + .andOn('sys.index_columns.column_id', '=', 'sys.columns.column_id'); + }) + .where('sys.indexes.name', '=', keyName) + .first(); + + collection = constraintUsage?.collection; + field = constraintUsage?.field; + } + + const invalid = parenMatches[parenMatches.length - 1]?.slice(1, -1); return new RecordNotUniqueException(field, { collection, diff --git a/api/src/exceptions/database/invalid-foreign-key.ts b/api/src/exceptions/database/invalid-foreign-key.ts index 7102538757..f95750b3d3 100644 --- a/api/src/exceptions/database/invalid-foreign-key.ts +++ b/api/src/exceptions/database/invalid-foreign-key.ts @@ -1,4 +1,4 @@ -import { BaseException } from '../base'; +import { BaseException } from '@directus/shared/exceptions'; type Extensions = { collection: string; diff --git a/api/src/exceptions/database/not-null-violation.ts b/api/src/exceptions/database/not-null-violation.ts index e857c76d3a..154eda3d57 100644 --- a/api/src/exceptions/database/not-null-violation.ts +++ b/api/src/exceptions/database/not-null-violation.ts @@ -1,4 +1,4 @@ -import { BaseException } from '../base'; +import { BaseException } from '@directus/shared/exceptions'; type Exceptions = { collection: string; diff --git a/api/src/exceptions/database/record-not-unique.ts b/api/src/exceptions/database/record-not-unique.ts index 2bbb68cfff..47464b9fa8 100644 --- a/api/src/exceptions/database/record-not-unique.ts +++ b/api/src/exceptions/database/record-not-unique.ts @@ -1,4 +1,4 @@ -import { BaseException } from '../base'; +import { BaseException } from '@directus/shared/exceptions'; type Extensions = { collection: string; diff --git a/api/src/exceptions/database/translate.ts b/api/src/exceptions/database/translate.ts index 99f38367bd..06ae4fcfb9 100644 --- a/api/src/exceptions/database/translate.ts +++ b/api/src/exceptions/database/translate.ts @@ -1,4 +1,6 @@ -import getDatabase from '../../database'; +import { compact, last } from 'lodash'; +import { getDatabaseClient } from '../../database'; +import emitter from '../../emitter'; import { extractError as mssql } from './dialects/mssql'; import { extractError as mysql } from './dialects/mysql'; import { extractError as oracle } from './dialects/oracle'; @@ -16,22 +18,29 @@ import { SQLError } from './dialects/types'; * - Value Too Long */ export async function translateDatabaseError(error: SQLError): Promise { - const database = getDatabase(); + const client = getDatabaseClient(); + let defaultError: any; - switch (database.client.constructor.name) { - case 'Client_MySQL': - return mysql(error); - case 'Client_PG': - return postgres(error); - case 'Client_SQLite3': - return sqlite(error); - case 'Client_Oracledb': - case 'Client_Oracle': - return oracle(error); - case 'Client_MSSQL': - return await mssql(error); - - default: - return error; + switch (client) { + case 'mysql': + defaultError = mysql(error); + break; + case 'postgres': + defaultError = postgres(error); + break; + case 'sqlite': + defaultError = sqlite(error); + break; + case 'oracle': + defaultError = oracle(error); + break; + case 'mssql': + defaultError = await mssql(error); + break; } + + const hookResult = await emitter.emitAsync('database.error', defaultError, { client }); + const hookError = Array.isArray(hookResult) ? last(compact(hookResult)) : hookResult; + + return hookError || defaultError; }
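The new `database.error` emit turns error translation into an extension point: whatever a listener returns can replace the default exception, with the last non-empty return value winning (`last(compact(hookResult))`). A hypothetical hook illustrating that contract; it assumes hook events registered by an extension are wired straight onto this emitter and uses the `exceptions` object passed to the register function (see the extensions changes later in this diff):

// Hypothetical extension hook reacting to the new 'database.error' event
// (shape matches the HookConfig register function used by registerHook below).
export default ({ exceptions }: any) => ({
	'database.error': (defaultError: any, { client }: { client: string }) => {
		// Returning a value replaces the translated exception; returning nothing keeps the default.
		if (client === 'mssql' && String(defaultError?.message ?? '').includes('UNIQUE')) {
			return new exceptions.InvalidPayloadException('That value already exists.');
		}
	},
});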
diff --git a/api/src/exceptions/database/value-out-of-range.ts b/api/src/exceptions/database/value-out-of-range.ts index e238274377..48f42de9f5 100644 --- a/api/src/exceptions/database/value-out-of-range.ts +++ b/api/src/exceptions/database/value-out-of-range.ts @@ -1,4 +1,4 @@ -import { BaseException } from '../base'; +import { BaseException } from '@directus/shared/exceptions'; type Exceptions = { collection: string; diff --git a/api/src/exceptions/database/value-too-long.ts b/api/src/exceptions/database/value-too-long.ts index 4d27b67099..0c0bfabf49 100644 --- a/api/src/exceptions/database/value-too-long.ts +++ b/api/src/exceptions/database/value-too-long.ts @@ -1,4 +1,4 @@ -import { BaseException } from '../base'; +import { BaseException } from '@directus/shared/exceptions'; type Extensions = { collection: string; diff --git a/api/src/exceptions/forbidden.ts b/api/src/exceptions/forbidden.ts index 4b464d7e00..fd969abb05 100644 --- a/api/src/exceptions/forbidden.ts +++ b/api/src/exceptions/forbidden.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class ForbiddenException extends BaseException { constructor() { diff --git a/api/src/exceptions/graphql-validation.ts b/api/src/exceptions/graphql-validation.ts index 09ddb81a29..dc193b9db9 100644 --- a/api/src/exceptions/graphql-validation.ts +++ b/api/src/exceptions/graphql-validation.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class GraphQLValidationException extends BaseException { constructor(extensions: Record<string, any>) { diff --git a/api/src/exceptions/hit-rate-limit.ts b/api/src/exceptions/hit-rate-limit.ts index 25f2f61663..077f5c0193 100644 --- a/api/src/exceptions/hit-rate-limit.ts +++ b/api/src/exceptions/hit-rate-limit.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; type Extensions = { limit: number; diff --git a/api/src/exceptions/illegal-asset-transformation.ts b/api/src/exceptions/illegal-asset-transformation.ts index 8b964cc433..0dbc174354 100644 --- a/api/src/exceptions/illegal-asset-transformation.ts +++ b/api/src/exceptions/illegal-asset-transformation.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class IllegalAssetTransformation extends BaseException { constructor(message: string) { diff --git a/api/src/exceptions/index.ts b/api/src/exceptions/index.ts index c55b89d4eb..d00eaf78ba 100644 --- a/api/src/exceptions/index.ts +++ b/api/src/exceptions/index.ts @@ -1,5 +1,3 @@ -export * from './base'; -export * from './failed-validation'; export * from './forbidden'; export * from './graphql-validation'; export * from './hit-rate-limit'; diff --git a/api/src/exceptions/invalid-credentials.ts b/api/src/exceptions/invalid-credentials.ts index 015cd3b012..cfdfc258b0 100644 --- a/api/src/exceptions/invalid-credentials.ts +++ b/api/src/exceptions/invalid-credentials.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class InvalidCredentialsException extends BaseException { constructor(message = 'Invalid user credentials.') {
diff --git a/api/src/exceptions/invalid-ip.ts b/api/src/exceptions/invalid-ip.ts index 4709418d5f..73ca7d068d 100644 --- a/api/src/exceptions/invalid-ip.ts +++ b/api/src/exceptions/invalid-ip.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class InvalidIPException extends BaseException { constructor(message = 'Invalid IP address.') { diff --git a/api/src/exceptions/invalid-otp.ts b/api/src/exceptions/invalid-otp.ts index da7d736cc0..13e2db07f8 100644 --- a/api/src/exceptions/invalid-otp.ts +++ b/api/src/exceptions/invalid-otp.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class InvalidOTPException extends BaseException { constructor(message = 'Invalid user OTP.') { diff --git a/api/src/exceptions/invalid-payload.ts b/api/src/exceptions/invalid-payload.ts index b041444e8b..d40382f87b 100644 --- a/api/src/exceptions/invalid-payload.ts +++ b/api/src/exceptions/invalid-payload.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class InvalidPayloadException extends BaseException { constructor(message: string, extensions?: Record<string, any>) { diff --git a/api/src/exceptions/invalid-query.ts b/api/src/exceptions/invalid-query.ts index 0419ee0536..fd7ed5f134 100644 --- a/api/src/exceptions/invalid-query.ts +++ b/api/src/exceptions/invalid-query.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class InvalidQueryException extends BaseException { constructor(message: string) { diff --git a/api/src/exceptions/method-not-allowed.ts b/api/src/exceptions/method-not-allowed.ts index 6d43769bc2..656d92d43b 100644 --- a/api/src/exceptions/method-not-allowed.ts +++ b/api/src/exceptions/method-not-allowed.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; type Extensions = { allow: string[]; diff --git a/api/src/exceptions/range-not-satisfiable.ts b/api/src/exceptions/range-not-satisfiable.ts index 6b169dde0e..437e0fac7e 100644 --- a/api/src/exceptions/range-not-satisfiable.ts +++ b/api/src/exceptions/range-not-satisfiable.ts @@ -1,5 +1,5 @@ import { Range } from '@directus/drive'; -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class RangeNotSatisfiableException extends BaseException { constructor(range: Range) { diff --git a/api/src/exceptions/route-not-found.ts b/api/src/exceptions/route-not-found.ts index 6f054af4b1..d225dba10b 100644 --- a/api/src/exceptions/route-not-found.ts +++ b/api/src/exceptions/route-not-found.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class RouteNotFoundException extends BaseException { constructor(path: string) { diff --git a/api/src/exceptions/service-unavailable.ts b/api/src/exceptions/service-unavailable.ts index f425f100d2..cd8a39860e 100644 --- a/api/src/exceptions/service-unavailable.ts +++ b/api/src/exceptions/service-unavailable.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; type Extensions = { service: string;
Extensions = { service: string; diff --git a/api/src/exceptions/unprocessable-entity.ts b/api/src/exceptions/unprocessable-entity.ts index 6fe580d5ea..5a6dc25f30 100644 --- a/api/src/exceptions/unprocessable-entity.ts +++ b/api/src/exceptions/unprocessable-entity.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class UnprocessableEntityException extends BaseException { constructor(message: string) { diff --git a/api/src/exceptions/user-suspended.ts b/api/src/exceptions/user-suspended.ts index 4267fc6ae9..9bd5ae6d89 100644 --- a/api/src/exceptions/user-suspended.ts +++ b/api/src/exceptions/user-suspended.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class UserSuspendedException extends BaseException { constructor(message = 'User suspended.') { diff --git a/api/src/extensions.ts b/api/src/extensions.ts index d1c94e60ed..e9893605c4 100644 --- a/api/src/extensions.ts +++ b/api/src/extensions.ts @@ -7,34 +7,49 @@ import { getLocalExtensions, getPackageExtensions, resolvePackage, -} from '@directus/shared/utils'; -import { APP_EXTENSION_TYPES, SHARED_DEPS } from '@directus/shared/constants'; +} from '@directus/shared/utils/node'; +import { + API_EXTENSION_PACKAGE_TYPES, + API_EXTENSION_TYPES, + APP_EXTENSION_TYPES, + APP_SHARED_DEPS, + EXTENSION_PACKAGE_TYPES, + EXTENSION_TYPES, +} from '@directus/shared/constants'; import getDatabase from './database'; import emitter from './emitter'; import env from './env'; import * as exceptions from './exceptions'; import logger from './logger'; -import { HookRegisterFunction, EndpointRegisterFunction } from './types'; +import { HookConfig, EndpointConfig } from './types'; import fse from 'fs-extra'; import { getSchema } from './utils/get-schema'; import * as services from './services'; import { schedule, validate } from 'node-cron'; -import { REGEX_BETWEEN_PARENS } from './constants'; +import { REGEX_BETWEEN_PARENS } from '@directus/shared/constants'; import { rollup } from 'rollup'; // @TODO Remove this once a new version of @rollup/plugin-virtual has been released // @ts-expect-error import virtual from '@rollup/plugin-virtual'; import alias from '@rollup/plugin-alias'; +import { Url } from './utils/url'; +import getModuleDefault from './utils/get-module-default'; let extensions: Extension[] = []; let extensionBundles: Partial> = {}; +const registeredHooks: string[] = []; export async function initializeExtensions(): Promise { - await ensureExtensionDirs(env.EXTENSIONS_PATH); - extensions = await getExtensions(); + try { + await ensureExtensionDirs(env.EXTENSIONS_PATH, env.SERVE_APP ? EXTENSION_TYPES : API_EXTENSION_TYPES); + extensions = await getExtensions(); + } catch (err: any) { + logger.warn(`Couldn't load extensions`); + logger.warn(err); + } - if (!('DIRECTUS_DEV' in process.env)) { + if (env.SERVE_APP) { extensionBundles = await generateExtensionBundles(); } @@ -67,14 +82,20 @@ export function registerExtensionHooks(): void { } async function getExtensions(): Promise { - const packageExtensions = await getPackageExtensions('.'); - const localExtensions = await getLocalExtensions(env.EXTENSIONS_PATH); + const packageExtensions = await getPackageExtensions( + '.', + env.SERVE_APP ? EXTENSION_PACKAGE_TYPES : API_EXTENSION_PACKAGE_TYPES + ); + const localExtensions = await getLocalExtensions( + env.EXTENSIONS_PATH, + env.SERVE_APP ? 
EXTENSION_TYPES : API_EXTENSION_TYPES + ); return [...packageExtensions, ...localExtensions]; } async function generateExtensionBundles() { - const sharedDepsMapping = await getSharedDepsMapping(SHARED_DEPS); + const sharedDepsMapping = await getSharedDepsMapping(APP_SHARED_DEPS); const internalImports = Object.entries(sharedDepsMapping).map(([name, path]) => ({ find: name, replacement: path, @@ -103,14 +124,15 @@ async function generateExtensionBundles() { async function getSharedDepsMapping(deps: string[]) { const appDir = await fse.readdir(path.join(resolvePackage('@directus/app'), 'dist')); - const adminUrl = env.PUBLIC_URL.endsWith('/') ? env.PUBLIC_URL + 'admin' : env.PUBLIC_URL + '/admin'; const depsMapping: Record = {}; for (const dep of deps) { const depName = appDir.find((file) => dep.replace(/\//g, '_') === file.substring(0, file.indexOf('.'))); if (depName) { - depsMapping[dep] = `${adminUrl}/${depName}`; + const depUrl = new Url(env.PUBLIC_URL).addPath('admin', depName); + + depsMapping[dep] = depUrl.toString({ rootRelative: true }); } else { logger.warn(`Couldn't find shared extension dependency "${dep}"`); } @@ -123,7 +145,7 @@ function registerHooks(hooks: Extension[]) { for (const hook of hooks) { try { registerHook(hook); - } catch (error) { + } catch (error: any) { logger.warn(`Couldn't register hook "${hook.name}"`); logger.warn(error); } @@ -131,16 +153,18 @@ function registerHooks(hooks: Extension[]) { function registerHook(hook: Extension) { const hookPath = path.resolve(hook.path, hook.entrypoint || ''); - const hookInstance: HookRegisterFunction | { default?: HookRegisterFunction } = require(hookPath); + const hookInstance: HookConfig | { default: HookConfig } = require(hookPath); - let register: HookRegisterFunction = hookInstance as HookRegisterFunction; - if (typeof hookInstance !== 'function') { - if (hookInstance.default) { - register = hookInstance.default; - } + // Make sure hooks are only registered once + if (registeredHooks.includes(hookPath)) { + return; + } else { + registeredHooks.push(hookPath); } - const events = register({ services, exceptions, env, database: getDatabase(), getSchema }); + const register = getModuleDefault(hookInstance); + + const events = register({ services, exceptions, env, database: getDatabase(), logger, getSchema }); for (const [event, handler] of Object.entries(events)) { if (event.startsWith('cron(')) { @@ -162,7 +186,7 @@ function registerEndpoints(endpoints: Extension[], router: Router) { for (const endpoint of endpoints) { try { registerEndpoint(endpoint); - } catch (error) { + } catch (error: any) { logger.warn(`Couldn't register endpoint "${endpoint.name}"`); logger.warn(error); } @@ -170,18 +194,16 @@ function registerEndpoints(endpoints: Extension[], router: Router) { function registerEndpoint(endpoint: Extension) { const endpointPath = path.resolve(endpoint.path, endpoint.entrypoint || ''); - const endpointInstance: EndpointRegisterFunction | { default?: EndpointRegisterFunction } = require(endpointPath); + const endpointInstance: EndpointConfig | { default: EndpointConfig } = require(endpointPath); - let register: EndpointRegisterFunction = endpointInstance as EndpointRegisterFunction; - if (typeof endpointInstance !== 'function') { - if (endpointInstance.default) { - register = endpointInstance.default; - } - } + const mod = getModuleDefault(endpointInstance); + + const register = typeof mod === 'function' ? mod : mod.handler; + const pathName = typeof mod === 'function' ? 
endpoint.name : mod.id; const scopedRouter = express.Router(); - router.use(`/${endpoint.name}/`, scopedRouter); + router.use(`/${pathName}`, scopedRouter); - register(scopedRouter, { services, exceptions, env, database: getDatabase(), getSchema }); + register(scopedRouter, { services, exceptions, env, database: getDatabase(), logger, getSchema }); } } diff --git a/api/src/grant.ts b/api/src/grant.ts index e8b702a961..7704ea3716 100644 --- a/api/src/grant.ts +++ b/api/src/grant.ts @@ -3,7 +3,7 @@ */ import env from './env'; -import { toArray } from './utils/to-array'; +import { toArray } from '@directus/shared/utils'; import { getConfigFromEnv } from './utils/get-config-from-env'; const enabledProviders = toArray(env.OAUTH_PROVIDERS).map((provider) => provider.toLowerCase()); diff --git a/api/src/logger.ts b/api/src/logger.ts index d03bd7aa2d..40b96aef44 100644 --- a/api/src/logger.ts +++ b/api/src/logger.ts @@ -7,7 +7,7 @@ import env from './env'; const pinoOptions: LoggerOptions = { level: env.LOG_LEVEL || 'info', redact: { - paths: ['req.headers.authorization', 'req.cookies.directus_refresh_token'], + paths: ['req.headers.authorization', `req.cookies.${env.REFRESH_TOKEN_COOKIE_NAME}`], censor: '--redact--', }, }; @@ -19,16 +19,20 @@ if (env.LOG_STYLE !== 'raw') { const logger = pino(pinoOptions); -export const expressLogger = pinoHTTP({ - logger, - serializers: { - req(request: Request) { - const output = stdSerializers.req(request); - output.url = redactQuery(output.url); - return output; - }, +export const expressLogger = pinoHTTP( + { + logger, }, -}) as RequestHandler; + { + serializers: { + req(request: Request) { + const output = stdSerializers.req(request); + output.url = redactQuery(output.url); + return output; + }, + }, + } +) as RequestHandler; export default logger; diff --git a/api/src/mailer.ts b/api/src/mailer.ts index 1b62abb55d..6b8bb3b7b4 100644 --- a/api/src/mailer.ts +++ b/api/src/mailer.ts @@ -1,6 +1,7 @@ import nodemailer, { Transporter } from 'nodemailer'; import env from './env'; import logger from './logger'; +import { getConfigFromEnv } from './utils/get-config-from-env'; let transporter: Transporter; @@ -23,13 +24,16 @@ export default function getMailer(): Transporter { }; } + const tls: Record = getConfigFromEnv('EMAIL_SMTP_TLS_'); + transporter = nodemailer.createTransport({ pool: env.EMAIL_SMTP_POOL, host: env.EMAIL_SMTP_HOST, port: env.EMAIL_SMTP_PORT, secure: env.EMAIL_SMTP_SECURE, ignoreTLS: env.EMAIL_SMTP_IGNORE_TLS, - auth: auth, + auth, + tls, } as Record); } else if (env.EMAIL_TRANSPORT.toLowerCase() === 'mailgun') { const mg = require('nodemailer-mailgun-transport'); @@ -39,6 +43,7 @@ export default function getMailer(): Transporter { api_key: env.EMAIL_MAILGUN_API_KEY, domain: env.EMAIL_MAILGUN_DOMAIN, }, + host: env.EMAIL_MAILGUN_HOST || 'api.mailgun.net', }) as any ); } else { diff --git a/api/src/middleware/authenticate.ts b/api/src/middleware/authenticate.ts index 8e6abe3b52..e156eb07cb 100644 --- a/api/src/middleware/authenticate.ts +++ b/api/src/middleware/authenticate.ts @@ -4,7 +4,7 @@ import getDatabase from '../database'; import env from '../env'; import { InvalidCredentialsException } from '../exceptions'; import asyncHandler from '../utils/async-handler'; -import isJWT from '../utils/is-jwt'; +import isDirectusJWT from '../utils/is-directus-jwt'; /** * Verify the passed JWT and assign the user ID and role to `req` @@ -23,12 +23,12 @@ const authenticate: RequestHandler = asyncHandler(async (req, res, next) => { const database = 
getDatabase(); - if (isJWT(req.token)) { + if (isDirectusJWT(req.token)) { let payload: { id: string }; try { - payload = jwt.verify(req.token, env.SECRET as string) as { id: string }; - } catch (err) { + payload = jwt.verify(req.token, env.SECRET as string, { issuer: 'directus' }) as { id: string }; + } catch (err: any) { if (err instanceof TokenExpiredError) { throw new InvalidCredentialsException('Token expired.'); } else if (err instanceof JsonWebTokenError) { diff --git a/api/src/middleware/cache.test.ts b/api/src/middleware/cache.test.ts new file mode 100644 index 0000000000..08450d0876 --- /dev/null +++ b/api/src/middleware/cache.test.ts @@ -0,0 +1,76 @@ +import express from 'express'; +import request from 'supertest'; +import checkCacheMiddleware from './cache'; + +jest.mock('../cache'); +jest.mock('../env', () => ({ + CACHE_ENABLED: true, + CACHE_NAMESPACE: 'test', + CACHE_STORE: 'memory', + CACHE_TTL: '5s', + CACHE_CONTROL_S_MAXAGE: true, +})); + +const { cache } = jest.requireMock('../cache'); +const env = jest.requireMock('../env'); + +const handler = jest.fn((req, res) => res.json({ data: 'Uncached value' })); +const setup = () => express().use(checkCacheMiddleware).all('/items/test', handler); + +beforeEach(jest.clearAllMocks); + +describe('cache middleware', () => { + test('should return the cached response for a request', async () => { + cache.get.mockResolvedValueOnce({ data: 'Cached value' }); + cache.get.mockResolvedValueOnce(new Date().getTime() + 1000 * 60); + + const res = await request(setup()).get('/items/test').send(); + + expect(res.body.data).toBe('Cached value'); + expect(res.headers['vary']).toBe('Origin, Cache-Control'); + expect(res.headers['cache-control']).toMatch(/public, max-age=\d+, s-maxage=\d+/); + expect(handler).not.toHaveBeenCalled(); + }); + + test('should call the handler when there is no cached value', async () => { + cache.get.mockResolvedValueOnce(undefined); + + const res = await request(setup()).get('/items/test').send(); + + expect(res.body.data).toBe('Uncached value'); + expect(cache.get).toHaveBeenCalledTimes(1); + expect(handler).toHaveBeenCalledTimes(1); + }); + + test('should not cache requests when the cache is disabled', async () => { + env.CACHE_ENABLED = false; + + const res = await request(setup()).get('/items/test').send(); + + expect(res.body.data).toBe('Uncached value'); + expect(cache.get).not.toHaveBeenCalled(); + expect(handler).toHaveBeenCalledTimes(1); + + env.CACHE_ENABLED = true; + }); + + test('should not use cache when the "Cache-Control" header is set to "no-store"', async () => { + const res = await request(setup()).get('/items/test').set('Cache-Control', 'no-store').send(); + + expect(res.body.data).toBe('Uncached value'); + expect(cache.get).not.toHaveBeenCalled(); + expect(handler).toHaveBeenCalledTimes(1); + }); + + test('should only cache GET requests', async () => { + const app = setup(); + + await request(app).post('/items/test').send(); + await request(app).put('/items/test').send(); + await request(app).patch('/items/test').send(); + await request(app).delete('/items/test').send(); + + expect(cache.get).not.toHaveBeenCalled(); + expect(handler).toHaveBeenCalledTimes(4); + }); +}); diff --git a/api/src/middleware/cache.ts b/api/src/middleware/cache.ts index 613cc81572..52fe344c1b 100644 --- a/api/src/middleware/cache.ts +++ b/api/src/middleware/cache.ts @@ -4,6 +4,7 @@ import env from '../env'; import asyncHandler from '../utils/async-handler'; import { getCacheControlHeader } from
'../utils/get-cache-headers'; import { getCacheKey } from '../utils/get-cache-key'; +import logger from '../logger'; const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next) => { const { cache } = getCache(); @@ -17,10 +18,26 @@ const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next) } const key = getCacheKey(req); - const cachedData = await cache.get(key); + + let cachedData; + + try { + cachedData = await cache.get(key); + } catch (err: any) { + logger.warn(err, `[cache] Couldn't read key ${key}. ${err.message}`); + return next(); + } if (cachedData) { - const cacheExpiryDate = (await cache.get(`${key}__expires_at`)) as number | null; + let cacheExpiryDate; + + try { + cacheExpiryDate = (await cache.get(`${key}__expires_at`)) as number | null; + } catch (err: any) { + logger.warn(err, `[cache] Couldn't read key ${`${key}__expires_at`}. ${err.message}`); + return next(); + } + const cacheTTL = cacheExpiryDate ? cacheExpiryDate - Date.now() : null; res.setHeader('Cache-Control', getCacheControlHeader(req, cacheTTL)); diff --git a/api/src/middleware/error-handler.ts b/api/src/middleware/error-handler.ts index 24a61390ad..88a6ecff17 100644 --- a/api/src/middleware/error-handler.ts +++ b/api/src/middleware/error-handler.ts @@ -1,9 +1,10 @@ import { ErrorRequestHandler } from 'express'; import { emitAsyncSafe } from '../emitter'; import env from '../env'; -import { BaseException, MethodNotAllowedException } from '../exceptions'; +import { MethodNotAllowedException } from '../exceptions'; +import { BaseException } from '@directus/shared/exceptions'; import logger from '../logger'; -import { toArray } from '../utils/to-array'; +import { toArray } from '@directus/shared/utils'; // Note: keep all 4 parameters here. 
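The try/catch guards added to the cache middleware above make cache reads fail open: a broken cache backend degrades to a cache miss instead of a 500. A minimal sketch of that pattern, assuming a Keyv-style store; the '../logger' import path is taken from the hunk above:

import logger from '../logger';

// Fail-open cache read: any store error (e.g. an unreachable Redis) is logged
// and treated as a miss, so the request falls through to the real handler.
async function tryCacheGet<T>(store: { get(key: string): Promise<T | undefined> }, key: string): Promise<T | undefined> {
	try {
		return await store.get(key);
	} catch (err: any) {
		logger.warn(err, `[cache] Couldn't read key ${key}. ${err.message}`);
		return undefined;
	}
}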
That's how Express recognizes it's the error handler, even if // we don't use next diff --git a/api/src/middleware/graphql.ts b/api/src/middleware/graphql.ts index 3374e61d5c..c2b6b411b5 100644 --- a/api/src/middleware/graphql.ts +++ b/api/src/middleware/graphql.ts @@ -17,7 +17,7 @@ export const parseGraphQL: RequestHandler = asyncHandler(async (req, res, next) if (req.method === 'GET') { query = (req.query.query as string | undefined) || null; - if (req.params.variables) { + if (req.query.variables) { try { variables = JSON.parse(req.query.variables as string); } catch { @@ -40,7 +40,7 @@ export const parseGraphQL: RequestHandler = asyncHandler(async (req, res, next) try { document = parse(new Source(query)); - } catch (err) { + } catch (err: any) { throw new InvalidPayloadException(`GraphQL schema validation error.`, { graphqlErrors: [err], }); diff --git a/api/src/middleware/rate-limiter.ts b/api/src/middleware/rate-limiter.ts index 0c59b6b01f..6e715e2f3d 100644 --- a/api/src/middleware/rate-limiter.ts +++ b/api/src/middleware/rate-limiter.ts @@ -18,7 +18,7 @@ if (env.RATE_LIMITER_ENABLED === true) { checkRateLimit = asyncHandler(async (req, res, next) => { try { await rateLimiter.consume(req.ip, 1); - } catch (rateLimiterRes) { + } catch (rateLimiterRes: any) { if (rateLimiterRes instanceof Error) throw rateLimiterRes; res.set('Retry-After', String(rateLimiterRes.msBeforeNext / 1000)); diff --git a/api/src/middleware/respond.ts b/api/src/middleware/respond.ts index ed4a4032a2..385943bba3 100644 --- a/api/src/middleware/respond.ts +++ b/api/src/middleware/respond.ts @@ -8,6 +8,7 @@ import asyncHandler from '../utils/async-handler'; import { getCacheKey } from '../utils/get-cache-key'; import { parse as toXML } from 'js2xmlparser'; import { getCacheControlHeader } from '../utils/get-cache-headers'; +import logger from '../logger'; export const respond: RequestHandler = asyncHandler(async (req, res) => { const { cache } = getCache(); @@ -20,8 +21,14 @@ export const respond: RequestHandler = asyncHandler(async (req, res) => { res.locals.cache !== false ) { const key = getCacheKey(req); - await cache.set(key, res.locals.payload, ms(env.CACHE_TTL as string)); - await cache.set(`${key}__expires_at`, Date.now() + ms(env.CACHE_TTL as string)); + + try { + await cache.set(key, res.locals.payload, ms(env.CACHE_TTL as string)); + await cache.set(`${key}__expires_at`, Date.now() + ms(env.CACHE_TTL as string)); + } catch (err: any) { + logger.warn(err, `[cache] Couldn't set key ${key}. 
${err}`); + } + res.setHeader('Cache-Control', getCacheControlHeader(req, ms(env.CACHE_TTL as string))); res.setHeader('Vary', 'Origin, Cache-Control'); } else { diff --git a/api/src/middleware/validate-batch.ts b/api/src/middleware/validate-batch.ts index 90bcd2538a..594e2e8754 100644 --- a/api/src/middleware/validate-batch.ts +++ b/api/src/middleware/validate-batch.ts @@ -1,6 +1,7 @@ import { RequestHandler } from 'express'; import Joi from 'joi'; -import { FailedValidationException, InvalidPayloadException } from '../exceptions'; +import { InvalidPayloadException } from '../exceptions'; +import { FailedValidationException } from '@directus/shared/exceptions'; import asyncHandler from '../utils/async-handler'; import { sanitizeQuery } from '../utils/sanitize-query'; diff --git a/api/src/server.ts b/api/src/server.ts index cb483021e0..ca90b90cfb 100644 --- a/api/src/server.ts +++ b/api/src/server.ts @@ -8,6 +8,7 @@ import url from 'url'; import createApp from './app'; import getDatabase from './database'; import { emitAsyncSafe } from './emitter'; +import env from './env'; import logger from './logger'; export default async function createServer(): Promise { @@ -86,9 +87,7 @@ export default async function createServer(): Promise { async function beforeShutdown() { emitAsyncSafe('server.stop.before', { server }); - if ('DIRECTUS_DEV' in process.env) { - logger.info('Restarting...'); - } else { + if (env.NODE_ENV !== 'development') { logger.info('Shutting down...'); } } @@ -102,7 +101,7 @@ export default async function createServer(): Promise { async function onShutdown() { emitAsyncSafe('server.stop'); - if (!('DIRECTUS_DEV' in process.env)) { + if (env.NODE_ENV !== 'development') { logger.info('Directus shut down OK. Bye bye!'); } } diff --git a/api/src/services/assets.ts b/api/src/services/assets.ts index baae16fb31..ef38f019c0 100644 --- a/api/src/services/assets.ts +++ b/api/src/services/assets.ts @@ -1,15 +1,18 @@ import { Range, StatResponse } from '@directus/drive'; -import { Knex } from 'knex'; -import path from 'path'; -import sharp, { ResizeOptions } from 'sharp'; -import getDatabase from '../database'; -import { RangeNotSatisfiableException, IllegalAssetTransformation } from '../exceptions'; -import storage from '../storage'; -import { AbstractServiceOptions, Accountability, Transformation } from '../types'; -import { AuthorizationService } from './authorization'; import { Semaphore } from 'async-mutex'; +import { Knex } from 'knex'; +import { contentType } from 'mime-types'; +import ObjectHash from 'object-hash'; +import path from 'path'; +import sharp from 'sharp'; +import getDatabase from '../database'; import env from '../env'; -import { File } from '../types'; +import { IllegalAssetTransformation, RangeNotSatisfiableException } from '../exceptions'; +import storage from '../storage'; +import { AbstractServiceOptions, File, Transformation, TransformationParams, TransformationPreset } from '../types'; +import { Accountability } from '@directus/shared/types'; +import { AuthorizationService } from './authorization'; +import * as TransformationUtils from '../utils/transformations'; sharp.concurrency(1); @@ -30,7 +33,7 @@ export class AssetsService { async getAsset( id: string, - transformation: Transformation, + transformation: TransformationParams | TransformationPreset, range?: Range ): Promise<{ stream: NodeJS.ReadableStream; file: any; stat: StatResponse }> { const publicSettings = await this.knex @@ -53,18 +56,23 @@ export class AssetsService { } const type = file.type; + 
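For context on the TransformationParams | TransformationPreset change below: the preset resolver turns the requested transformation into a list of [method, ...args] tuples that are replayed onto a sharp pipeline (see the transforms.forEach call further down). A sketch under that assumption; the tuple values here are illustrative, but resize and toFormat are real sharp methods:

import sharp from 'sharp';

// Hypothetical resolved transforms in the [method, ...args] tuple shape
// that the service applies to the sharp pipeline.
const transforms: ['resize' | 'toFormat', ...unknown[]][] = [
	['resize', { width: 300, height: 300, fit: 'cover' }],
	['toFormat', 'webp', { quality: 80 }],
];

const transformer = sharp().rotate();
transforms.forEach(([method, ...args]) => (transformer[method] as any)(...args));
// readStream.pipe(transformer) then streams the derived asset back to storage.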
const transforms = TransformationUtils.resolvePreset(transformation, file); // We can only transform JPEG, PNG, WebP, and TIFF - if (type && Object.keys(transformation).length > 0 && ['image/jpeg', 'image/png', 'image/webp'].includes(type)) { - const resizeOptions = this.parseTransformation(transformation); + if (type && transforms.length > 0 && ['image/jpeg', 'image/png', 'image/webp', 'image/tiff'].includes(type)) { + const maybeNewFormat = TransformationUtils.maybeExtractFormat(transforms); const assetFilename = path.basename(file.filename_disk, path.extname(file.filename_disk)) + - this.getAssetSuffix(transformation) + - path.extname(file.filename_disk); + getAssetSuffix(transforms) + + (maybeNewFormat ? `.${maybeNewFormat}` : path.extname(file.filename_disk)); const { exists } = await storage.disk(file.storage).exists(assetFilename); + if (maybeNewFormat) { + file.type = contentType(assetFilename) || null; + } + if (exists) { return { stream: storage.disk(file.storage).getStream(assetFilename, range), @@ -94,15 +102,9 @@ export class AssetsService { const transformer = sharp({ limitInputPixels: Math.pow(env.ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION, 2), sequentialRead: true, - }) - .rotate() - .resize(resizeOptions); + }).rotate(); - if (transformation.quality) { - transformer.toFormat(type.substring(6) as 'jpeg' | 'png' | 'webp', { - quality: Number(transformation.quality), - }); - } + transforms.forEach(([method, ...args]) => (transformer[method] as any).apply(transformer, args)); await storage.disk(file.storage).put(assetFilename, readStream.pipe(transformer), type); @@ -118,28 +120,9 @@ export class AssetsService { return { stream: readStream, file, stat }; } } - - private parseTransformation(transformation: Transformation): ResizeOptions { - const resizeOptions: ResizeOptions = {}; - - if (transformation.width) resizeOptions.width = Number(transformation.width); - if (transformation.height) resizeOptions.height = Number(transformation.height); - if (transformation.fit) resizeOptions.fit = transformation.fit; - if (transformation.withoutEnlargement) - resizeOptions.withoutEnlargement = Boolean(transformation.withoutEnlargement); - - return resizeOptions; - } - - private getAssetSuffix(transformation: Transformation) { - if (Object.keys(transformation).length === 0) return ''; - - return ( - '__' + - Object.entries(transformation) - .sort((a, b) => (a[0] > b[0] ?
1 : -1)) - .map((e) => e.join('_')) - .join(',') - ); - } } + +const getAssetSuffix = (transforms: Transformation[]) => { + if (Object.keys(transforms).length === 0) return ''; + return `__${ObjectHash.sha1(transforms)}`; +}; diff --git a/api/src/services/authentication.ts b/api/src/services/authentication.ts index ae944df822..5f50c56951 100644 --- a/api/src/services/authentication.ts +++ b/api/src/services/authentication.ts @@ -15,9 +15,12 @@ import { } from '../exceptions'; import { createRateLimiter } from '../rate-limiter'; import { ActivityService } from '../services/activity'; -import { AbstractServiceOptions, Accountability, Action, SchemaOverview, Session } from '../types'; +import { AbstractServiceOptions, Action, SchemaOverview, Session } from '../types'; +import { Accountability } from '@directus/shared/types'; import { SettingsService } from './settings'; import { merge } from 'lodash'; +import { performance } from 'perf_hooks'; +import { stall } from '../utils/stall'; type AuthenticateOptions = { email: string; @@ -52,6 +55,9 @@ export class AuthenticationService { async authenticate( options: AuthenticateOptions ): Promise<{ accessToken: any; refreshToken: any; expires: any; id?: any }> { + const STALL_TIME = 100; + const timeStart = performance.now(); + const settingsService = new SettingsService({ knex: this.knex, schema: this.schema, @@ -59,13 +65,13 @@ export class AuthenticationService { const { email, password, ip, userAgent, otp } = options; - let user = await this.knex + const user = await this.knex .select('id', 'password', 'role', 'tfa_secret', 'status') .from('directus_users') .whereRaw('LOWER(??) = ?', ['email', email.toLowerCase()]) .first(); - const updatedUser = await emitter.emitAsync('auth.login.before', options, { + const updatedOptions = await emitter.emitAsync('auth.login.before', options, { event: 'auth.login.before', action: 'login', schema: this.schema, @@ -76,8 +82,8 @@ export class AuthenticationService { database: this.knex, }); - if (updatedUser) { - user = updatedUser.length > 0 ? updatedUser.reduce((val, acc) => merge(acc, val)) : user; + if (updatedOptions) { + options = updatedOptions.length > 0 ? 
updatedOptions.reduce((acc, val) => merge(acc, val), {}) : options; } const emitStatus = (status: 'fail' | 'success') => { @@ -97,8 +103,10 @@ export class AuthenticationService { emitStatus('fail'); if (user?.status === 'suspended') { + await stall(STALL_TIME, timeStart); throw new UserSuspendedException(); } else { + await stall(STALL_TIME, timeStart); throw new InvalidCredentialsException(); } } @@ -113,7 +121,7 @@ export class AuthenticationService { try { await loginAttemptsLimiter.consume(user.id); - } catch (err) { + } catch { await this.knex('directus_users').update({ status: 'suspended' }).where({ id: user.id }); user.status = 'suspended'; @@ -125,17 +133,20 @@ export class AuthenticationService { if (password !== undefined) { if (!user.password) { emitStatus('fail'); + await stall(STALL_TIME, timeStart); throw new InvalidCredentialsException(); } if ((await argon2.verify(user.password, password)) === false) { emitStatus('fail'); + await stall(STALL_TIME, timeStart); throw new InvalidCredentialsException(); } } if (user.tfa_secret && !otp) { emitStatus('fail'); + await stall(STALL_TIME, timeStart); throw new InvalidOTPException(`"otp" is required`); } @@ -144,6 +155,7 @@ export class AuthenticationService { if (otpValid === false) { emitStatus('fail'); + await stall(STALL_TIME, timeStart); throw new InvalidOTPException(`"otp" is invalid`); } } @@ -159,6 +171,7 @@ export class AuthenticationService { */ const accessToken = jwt.sign(payload, env.SECRET as string, { expiresIn: env.ACCESS_TOKEN_TTL, + issuer: 'directus', }); const refreshToken = nanoid(64); @@ -193,6 +206,8 @@ export class AuthenticationService { await loginAttemptsLimiter.set(user.id, 0, 0); } + await stall(STALL_TIME, timeStart); + return { accessToken, refreshToken, @@ -223,6 +238,7 @@ export class AuthenticationService { const accessToken = jwt.sign({ id: record.id }, env.SECRET as string, { expiresIn: env.ACCESS_TOKEN_TTL, + issuer: 'directus', }); const newRefreshToken = nanoid(64); diff --git a/api/src/services/authorization.ts b/api/src/services/authorization.ts index 2ddbd9f7a2..c5c749d95f 100644 --- a/api/src/services/authorization.ts +++ b/api/src/services/authorization.ts @@ -1,13 +1,14 @@ import { Knex } from 'knex'; -import { cloneDeep, flatten, merge, uniq, uniqWith } from 'lodash'; +import { cloneDeep, merge, uniq, uniqWith, flatten, isNil } from 'lodash'; import getDatabase from '../database'; -import { FailedValidationException, ForbiddenException } from '../exceptions'; +import { ForbiddenException } from '../exceptions'; +import { FailedValidationException } from '@directus/shared/exceptions'; +import { validatePayload, parseFilter } from '@directus/shared/utils'; +import { Accountability } from '@directus/shared/types'; import { AbstractServiceOptions, - Accountability, AST, FieldNode, - Filter, Item, NestedCollectionNode, Permission, @@ -15,9 +16,8 @@ import { PrimaryKey, Query, SchemaOverview, + Aggregate, } from '../types'; -import generateJoi from '../utils/generate-joi'; -import { parseFilter } from '../utils/parse-filter'; import { ItemsService } from './items'; import { PayloadService } from './payload'; @@ -71,7 +71,13 @@ export class AuthorizationService { if (ast.type === 'm2a') { collections.push(...ast.names.map((name) => ({ collection: name, field: ast.fieldKey }))); - /** @TODO add nestedNode */ + for (const children of Object.values(ast.children)) { + for (const nestedNode of children) { + if (nestedNode.type !== 'field') { + collections.push(...getCollectionsFromAST(nestedNode)); + 
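The stall(STALL_TIME, timeStart) calls threaded through authenticate() above pad every outcome, success and failure alike, to the same minimum duration, so response timing can't reveal whether the email, password, or OTP check rejected the login. The helper itself isn't part of this diff; a plausible sketch of '../utils/stall' (an assumption, not the shipped code):

import { performance } from 'perf_hooks';

// Resolve once at least `ms` milliseconds have elapsed since `timeStart`,
// where `timeStart` is a performance.now() sample taken on entry.
export async function stall(ms: number, timeStart: number): Promise<void> {
	const remaining = ms - (performance.now() - timeStart);
	if (remaining <= 0) return;
	await new Promise((resolve) => setTimeout(resolve, remaining));
}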
} + } + } } else { collections.push({ collection: ast.name, @@ -89,17 +95,23 @@ export class AuthorizationService { } function validateFields(ast: AST | NestedCollectionNode | FieldNode) { - if (ast.type !== 'field' && ast.type !== 'm2a') { - /** @TODO remove m2a check */ - const collection = ast.name; + if (ast.type !== 'field') { + if (ast.type === 'm2a') { + for (const [collection, children] of Object.entries(ast.children)) { + checkFields(collection, children, ast.query?.[collection]?.aggregate); + } + } else { + checkFields(ast.name, ast.children, ast.query?.aggregate); + } + } + function checkFields(collection: string, children: (NestedCollectionNode | FieldNode)[], aggregate?: Aggregate) { // We check the availability of the permissions in the step before this is run const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!; - const allowedFields = permissions.fields || []; - if (ast.query.aggregate && allowedFields.includes('*') === false) { - for (const [_operation, aliasMap] of Object.entries(ast.query.aggregate)) { + if (aggregate && allowedFields.includes('*') === false) { + for (const [_operation, aliasMap] of Object.entries(aggregate)) { if (!aliasMap) continue; for (const [column, _alias] of Object.entries(aliasMap)) { @@ -108,7 +120,7 @@ export class AuthorizationService { } } - for (const childNode of ast.children) { + for (const childNode of children) { if (childNode.type !== 'field') { validateFields(childNode); continue; @@ -129,43 +141,52 @@ export class AuthorizationService { ast: AST | NestedCollectionNode | FieldNode, accountability: Accountability | null ): AST | NestedCollectionNode | FieldNode { - if (ast.type !== 'field' && ast.type !== 'm2a') { - /** @TODO remove m2a check */ - const collection = ast.name; + if (ast.type !== 'field') { + if (ast.type === 'm2a') { + const collections = Object.keys(ast.children); + for (const collection of collections) { + updateFilterQuery(collection, ast.query[collection]); + } + + for (const [collection, children] of Object.entries(ast.children)) { + ast.children[collection] = children.map((child) => applyFilters(child, accountability)) as ( + | NestedCollectionNode + | FieldNode + )[]; + } + } else { + const collection = ast.name; + + updateFilterQuery(collection, ast.query); + + ast.children = ast.children.map((child) => applyFilters(child, accountability)) as ( + | NestedCollectionNode + | FieldNode + )[]; + } + } + + return ast; + + function updateFilterQuery(collection: string, query: Query) { // We check the availability of the permissions in the step before this is run const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!; const parsedPermissions = parseFilter(permissions.permissions, accountability); - if (!ast.query.filter || Object.keys(ast.query.filter).length === 0) { - ast.query.filter = { _and: [] }; + if (!query.filter || Object.keys(query.filter).length === 0) { + query.filter = { _and: [] }; } else { - ast.query.filter = { _and: [ast.query.filter] }; + query.filter = { _and: [query.filter] }; } if (parsedPermissions && Object.keys(parsedPermissions).length > 0) { - ast.query.filter._and.push(parsedPermissions); + query.filter._and.push(parsedPermissions); } - if (ast.query.filter._and.length === 0) delete ast.query.filter._and; - - if (permissions.limit && ast.query.limit && ast.query.limit > permissions.limit) { - throw new ForbiddenException(); - } - - // Default to the permissions limit if limit hasn't been 
set - if (permissions.limit && !ast.query.limit) { - ast.query.limit = permissions.limit; - } - - ast.children = ast.children.map((child) => applyFilters(child, accountability)) as ( - | NestedCollectionNode - | FieldNode - )[]; + if (query.filter._and.length === 0) delete query.filter._and; } - - return ast; } } @@ -173,8 +194,6 @@ export class AuthorizationService { * Checks if the provided payload matches the configured permissions, and adds the presets to the payload. */ validatePayload(action: PermissionsAction, collection: string, data: Partial): Promise> { - const validationErrors: FailedValidationException[] = []; - const payload = cloneDeep(data); let permission: Permission | undefined; @@ -187,7 +206,6 @@ export class AuthorizationService { action, permissions: {}, validation: {}, - limit: null, fields: ['*'], presets: {}, }; @@ -216,44 +234,57 @@ export class AuthorizationService { const payloadWithPresets = merge({}, preset, payload); - const requiredColumns: string[] = []; + const hasValidationRules = + isNil(permission.validation) === false && Object.keys(permission.validation ?? {}).length > 0; - for (const [name, field] of Object.entries(this.schema.collections[collection].fields)) { + const requiredColumns: SchemaOverview['collections'][string]['fields'][string][] = []; + + for (const field of Object.values(this.schema.collections[collection].fields)) { const specials = field?.special ?? []; const hasGenerateSpecial = ['uuid', 'date-created', 'role-created', 'user-created'].some((name) => specials.includes(name) ); - const isRequired = field.nullable === false && field.defaultValue === null && hasGenerateSpecial === false; + const notNullable = field.nullable === false && hasGenerateSpecial === false; - if (isRequired) { - requiredColumns.push(name); + if (notNullable) { + requiredColumns.push(field); } } + if (hasValidationRules === false && requiredColumns.length === 0) { + return payloadWithPresets; + } + if (requiredColumns.length > 0) { - permission.validation = { - _and: [permission.validation, {}], - }; + permission.validation = hasValidationRules ? 
{ _and: [permission.validation] } : { _and: [] }; - if (action === 'create') { - for (const name of requiredColumns) { - permission.validation._and[1][name] = { - _submitted: true, - }; + for (const field of requiredColumns) { + if (action === 'create' && field.defaultValue === null) { + permission.validation._and.push({ + [field.field]: { + _submitted: true, + }, + }); } - } else { - for (const name of requiredColumns) { - permission.validation._and[1][name] = { + + permission.validation._and.push({ + [field.field]: { _nnull: true, - }; - } + }, + }); } } + const validationErrors: FailedValidationException[] = []; + validationErrors.push( - ...this.validateJoi(parseFilter(permission.validation || {}, this.accountability), payloadWithPresets) + ...flatten( + validatePayload(parseFilter(permission.validation!, this.accountability), payloadWithPresets).map((error) => + error.details.map((details) => new FailedValidationException(details)) + ) + ) ); if (validationErrors.length > 0) throw validationErrors; @@ -261,48 +292,6 @@ export class AuthorizationService { return payloadWithPresets; } - validateJoi(validation: Filter, payload: Partial): FailedValidationException[] { - if (!validation) return []; - - const errors: FailedValidationException[] = []; - - /** - * Note there can only be a single _and / _or per level - */ - - if (Object.keys(validation)[0] === '_and') { - const subValidation = Object.values(validation)[0]; - - const nestedErrors = flatten( - subValidation.map((subObj: Record) => { - return this.validateJoi(subObj, payload); - }) - ).filter((err?: FailedValidationException) => err); - errors.push(...nestedErrors); - } else if (Object.keys(validation)[0] === '_or') { - const subValidation = Object.values(validation)[0]; - const nestedErrors = flatten( - subValidation.map((subObj: Record) => this.validateJoi(subObj, payload)) - ); - - const allErrored = subValidation.length === nestedErrors.length; - - if (allErrored) { - errors.push(...nestedErrors); - } - } else { - const schema = generateJoi(validation); - - const { error } = schema.validate(payload, { abortEarly: false }); - - if (error) { - errors.push(...error.details.map((details) => new FailedValidationException(details))); - } - } - - return errors; - } - async checkAccess(action: PermissionsAction, collection: string, pk: PrimaryKey | PrimaryKey[]): Promise { if (this.accountability?.admin === true) return; @@ -317,7 +306,7 @@ export class AuthorizationService { }; if (Array.isArray(pk)) { - const result = await itemsService.readMany(pk, query, { permissionsAction: action }); + const result = await itemsService.readMany(pk, { ...query, limit: pk.length }, { permissionsAction: action }); if (!result) throw new ForbiddenException(); if (result.length !== pk.length) throw new ForbiddenException(); } else { diff --git a/api/src/services/collections.ts b/api/src/services/collections.ts index 3b87db7fa2..7bf1995a84 100644 --- a/api/src/services/collections.ts +++ b/api/src/services/collections.ts @@ -7,17 +7,11 @@ import { systemCollectionRows } from '../database/system-data/collections'; import env from '../env'; import { ForbiddenException, InvalidPayloadException } from '../exceptions'; import logger from '../logger'; -import { FieldsService, RawField } from '../services/fields'; +import { FieldsService } from '../services/fields'; import { ItemsService, MutationOptions } from '../services/items'; import Keyv from 'keyv'; -import { - AbstractServiceOptions, - Accountability, - Collection, - CollectionMeta, - FieldMeta, - 
SchemaOverview, -} from '../types'; +import { AbstractServiceOptions, Collection, CollectionMeta, SchemaOverview } from '../types'; +import { Accountability, FieldMeta, RawField } from '@directus/shared/types'; export type RawCollection = { collection: string; @@ -213,6 +207,11 @@ export class CollectionsService { const collections: Collection[] = []; + /** + * The collections as known in the schema cache. + */ + const knownCollections = Object.keys(this.schema.collections); + for (const table of tablesInDatabase) { const collection: Collection = { collection: table.name, @@ -220,7 +219,12 @@ export class CollectionsService { schema: table, }; - collections.push(collection); + // By only returning collections that are known in the schema cache, we prevent weird + // situations where the collections endpoint returns different info from every other + // collection + if (knownCollections.includes(table.name)) { + collections.push(collection); + } } return collections; @@ -272,6 +276,8 @@ export class CollectionsService { const collections: Collection[] = []; + const knownCollections = Object.keys(this.schema.collections); + for (const table of tables) { const collection: Collection = { collection: table.name, @@ -279,7 +285,12 @@ export class CollectionsService { schema: table, }; - collections.push(collection); + // By only returning collections that are known in the schema cache, we prevent weird + // situations where the collections endpoint returns different info from every other + // collection + if (knownCollections.includes(table.name)) { + collections.push(collection); + } } return collections; @@ -403,15 +414,6 @@ export class CollectionsService { if (relation.related_collection === collectionKey) { await fieldsService.deleteField(relation.collection, relation.field); } - - const isM2O = relation.collection === collectionKey; - - // Delete any fields that have a relationship to/from the current collection - if (isM2O && relation.related_collection && relation.meta?.one_field) { - await fieldsService.deleteField(relation.related_collection!, relation.meta.one_field); - } else { - await fieldsService.deleteField(relation.collection, relation.field); - } } const m2aRelationsThatIncludeThisCollection = this.schema.relations.filter((relation) => { diff --git a/api/src/services/fields.ts b/api/src/services/fields.ts index b821508a6e..18b2f643c1 100644 --- a/api/src/services/fields.ts +++ b/api/src/services/fields.ts @@ -11,17 +11,17 @@ import { ForbiddenException, InvalidPayloadException } from '../exceptions'; import { translateDatabaseError } from '../exceptions/database/translate'; import { ItemsService } from '../services/items'; import { PayloadService } from '../services/payload'; -import { AbstractServiceOptions, Accountability, FieldMeta, SchemaOverview, types } from '../types'; -import { Field } from '../types/field'; +import { AbstractServiceOptions, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; +import { Field, FieldMeta, RawField, Type } from '@directus/shared/types'; import getDefaultValue from '../utils/get-default-value'; import getLocalType from '../utils/get-local-type'; -import { toArray } from '../utils/to-array'; -import { isEqual } from 'lodash'; +import { toArray } from '@directus/shared/utils'; +import { isEqual, isNil } from 'lodash'; import { RelationsService } from './relations'; +import { getGeometryHelper } from '../database/helpers/geometry'; import Keyv from 'keyv'; -export type RawField = DeepPartial & { field: 
string; type: typeof types[number] }; - export class FieldsService { knex: Knex; accountability: Accountability | null; @@ -75,25 +75,22 @@ export class FieldsService { fields.push(...systemFieldRows); } - let columns = await this.schemaInspector.columnInfo(collection); - - columns = columns.map((column) => { - return { - ...column, - default_value: getDefaultValue(column), - }; - }); + const columns = (await this.schemaInspector.columnInfo(collection)).map((column) => ({ + ...column, + default_value: getDefaultValue(column), + })); const columnsWithSystem = columns.map((column) => { const field = fields.find((field) => { return field.field === column.name && field.collection === column.table; }); + const { type = 'alias', ...info } = column ? getLocalType(column, field) : {}; const data = { collection: column.table, field: column.name, - type: column ? getLocalType(column, field) : 'alias', - schema: column, + type: type, + schema: { ...column, ...info }, meta: field || null, }; @@ -200,12 +197,13 @@ export class FieldsService { // Do nothing } + const { type = 'alias', ...info } = column ? getLocalType(column, fieldInfo) : {}; const data = { collection, field, - type: column ? getLocalType(column, fieldInfo) : 'alias', + type, meta: fieldInfo || null, - schema: column || null, + schema: type == 'alias' ? null : { ...column, ...info }, }; return data; @@ -213,15 +211,20 @@ export class FieldsService { async createField( collection: string, - field: Partial & { field: string; type: typeof types[number] | null }, + field: Partial & { field: string; type: Type | null }, table?: Knex.CreateTableBuilder // allows collection creation to ): Promise { if (this.accountability && this.accountability.admin !== true) { throw new ForbiddenException(); } + const exists = + field.field in this.schema.collections[collection].fields || + isNil(await this.knex.select('id').from('directus_fields').where({ collection, field: field.field }).first()) === + false; + // Check if field already exists, either as a column, or as a row in directus_fields - if (field.field in this.schema.collections[collection].fields) { + if (exists) { throw new InvalidPayloadException(`Field "${field.field}" already exists in collection "${collection}"`); } @@ -274,7 +277,7 @@ export class FieldsService { if (!field.schema) return; this.addColumnToTable(table, field, existingColumn); }); - } catch (err) { + } catch (err: any) { throw await translateDatabaseError(err); } } @@ -313,7 +316,6 @@ export class FieldsService { return field.field; } - /** @todo save accountability */ async deleteField(collection: string, field: string): Promise { if (this.accountability && this.accountability.admin !== true) { throw new ForbiddenException(); @@ -434,6 +436,9 @@ export class FieldsService { public addColumnToTable(table: Knex.CreateTableBuilder, field: RawField | Field, alter: Column | null = null): void { let column: Knex.ColumnBuilder; + // Don't attempt to add a DB column for alias / corrupt fields + if (field.type === 'alias' || field.type === 'unknown') return; + if (field.schema?.has_auto_increment) { column = table.increments(field.field); } else if (field.type === 'string') { @@ -445,6 +450,13 @@ export class FieldsService { column = table.string(field.field); } else if (field.type === 'hash') { column = table.string(field.field, 255); + } else if (field.type === 'dateTime') { + column = table.dateTime(field.field, { useTz: false }); + } else if (field.type === 'timestamp') { + column = table.timestamp(field.field, { useTz: true }); 
+ } else if (field.type === 'geometry') { + const helper = getGeometryHelper(); + column = helper.createColumn(table, field); } else { column = table[field.type](field.field); } diff --git a/api/src/services/files.ts b/api/src/services/files.ts index 9b9bdc6780..10a8080fe1 100644 --- a/api/src/services/files.ts +++ b/api/src/services/files.ts @@ -1,7 +1,6 @@ import formatTitle from '@directus/format-title'; import axios, { AxiosResponse } from 'axios'; -import parseEXIF from 'exif-reader'; -import { parse as parseICC } from 'icc'; +import exifr from 'exifr'; import { clone } from 'lodash'; import { extension } from 'mime-types'; import path from 'path'; @@ -13,8 +12,7 @@ import { ForbiddenException, ServiceUnavailableException } from '../exceptions'; import logger from '../logger'; import storage from '../storage'; import { AbstractServiceOptions, File, PrimaryKey } from '../types'; -import parseIPTC from '../utils/parse-iptc'; -import { toArray } from '../utils/to-array'; +import { toArray } from '@directus/shared/utils'; import { ItemsService, MutationOptions } from './items'; export class FilesService extends ItemsService { @@ -32,6 +30,14 @@ export class FilesService extends ItemsService { ): Promise { const payload = clone(data); + if ('folder' in payload === false) { + const settings = await this.knex.select('storage_default_folder').from('directus_settings').first(); + + if (settings?.storage_default_folder) { + payload.folder = settings.storage_default_folder; + } + } + if (primaryKey !== undefined) { await this.updateOne(primaryKey, payload, { emitEvents: false }); @@ -46,9 +52,10 @@ export class FilesService extends ItemsService { primaryKey = await this.createOne(payload, { emitEvents: false }); } - const fileExtension = path.extname(payload.filename_download) || (payload.type && extension(payload.type)); + const fileExtension = + path.extname(payload.filename_download) || (payload.type && '.' + extension(payload.type)) || ''; - payload.filename_disk = primaryKey + '.' 
+ fileExtension; + payload.filename_disk = primaryKey + (fileExtension || ''); if (!payload.type) { payload.type = 'application/octet-stream'; @@ -56,7 +63,7 @@ export class FilesService extends ItemsService { try { await storage.disk(data.storage).put(payload.filename_disk, stream, payload.type); - } catch (err) { + } catch (err: any) { logger.warn(`Couldn't save file ${payload.filename_disk}`); logger.warn(err); throw new ServiceUnavailableException(`Couldn't save file ${payload.filename_disk}`, { service: 'files' }); @@ -77,37 +84,30 @@ export class FilesService extends ItemsService { payload.height = meta.height; } - payload.filesize = meta.size; payload.metadata = {}; - if (meta.icc) { - try { - payload.metadata.icc = parseICC(meta.icc); - } catch (err) { - logger.warn(`Couldn't extract ICC information from file`); - logger.warn(err); + try { + payload.metadata = await exifr.parse(buffer.content, { + icc: false, + iptc: true, + ifd1: true, + interop: true, + translateValues: true, + reviveValues: true, + mergeOutput: false, + }); + if (payload.metadata?.iptc?.Headline) { + payload.title = payload.metadata.iptc.Headline; } - } - - if (meta.exif) { - try { - payload.metadata.exif = parseEXIF(meta.exif); - } catch (err) { - logger.warn(`Couldn't extract EXIF information from file`); - logger.warn(err); + if (!payload.description && payload.metadata?.iptc?.Caption) { + payload.description = payload.metadata.iptc.Caption; } - } - - if (meta.iptc) { - try { - payload.metadata.iptc = parseIPTC(meta.iptc); - payload.title = payload.metadata.iptc.headline || payload.title; - payload.description = payload.description || payload.metadata.iptc.caption; - payload.tags = payload.metadata.iptc.keywords; - } catch (err) { - logger.warn(`Couldn't extract IPTC information from file`); - logger.warn(err); + if (payload.metadata?.iptc?.Keywords) { + payload.tags = payload.metadata.iptc.Keywords; } + } catch (err: any) { + logger.warn(`Couldn't extract metadata from file`); + logger.warn(err); } } @@ -156,7 +156,7 @@ export class FilesService extends ItemsService { fileResponse = await axios.get(importURL, { responseType: 'stream', }); - } catch (err) { + } catch (err: any) { logger.warn(`Couldn't fetch file from url "${importURL}"`); logger.warn(err); throw new ServiceUnavailableException(`Couldn't fetch file from url "${importURL}"`, { diff --git a/api/src/services/graphql.ts b/api/src/services/graphql.ts index ca297efffd..77623e4822 100644 --- a/api/src/services/graphql.ts +++ b/api/src/services/graphql.ts @@ -1,4 +1,5 @@ import argon2 from 'argon2'; +import { validateQuery } from '../utils/validate-query'; import { ArgumentNode, BooleanValueNode, @@ -42,13 +43,16 @@ import { toInputObjectType, } from 'graphql-compose'; import { Knex } from 'knex'; -import { flatten, get, mapKeys, merge, set, uniq } from 'lodash'; +import { flatten, get, mapKeys, merge, set, uniq, pick } from 'lodash'; import ms from 'ms'; +import { getCache } from '../cache'; import getDatabase from '../database'; import env from '../env'; -import { BaseException, GraphQLValidationException, InvalidPayloadException } from '../exceptions'; +import { ForbiddenException, GraphQLValidationException, InvalidPayloadException } from '../exceptions'; +import { BaseException } from '@directus/shared/exceptions'; import { listExtensions } from '../extensions'; -import { AbstractServiceOptions, Accountability, Action, GraphQLParams, Item, Query, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; +import { 
AbstractServiceOptions, Action, Aggregate, GraphQLParams, Item, Query, SchemaOverview } from '../types'; import { getGraphQLType } from '../utils/get-graphql-type'; import { reduceSchema } from '../utils/reduce-schema'; import { sanitizeQuery } from '../utils/sanitize-query'; @@ -70,6 +74,7 @@ import { SpecificationService } from './specifications'; import { UsersService } from './users'; import { UtilsService } from './utils'; import { WebhooksService } from './webhooks'; +import { generateHash } from '../utils/generate-hash'; const GraphQLVoid = new GraphQLScalarType({ name: 'Void', @@ -89,6 +94,12 @@ const GraphQLVoid = new GraphQLScalarType({ }, }); +export const GraphQLGeoJSON = new GraphQLScalarType({ + ...GraphQLJSON, + name: 'GraphQLGeoJSON', + description: 'GeoJSON value', +}); + export const GraphQLDate = new GraphQLScalarType({ ...GraphQLString, name: 'Date', @@ -147,7 +158,7 @@ export class GraphQLService { variableValues: variables, operationName, }); - } catch (err) { + } catch (err: any) { throw new InvalidPayloadException('GraphQL execution error.', { graphqlErrors: [err.message] }); } @@ -217,6 +228,22 @@ export class GraphQLService { acc[`${collectionName}_by_id`] = ReadCollectionTypes[collection.collection].getResolver( `${collection.collection}_by_id` ); + + const hasAggregate = Object.values(collection.fields).some((field) => { + const graphqlType = getGraphQLType(field.type); + + if (graphqlType === GraphQLInt || graphqlType === GraphQLFloat) { + return true; + } + + return false; + }); + + if (hasAggregate) { + acc[`${collectionName}_aggregated`] = ReadCollectionTypes[collection.collection].getResolver( + `${collection.collection}_aggregated` + ); + } } return acc; @@ -313,6 +340,50 @@ export class GraphQLService { function getTypes(action: 'read' | 'create' | 'update' | 'delete') { const CollectionTypes: Record = {}; + const DateFunctions = schemaComposer.createObjectTC({ + name: 'date_functions', + fields: { + year: { + type: GraphQLInt, + }, + month: { + type: GraphQLInt, + }, + week: { + type: GraphQLInt, + }, + day: { + type: GraphQLInt, + }, + weekday: { + type: GraphQLInt, + }, + }, + }); + + const TimeFunctions = schemaComposer.createObjectTC({ + name: 'time_functions', + fields: { + hour: { + type: GraphQLInt, + }, + minute: { + type: GraphQLInt, + }, + second: { + type: GraphQLInt, + }, + }, + }); + + const DateTimeFunctions = schemaComposer.createObjectTC({ + name: 'datetime_functions', + fields: { + ...DateFunctions.getFields(), + ...TimeFunctions.getFields(), + }, + }); + for (const collection of Object.values(schema[action].collections)) { if (Object.keys(collection.fields).length === 0) continue; if (SYSTEM_DENY_LIST.includes(collection.collection)) continue; @@ -336,8 +407,41 @@ export class GraphQLService { acc[field.field] = { type, description: field.note, + resolve: (obj: Record, _, __, info) => { + return obj[info?.path?.key ?? 
field.field]; + }, }; + if (field.type === 'date') { + acc[`${field.field}_func`] = { + type: DateFunctions, + resolve: (obj: Record) => { + const funcFields = Object.keys(DateFunctions.getFields()).map((key) => `${field.field}_${key}`); + return mapKeys(pick(obj, funcFields), (_value, key) => key.substring(field.field.length + 1)); + }, + }; + } + + if (field.type === 'time') { + acc[`${field.field}_func`] = { + type: TimeFunctions, + resolve: (obj: Record) => { + const funcFields = Object.keys(TimeFunctions.getFields()).map((key) => `${field.field}_${key}`); + return mapKeys(pick(obj, funcFields), (_value, key) => key.substring(field.field.length + 1)); + }, + }; + } + + if (field.type === 'dateTime' || field.type === 'timestamp') { + acc[`${field.field}_func`] = { + type: DateTimeFunctions, + resolve: (obj: Record) => { + const funcFields = Object.keys(DateTimeFunctions.getFields()).map((key) => `${field.field}_${key}`); + return mapKeys(pick(obj, funcFields), (_value, key) => key.substring(field.field.length + 1)); + }, + }; + } + return acc; }, {} as ObjectTypeComposerFieldConfigMapDefinition), }); @@ -348,6 +452,9 @@ export class GraphQLService { CollectionTypes[relation.collection]?.addFields({ [relation.field]: { type: CollectionTypes[relation.related_collection], + resolve: (obj: Record, _, __, info) => { + return obj[info?.path?.key ?? relation.field]; + }, }, }); @@ -355,6 +462,9 @@ export class GraphQLService { CollectionTypes[relation.related_collection]?.addFields({ [relation.meta.one_field]: { type: [CollectionTypes[relation.collection]], + resolve: (obj: Record, _, __, info) => { + return obj[info?.path?.key ?? relation.meta!.one_field]; + }, }, }); } @@ -386,6 +496,9 @@ export class GraphQLService { return CollectionTypes[collection].getType(); }, }), + resolve: (obj: Record, _, __, info) => { + return obj[info?.path?.key ?? 
relation.field]; + }, }, }); } @@ -399,8 +512,12 @@ export class GraphQLService { */ function getReadableTypes() { const { CollectionTypes: ReadCollectionTypes } = getTypes('read'); + const ReadableCollectionFilterTypes: Record = {}; + const AggregatedFunctions: Record> = {}; + const AggregatedFilters: Record> = {}; + const StringFilterOperators = schemaComposer.createInputTC({ name: 'string_filter_operators', fields: { @@ -533,6 +650,30 @@ export class GraphQLService { }, }); + const GeometryFilterOperators = schemaComposer.createInputTC({ + name: 'geometry_filter_operators', + fields: { + _eq: { + type: GraphQLGeoJSON, + }, + _neq: { + type: GraphQLGeoJSON, + }, + _intersects: { + type: GraphQLGeoJSON, + }, + _nintersects: { + type: GraphQLGeoJSON, + }, + _intersects_bbox: { + type: GraphQLGeoJSON, + }, + _nintersects_bbox: { + type: GraphQLGeoJSON, + }, + }, + }); + for (const collection of Object.values(schema.read.collections)) { if (Object.keys(collection.fields).length === 0) continue; if (SYSTEM_DENY_LIST.includes(collection.collection)) continue; @@ -543,6 +684,7 @@ export class GraphQLService { const graphqlType = getGraphQLType(field.type); let filterOperatorType: InputTypeComposer; + switch (graphqlType) { case GraphQLBoolean: filterOperatorType = BooleanFilterOperators; @@ -554,12 +696,14 @@ export class GraphQLService { case GraphQLDate: filterOperatorType = DateFilterOperators; break; + case GraphQLGeoJSON: + filterOperatorType = GeometryFilterOperators; + break; default: filterOperatorType = StringFilterOperators; } acc[field.field] = filterOperatorType; - return acc; }, {} as InputTypeComposerFieldConfigMapDefinition), }); @@ -569,6 +713,69 @@ export class GraphQLService { _or: [ReadableCollectionFilterTypes[collection.collection]], }); + AggregatedFilters[collection.collection] = schemaComposer.createObjectTC({ + name: `${collection.collection}_aggregated_fields`, + fields: Object.values(collection.fields).reduce((acc, field) => { + const graphqlType = getGraphQLType(field.type); + + switch (graphqlType) { + case GraphQLInt: + case GraphQLFloat: + acc[field.field] = { + type: GraphQLFloat, + description: field.note, + }; + break; + default: + break; + } + + return acc; + }, {} as ObjectTypeComposerFieldConfigMapDefinition), + }); + + AggregatedFunctions[collection.collection] = schemaComposer.createObjectTC({ + name: `${collection.collection}_aggregated`, + fields: { + group: { + name: 'group', + type: GraphQLJSON, + }, + avg: { + name: 'avg', + type: AggregatedFilters[collection.collection], + }, + sum: { + name: 'sum', + type: AggregatedFilters[collection.collection], + }, + count: { + name: 'count', + type: AggregatedFilters[collection.collection], + }, + countDistinct: { + name: 'countDistinct', + type: AggregatedFilters[collection.collection], + }, + avgDistinct: { + name: 'avgDistinct', + type: AggregatedFilters[collection.collection], + }, + sumDistinct: { + name: 'sumDistinct', + type: AggregatedFilters[collection.collection], + }, + min: { + name: 'min', + type: AggregatedFilters[collection.collection], + }, + max: { + name: 'max', + type: AggregatedFilters[collection.collection], + }, + }, + }); + ReadCollectionTypes[collection.collection].addResolver({ name: collection.collection, args: collection.singleton @@ -601,6 +808,26 @@ export class GraphQLService { }, }); + ReadCollectionTypes[collection.collection].addResolver({ + name: `${collection.collection}_aggregated`, + type: [AggregatedFunctions[collection.collection]], + args: { + groupBy: 
schemaComposer.createEnumTC({ + name: `${collection.collection}_group_by`, + values: Object.values(collection.fields).reduce((acc, field) => { + acc[field.field] = { value: field.field }; + return acc; + }, {} as Record), + }), + }, + resolve: async ({ info, context }: { info: GraphQLResolveInfo; context: Record }) => { + const result = await self.resolveQuery(info); + context.data = result; + + return result; + }, + }); + if (collection.singleton === false) { ReadCollectionTypes[collection.collection].addResolver({ name: `${collection.collection}_by_id`, @@ -833,18 +1060,25 @@ export class GraphQLService { async resolveQuery(info: GraphQLResolveInfo): Promise | null> { let collection = info.fieldName; if (this.scope === 'system') collection = `directus_${collection}`; - const selections = this.replaceFragmentsInSelections(info.fieldNodes[0]?.selectionSet?.selections, info.fragments); if (!selections) return null; - const args: Record = this.parseArgs(info.fieldNodes[0].arguments || [], info.variableValues); - const query = this.getQuery(args, selections, info.variableValues); - if (collection.endsWith('_by_id') && collection in this.schema.collections === false) { - collection = collection.slice(0, -6); + let query: Record; + + const isAggregate = collection.endsWith('_aggregated') && collection in this.schema.collections === false; + + if (isAggregate) { + query = this.getAggregateQuery(args, selections); + collection = collection.slice(0, -11); + } else { + query = this.getQuery(args, selections, info.variableValues); + + if (collection.endsWith('_by_id') && collection in this.schema.collections === false) { + collection = collection.slice(0, -6); + } } - if (args.id) { query.filter = { _and: [ @@ -866,13 +1100,16 @@ export class GraphQLService { return result?.[0] || null; } + if (query.group) { + // for every entry in result add a group field based on query.group; + result.map((field: Item) => { + field.group = field[query.group[0]]; + }); + } + return result; } - /** - * Generic mutation resolver that converts the incoming GraphQL mutation AST into a Directus query and executes the - * appropriate C-UD operation - */ async resolveMutation( args: Record, info: GraphQLResolveInfo @@ -933,8 +1170,8 @@ export class GraphQLService { return { ids: keys }; } } - } catch (err) { - this.formatError(err); + } catch (err: any) { + return this.formatError(err); } } @@ -970,7 +1207,7 @@ export class GraphQLService { } return true; - } catch (err) { + } catch (err: any) { throw this.formatError(err); } } @@ -1030,24 +1267,42 @@ export class GraphQLService { ): Query { const query: Query = sanitizeQuery(rawQuery, this.accountability); + const parseAliases = (selections: readonly SelectionNode[]) => { + const aliases: Record = {}; + + for (const selection of selections) { + if (selection.kind !== 'Field') continue; + + if (selection.alias?.value) { + aliases[selection.alias.value] = selection.name.value; + } + } + + return aliases; + }; + const parseFields = (selections: readonly SelectionNode[], parent?: string): string[] => { const fields: string[] = []; for (let selection of selections) { if ((selection.kind === 'Field' || selection.kind === 'InlineFragment') !== true) continue; + selection = selection as FieldNode | InlineFragmentNode; let current: string; + // Union type (Many-to-Any) if (selection.kind === 'InlineFragment') { - // filter out graphql pointers, like __typename if (selection.typeCondition!.name.value.startsWith('__')) continue; current = 
`${parent}:${selection.typeCondition!.name.value}`; - } else { + } + // Any other field type + else { // filter out graphql pointers, like __typename if (selection.name.value.startsWith('__')) continue; - current = selection.name.value; + + current = selection.alias?.value ?? selection.name.value; if (parent) { current = `${parent}.${current}`; @@ -1055,7 +1310,20 @@ export class GraphQLService { } if (selection.selectionSet) { - const children = parseFields(selection.selectionSet.selections, current); + let children: string[]; + + if (current.endsWith('_func')) { + children = []; + + const rootField = current.slice(0, -5); + + for (const subSelection of selection.selectionSet.selections) { + if (subSelection.kind !== 'Field') continue; + children.push(`${subSelection.name!.value}(${rootField})`); + } + } else { + children = parseFields(selection.selectionSet.selections, current); + } fields.push(...children); } else { @@ -1083,11 +1351,43 @@ export class GraphQLService { return uniq(fields); }; + query.alias = parseAliases(selections); query.fields = parseFields(selections); + validateQuery(query); + return query; } + /** + * Resolve the aggregation query based on the requested aggregated fields + */ + getAggregateQuery(rawQuery: Query, selections: readonly SelectionNode[]): Query { + const query: Query = sanitizeQuery(rawQuery, this.accountability); + + query.aggregate = {}; + + for (let aggregationGroup of selections) { + if ((aggregationGroup.kind === 'Field') !== true) continue; + + aggregationGroup = aggregationGroup as FieldNode; + + // filter out graphql pointers, like __typename + if (aggregationGroup.name.value.startsWith('__')) continue; + + const aggregateProperty = aggregationGroup.name.value as keyof Aggregate; + + query.aggregate[aggregateProperty] = + aggregationGroup.selectionSet?.selections.map((selectionNode) => { + selectionNode = selectionNode as FieldNode; + return selectionNode.name.value; + }) ?? []; + } + + validateQuery(query); + + return query; + } /** * Convert Directus-Exception into a GraphQL format, so it can be returned by GraphQL properly. */ @@ -1103,7 +1403,7 @@ export class GraphQLService { * Select the correct service for the given collection. This allows the individual services to run * their custom checks (f.e. 
it allows UsersService to prevent updating TFA secret from outside) */ - getService(collection: string): RolesService { + getService(collection: string): ItemsService { const opts = { knex: this.knex, accountability: this.accountability, @@ -1376,7 +1676,7 @@ export class GraphQLService { userAgent: req?.get('user-agent'), }); if (args.mode === 'cookie') { - res?.cookie('directus_refresh_token', result.refreshToken, { + res?.cookie(env.REFRESH_TOKEN_COOKIE_NAME, result.refreshToken, { httpOnly: true, domain: env.REFRESH_TOKEN_COOKIE_DOMAIN, maxAge: ms(env.REFRESH_TOKEN_TTL as string), @@ -1407,13 +1707,13 @@ export class GraphQLService { accountability: accountability, schema: this.schema, }); - const currentRefreshToken = args.refresh_token || req?.cookies.directus_refresh_token; + const currentRefreshToken = args.refresh_token || req?.cookies[env.REFRESH_TOKEN_COOKIE_NAME]; if (!currentRefreshToken) { throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`); } const result = await authenticationService.refresh(currentRefreshToken); if (args.mode === 'cookie') { - res?.cookie('directus_refresh_token', result.refreshToken, { + res?.cookie(env.REFRESH_TOKEN_COOKIE_NAME, result.refreshToken, { httpOnly: true, domain: env.REFRESH_TOKEN_COOKIE_DOMAIN, maxAge: ms(env.REFRESH_TOKEN_TTL as string), @@ -1443,7 +1743,7 @@ export class GraphQLService { accountability: accountability, schema: this.schema, }); - const currentRefreshToken = args.refresh_token || req?.cookies.directus_refresh_token; + const currentRefreshToken = args.refresh_token || req?.cookies[env.REFRESH_TOKEN_COOKIE_NAME]; if (!currentRefreshToken) { throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`); } @@ -1467,7 +1767,7 @@ export class GraphQLService { try { await service.requestPasswordReset(args.email, args.reset_url || null); - } catch (err) { + } catch (err: any) { if (err instanceof InvalidPayloadException) { throw err; } @@ -1565,7 +1865,7 @@ export class GraphQLService { string: GraphQLNonNull(GraphQLString), }, resolve: async (_, args) => { - return await argon2.hash(args.string); + return await generateHash(args.string); }, }, utils_hash_verify: { @@ -1609,6 +1909,21 @@ export class GraphQLService { return true; }, }, + utils_cache_clear: { + type: GraphQLVoid, + resolve: async () => { + if (this.accountability?.admin !== true) { + throw new ForbiddenException(); + } + + const { cache, schemaCache } = getCache(); + + await cache?.clear(); + await schemaCache?.clear(); + + return; + }, + }, users_invite_accept: { type: GraphQLBoolean, args: { @@ -2019,6 +2334,7 @@ export class GraphQLService { info.fragments ); const query = this.getQuery(args, selections || [], info.variableValues); + return await service.readOne(this.accountability.user, query); }, }, diff --git a/api/src/services/import.ts b/api/src/services/import.ts index f72203090a..37c8f674cd 100644 --- a/api/src/services/import.ts +++ b/api/src/services/import.ts @@ -1,6 +1,7 @@ import { Knex } from 'knex'; import getDatabase from '../database'; -import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types'; +import { AbstractServiceOptions, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; import { ForbiddenException, InvalidPayloadException } from '../exceptions'; import StreamArray from 'stream-json/streamers/StreamArray'; import { ItemsService } from './items'; @@ -103,8 +104,16 @@ export class 
ImportService { .pipe(csv()) .on('data', (value: Record<string, string>) => { const obj = transform(value, (result: Record<string, any>, value, key) => { - if (value.length === 0) delete result[key]; - else set(result, key, value); + if (value.length === 0) { + delete result[key]; + } else { + try { + const parsedJson = JSON.parse(value); + set(result, key, parsedJson); + } catch { + set(result, key, value); + } + } }); saveQueue.push(obj); diff --git a/api/src/services/items.ts b/api/src/services/items.ts index ff63d221a9..979fb448fc 100644 --- a/api/src/services/items.ts +++ b/api/src/services/items.ts @@ -9,10 +9,10 @@ import env from '../env'; import { ForbiddenException, InvalidPayloadException } from '../exceptions'; import { translateDatabaseError } from '../exceptions/database/translate'; import logger from '../logger'; +import { Accountability } from '@directus/shared/types'; import { AbstractService, AbstractServiceOptions, - Accountability, Action, Item as AnyItem, PermissionsAction, @@ -21,7 +21,7 @@ import { SchemaOverview, } from '../types'; import getASTFromQuery from '../utils/get-ast-from-query'; -import { toArray } from '../utils/to-array'; +import { toArray } from '@directus/shared/utils'; import { AuthorizationService } from './authorization'; import { PayloadService } from './payload'; @@ -133,13 +133,15 @@ export class ItemsService implements AbstractSer let primaryKey = payloadWithTypeCasting[primaryKeyField]; try { - await trx.insert(payloadWithTypeCasting).into(this.collection); - } catch (err) { + const result = await trx.insert(payloadWithoutAliases).into(this.collection).returning(primaryKeyField); + primaryKey = primaryKey ?? result[0]; + } catch (err: any) { throw await translateDatabaseError(err); }
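A minimal sketch of this insert-then-resolve-PK pattern in isolation, assuming a table `articles` with an auto-increment `id` column (the names are illustrative, not from this PR):

import { Knex } from 'knex';

// Insert a row and resolve its primary key: RETURNING where the driver supports it,
// with a MAX() lookup inside the same transaction as a best-effort fallback.
async function insertAndGetKey(trx: Knex.Transaction, payload: Record<string, any>): Promise<number> {
	// Depending on the driver, result[0] is the generated key itself or undefined
	const result = await trx.insert(payload).into('articles').returning('id');
	let primaryKey: any = result[0];

	if (!primaryKey) {
		// Safe, as we're still inside the current transaction
		const row = await trx.max('id', { as: 'id' }).from('articles').first();
		primaryKey = row?.id;
	}

	return primaryKey as number;
}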
- // When relying on a database auto-incremented ID, we'll have to fetch it from the DB in - // order to know what the PK is of the just-inserted item + // Most databases support RETURNING; those that don't (MySQL/SQLite) tend to return the PK anyway. + // In case the primary key isn't known yet, we'll make a best attempt at fetching it based on + // the last inserted row if (!primaryKey) { // Fetching it with max should be safe, as we're in the context of the current transaction const result = await trx.max(primaryKeyField, { as: 'id' }).from(this.collection).first(); primaryKey = result.id; } @@ -162,9 +164,7 @@ export class ItemsService implements AbstractSer item: primaryKey, }; - await trx.insert(activityRecord).into('directus_activity'); - - const { id: activityID } = await trx.max('id', { as: 'id ' }).from('directus_activity').first(); + const activityID = (await trx.insert(activityRecord).into('directus_activity').returning('id'))[0] as number; // If revisions are tracked, create revisions record if (this.schema.collections[this.collection].accountability === 'all') { const revisionRecord = { activity: activityID, collection: this.collection, item: primaryKey, - data: JSON.stringify(payload), - delta: JSON.stringify(payload), + data: await payloadService.prepareDelta(payload), + delta: await payloadService.prepareDelta(payload), }; - await trx.insert(revisionRecord).into('directus_revisions'); - - const { id: revisionID } = await trx.max('id', { as: 'id' }).from('directus_revisions').first(); + const revisionID = (await trx.insert(revisionRecord).into('directus_revisions').returning('id'))[0] as number; // Make sure to set the parent field of the child-revision rows const childrenRevisions = [...revisionsM2O, ...revisionsA2O, ...revisionsO2M]; @@ -279,6 +277,17 @@ export class ItemsService implements AbstractSer throw new ForbiddenException(); } + emitAsyncSafe(`${this.eventScope}.read`, { + event: `${this.eventScope}.read`, + accountability: this.accountability, + collection: this.collection, + query, + action: 'read', + payload: records, + schema: this.schema, + database: getDatabase(), + }); + return records as Item[]; } @@ -306,17 +315,6 @@ export class ItemsService implements AbstractSer throw new ForbiddenException(); } - emitAsyncSafe(`${this.eventScope}.read`, { - event: `${this.eventScope}.read`, - accountability: this.accountability, - collection: this.collection, - item: key, - action: 'read', - payload: results, - schema: this.schema, - database: getDatabase(), - }); - return results[0]; } @@ -344,17 +342,6 @@ const results = await this.readByQuery(queryWithKeys, opts); - emitAsyncSafe(`${this.eventScope}.read`, { - event: `${this.eventScope}.read`, - accountability: this.accountability, - collection: this.collection, - item: keys, - action: 'read', - payload: results, - schema: this.schema, - database: getDatabase(), - }); - return results; } @@ -455,7 +442,7 @@ export class ItemsService implements AbstractSer if (Object.keys(payloadWithTypeCasting).length > 0) { try { await trx(this.collection).update(payloadWithTypeCasting).whereIn(primaryKeyField, keys); - } catch (err) { + } catch (err: any) { throw await translateDatabaseError(err); } } @@ -481,10 +468,7 @@ export class ItemsService implements AbstractSer const activityPrimaryKeys: PrimaryKey[] = []; for (const activityRecord of activityRecords) { - await trx.insert(activityRecord).into('directus_activity'); - const result = await trx.max('id', { as: 'id' }).from('directus_activity').first(); - const primaryKey = result.id; - + const primaryKey = (await trx.insert(activityRecord).into('directus_activity').returning('id'))[0] as number; activityPrimaryKeys.push(primaryKey); } @@ -496,18 +480,28 @@ 
export class ItemsService implements AbstractSer const snapshots = await itemsService.readMany(keys); - const revisionRecords = activityPrimaryKeys.map((key, index) => ({ - activity: key, - collection: this.collection, - item: keys[index], - data: - snapshots && Array.isArray(snapshots) ? JSON.stringify(snapshots?.[index]) : JSON.stringify(snapshots), - delta: JSON.stringify(payloadWithTypeCasting), - })); + const revisionRecords: { + activity: PrimaryKey; + collection: string; + item: PrimaryKey; + data: string; + delta: string; + }[] = []; + + for (let i = 0; i < activityPrimaryKeys.length; i++) { + revisionRecords.push({ + activity: activityPrimaryKeys[i], + collection: this.collection, + item: keys[i], + data: snapshots && Array.isArray(snapshots) ? JSON.stringify(snapshots[i]) : JSON.stringify(snapshots), + delta: await payloadService.prepareDelta(payloadWithTypeCasting), + }); + } for (let i = 0; i < revisionRecords.length; i++) { - await trx.insert(revisionRecords[i]).into('directus_revisions'); - const { id: revisionID } = await trx.max('id', { as: 'id' }).from('directus_revisions').first(); + const revisionID = ( + await trx.insert(revisionRecords[i]).into('directus_revisions').returning('id') + )[0] as number; if (opts?.onRevisionCreate) { opts.onRevisionCreate(revisionID); diff --git a/api/src/services/mail/index.ts b/api/src/services/mail/index.ts index 19efaf95bd..8a990961fc 100644 --- a/api/src/services/mail/index.ts +++ b/api/src/services/mail/index.ts @@ -6,10 +6,12 @@ import getDatabase from '../../database'; import env from '../../env'; import { InvalidPayloadException } from '../../exceptions'; import logger from '../../logger'; -import { AbstractServiceOptions, Accountability, SchemaOverview } from '../../types'; +import { AbstractServiceOptions, SchemaOverview } from '../../types'; +import { Accountability } from '@directus/shared/types'; import getMailer from '../../mailer'; import { Transporter, SendMailOptions } from 'nodemailer'; import prettier from 'prettier'; +import { Url } from '../../utils/url'; const liquidEngine = new Liquid({ root: [path.resolve(env.EXTENSIONS_PATH, 'templates'), path.resolve(__dirname, 'templates')], @@ -99,16 +101,15 @@ export class MailService { }; function getProjectLogoURL(logoID?: string) { - let projectLogoURL = env.PUBLIC_URL; - if (projectLogoURL.endsWith('/') === false) { - projectLogoURL += '/'; - } + const projectLogoUrl = new Url(env.PUBLIC_URL); + if (logoID) { - projectLogoURL += `assets/${logoID}`; + projectLogoUrl.addPath('assets', logoID); } else { - projectLogoURL += `admin/img/directus-white.png`; + projectLogoUrl.addPath('admin', 'img', 'directus-white.png'); } - return projectLogoURL; + + return projectLogoUrl.toString(); } } } diff --git a/api/src/services/meta.ts b/api/src/services/meta.ts index c7b2ff6df7..d88a099246 100644 --- a/api/src/services/meta.ts +++ b/api/src/services/meta.ts @@ -1,10 +1,11 @@ import { Knex } from 'knex'; import getDatabase from '../database'; import { ForbiddenException } from '../exceptions'; -import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types'; +import { AbstractServiceOptions, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; import { Query } from '../types/query'; import { applyFilter, applySearch } from '../utils/apply-query'; -import { parseFilter } from '../utils/parse-filter'; +import { parseFilter } from '@directus/shared/utils'; export class MetaService { knex: Knex; diff --git a/api/src/services/payload.ts 
b/api/src/services/payload.ts index 63bd7c6bed..ae0e59b4be 100644 --- a/api/src/services/payload.ts +++ b/api/src/services/payload.ts @@ -1,14 +1,19 @@ -import argon2 from 'argon2'; -import { format, formatISO, parse, parseISO } from 'date-fns'; +import { format, parseISO } from 'date-fns'; import Joi from 'joi'; import { Knex } from 'knex'; -import { clone, cloneDeep, isObject, isPlainObject, omit } from 'lodash'; +import { clone, cloneDeep, isObject, isPlainObject, omit, isNil } from 'lodash'; import { v4 as uuidv4 } from 'uuid'; import getDatabase from '../database'; import { ForbiddenException, InvalidPayloadException } from '../exceptions'; -import { AbstractServiceOptions, Accountability, Item, PrimaryKey, Query, SchemaOverview } from '../types'; -import { toArray } from '../utils/to-array'; +import { AbstractServiceOptions, Item, PrimaryKey, Query, SchemaOverview, Alterations } from '../types'; +import { Accountability } from '@directus/shared/types'; +import { toArray } from '@directus/shared/utils'; import { ItemsService } from './items'; +import { unflatten } from 'flat'; +import { isNativeGeometry } from '../utils/geometry'; +import { getGeometryHelper } from '../database/helpers/geometry'; +import { parse as wktToGeoJSON } from 'wellknown'; +import { generateHash } from '../utils/generate-hash'; type Action = 'create' | 'read' | 'update'; @@ -18,19 +23,10 @@ type Transformers = { value: any; payload: Partial; accountability: Accountability | null; + specials: string[]; }) => Promise; }; -type Alterations = { - create: { - [key: string]: any; - }[]; - update: { - [key: string]: any; - }[]; - delete: (number | string)[]; -}; - /** * Process a given payload for a collection to ensure the special fields (hash, uuid, date etc) are * handled correctly. @@ -50,19 +46,11 @@ export class PayloadService { return this; } - /** - * @todo allow this to be extended - * - * @todo allow these extended special types to have "field dependencies"? - * f.e. 
the file-links transformer needs the id and filename_download to be fetched from the DB - in order to work */ public transformers: Transformers = { async hash({ action, value }) { if (!value) return; - if (action === 'create' || action === 'update') { - return await argon2.hash(String(value)); + return await generateHash(String(value)); } return value; @@ -124,8 +112,7 @@ export class PayloadService { }, async csv({ action, value }) { if (!value) return; - if (action === 'read') return value.split(','); - + if (action === 'read' && Array.isArray(value) === false) return value.split(','); if (Array.isArray(value)) return value.join(','); return value; }, @@ -137,7 +124,7 @@ action: Action, payload: Partial<Item> | Partial<Item>[] ): Promise<Partial<Item> | Partial<Item>[]> { - const processedPayload = toArray(payload); + let processedPayload = toArray(payload); if (processedPayload.length === 0) return []; @@ -164,18 +151,23 @@ }) ); - await this.processDates(processedPayload, action); + this.processGeometries(processedPayload, action); + this.processDates(processedPayload, action); if (['create', 'update'].includes(action)) { processedPayload.forEach((record) => { for (const [key, value] of Object.entries(record)) { - if (Array.isArray(value) || (typeof value === 'object' && value instanceof Date !== true && value !== null)) { - record[key] = JSON.stringify(value); + if (Array.isArray(value) || (typeof value === 'object' && !(value instanceof Date) && value !== null)) { + if (!value.isRawInstance) { + record[key] = JSON.stringify(value); + } } } }); } + processedPayload = processedPayload.map((item: Record<string, any>) => unflatten(item, { delimiter: '->' })); + if (Array.isArray(payload)) { return processedPayload; } @@ -201,6 +193,7 @@ value, payload, accountability, + specials: fieldSpecials, }); } } @@ -208,25 +201,55 @@ return value; } + /** + * Native geometries are stored in a custom binary format. We need to insert them with + * the function st_geomfromtext. For this to work, that function call must not be + * escaped. It's therefore placed as a Knex.Raw object in the payload. Thus the need + * to check if the value is a raw instance before stringifying it in the next step. + */ + processGeometries<T extends Partial<Record<string, any>>[]>(payloads: T, action: Action): T { + const helper = getGeometryHelper(); + + const process = + action == 'read' + ? (value: any) => { + if (typeof value === 'string') return wktToGeoJSON(value); + } + : (value: any) => helper.fromGeoJSON(typeof value == 'string' ? JSON.parse(value) : value); + + const fieldsInCollection = Object.entries(this.schema.collections[this.collection].fields); + const geometryColumns = fieldsInCollection.filter(([_, field]) => isNativeGeometry(field)); + + for (const [name] of geometryColumns) { + for (const payload of payloads) { + if (payload[name]) { + payload[name] = process(payload[name]); + } + } + } + + return payloads; + }
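Sketched standalone, the write/read round-trip this comment describes could look as follows, using `wellknown` for the GeoJSON/WKT conversion (the function names are illustrative, not this PR's helper API):

import { Knex } from 'knex';
import { parse as wktToGeoJSON, stringify as geojsonToWKT } from 'wellknown';

// write: wrap the value in a raw st_geomfromtext() call so the driver won't escape it
function geometryToDatabase(knex: Knex, geojson: any): Knex.Raw {
	return knex.raw('st_geomfromtext(?)', [geojsonToWKT(geojson)]);
}

// read: WKT string coming back from the database, converted to GeoJSON for the API
function geometryFromDatabase(value: string) {
	return wktToGeoJSON(value);
}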
/** * Knex returns `datetime` and `date` columns as Date.. This is wrong for date / datetime, as those * shouldn't return with time / timezone info respectively */ - async processDates( - payloads: Partial<Record<string, any>>[], - action: Action - ): Promise<Partial<Record<string, any>>[]> { + processDates(payloads: Partial<Record<string, any>>[], action: Action): Partial<Record<string, any>>[] { const fieldsInCollection = Object.entries(this.schema.collections[this.collection].fields); const dateColumns = fieldsInCollection.filter(([_name, field]) => ['dateTime', 'date', 'timestamp'].includes(field.type) ); - if (dateColumns.length === 0) return payloads; + const timeColumns = fieldsInCollection.filter(([_name, field]) => { + return field.type === 'time'; + }); + + if (dateColumns.length === 0 && timeColumns.length === 0) return payloads; for (const [name, dateColumn] of dateColumns) { for (const payload of payloads) { - let value = payload[name]; + let value: number | string | Date = payload[name]; if (value === null || value === '0000-00-00') { payload[name] = null; @@ -236,32 +259,54 @@ if (!value) continue; if (action === 'read') { - if (typeof value === 'string') value = new Date(value); + if (typeof value === 'number' || typeof value === 'string') { + value = new Date(value); + } if (dateColumn.type === 'timestamp') { - const newValue = formatISO(value); + const newValue = value.toISOString(); payload[name] = newValue; } if (dateColumn.type === 'dateTime') { - // Strip off the Z at the end of a non-timezone datetime value - const newValue = format(value, "yyyy-MM-dd'T'HH:mm:ss"); + const year = String(value.getUTCFullYear()); + const month = String(value.getUTCMonth() + 1).padStart(2, '0'); + const date = String(value.getUTCDate()).padStart(2, '0'); + const hours = String(value.getUTCHours()).padStart(2, '0'); + const minutes = String(value.getUTCMinutes()).padStart(2, '0'); + const seconds = String(value.getUTCSeconds()).padStart(2, '0'); + + const newValue = `${year}-${month}-${date}T${hours}:${minutes}:${seconds}`; payload[name] = newValue; } if (dateColumn.type === 'date') { + const [year, month, day] = value.toISOString().substr(0, 10).split('-'); + // Strip off the time / timezone information from a date-only value - const newValue = format(value, 'yyyy-MM-dd'); + const newValue = `${year}-${month}-${day}`; payload[name] = newValue; } } else { - if (value instanceof Date === false) { + if (value instanceof Date === false && typeof value === 'string') { if (dateColumn.type === 'date') { - const newValue = parse(value, 'yyyy-MM-dd', new Date()); - payload[name] = newValue; + const [date] = value.split('T'); + const [year, month, day] = date.split('-'); + + payload[name] = new Date(Date.UTC(Number(year), Number(month) - 1, Number(day))); } - if (dateColumn.type === 'timestamp' || dateColumn.type === 'dateTime') { + if (dateColumn.type === 'dateTime') { + const [date, time] = value.split('T'); + const [year, month, day] = date.split('-'); + const [hours, minutes, seconds] = time.substring(0, 8).split(':'); + + payload[name] = new Date( + Date.UTC(Number(year), Number(month) - 1, Number(day), Number(hours), Number(minutes), Number(seconds)) + ); + } + + if (dateColumn.type === 'timestamp') { const newValue = parseISO(value); payload[name] = newValue; } @@ -270,6 +315,22 @@ } }
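Condensed into a standalone helper, the manual UTC formatting used above for `dateTime` columns behaves like this (equivalent logic, not literal code from the PR):

// 2021-06-05T08:09:07.000Z becomes '2021-06-05T08:09:07' (UTC components, no trailing Z)
function formatDateTimeUTC(value: Date): string {
	const pad = (part: number) => String(part).padStart(2, '0');

	return (
		`${value.getUTCFullYear()}-${pad(value.getUTCMonth() + 1)}-${pad(value.getUTCDate())}` +
		`T${pad(value.getUTCHours())}:${pad(value.getUTCMinutes())}:${pad(value.getUTCSeconds())}`
	);
}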
+ /** + * Some DB drivers (MS SQL, for example) return time values as Date objects. For consistency's sake, + * we'll abstract those back to hh:mm:ss + */ + for (const [name] of timeColumns) { + for (const payload of payloads) { + const value = payload[name]; + + if (!value) continue; + + if (action === 'read') { + if (value instanceof Date) payload[name] = format(value, 'HH:mm:ss'); + } + } + } + return payloads; } @@ -318,6 +379,9 @@ export class PayloadService { const relatedPrimary = this.schema.collections[relatedCollection].primary; const relatedRecord: Partial<Item> = payload[relation.field]; + + if (['string', 'number'].includes(typeof relatedRecord)) continue; + const hasPrimaryKey = relatedPrimary in relatedRecord; let relatedPrimaryKey: PrimaryKey = relatedRecord[relatedPrimary]; @@ -455,7 +519,8 @@ schema: this.schema, }); - const relatedRecords: Partial<Item>[] = []; + const recordsToUpsert: Partial<Item>[] = []; + const savedPrimaryKeys: PrimaryKey[] = []; // Nested array of individual items if (Array.isArray(payload[relation.meta!.one_field!])) { @@ -465,30 +530,47 @@ let record = cloneDeep(relatedRecord); if (typeof relatedRecord === 'string' || typeof relatedRecord === 'number') { - const exists = !!(await this.knex - .select(relatedPrimaryKeyField) + const existingRecord = await this.knex + .select(relatedPrimaryKeyField, relation.field) .from(relation.collection) .where({ [relatedPrimaryKeyField]: record }) - .first()); + .first(); - if (exists === false) { + if (!!existingRecord === false) { throw new ForbiddenException(); }
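The loose comparison defended in the NOTE below, reduced to its essence (the values are illustrative):

// Primary keys may arrive as strings (HTTP route params) while the database reports
// numbers (or bigints serialized as strings), so strict equality would misfire:
const fromRoute: string | number = '42';
const fromDb: string | number = 42;

console.log(fromRoute === fromDb); // false: would wrongly treat the item as updated
// eslint-disable-next-line eqeqeq
console.log(fromRoute == fromDb); // true: matches the intent here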
NOTE: We use == here, as the + // primary key might be reported as a string instead of number, coming from the + // http route, and or a bigInteger in the DB + if ( + isNil(existingRecord[relation.field]) === false && + (existingRecord[relation.field] == parent || + existingRecord[relation.field] == payload[currentPrimaryKeyField]) + ) { + savedPrimaryKeys.push(existingRecord[relatedPrimaryKeyField]); + continue; + } + record = { [relatedPrimaryKeyField]: relatedRecord, }; } - relatedRecords.push({ + recordsToUpsert.push({ ...record, [relation.field]: parent || payload[currentPrimaryKeyField], }); } - const savedPrimaryKeys = await itemsService.upsertMany(relatedRecords, { - onRevisionCreate: (id) => revisions.push(id), - }); + savedPrimaryKeys.push( + ...(await itemsService.upsertMany(recordsToUpsert, { + onRevisionCreate: (id) => revisions.push(id), + })) + ); const query: Query = { filter: { @@ -540,7 +622,7 @@ export class PayloadService { } if (alterations.update) { - const primaryKeyField = this.schema.collections[this.collection].primary; + const primaryKeyField = this.schema.collections[relation.collection].primary; for (const item of alterations.update) { await itemsService.updateOne( @@ -591,4 +673,22 @@ export class PayloadService { return { revisions }; } + + /** + * Transforms the input partial payload to match the output structure, to have consistency + * between delta and data + */ + async prepareDelta(data: Partial): Promise { + let payload = cloneDeep(data); + + for (const key in payload) { + if (payload[key]?.isRawInstance) { + payload[key] = payload[key].bindings[0]; + } + } + + payload = await this.processValues('read', payload); + + return JSON.stringify(payload); + } } diff --git a/api/src/services/relations.ts b/api/src/services/relations.ts index 1aa0523e33..636e5336da 100644 --- a/api/src/services/relations.ts +++ b/api/src/services/relations.ts @@ -1,8 +1,9 @@ import { Knex } from 'knex'; import { systemRelationRows } from '../database/system-data/relations'; import { ForbiddenException, InvalidPayloadException } from '../exceptions'; -import { AbstractServiceOptions, SchemaOverview, Query, Relation, RelationMeta, Accountability } from '../types'; -import { toArray } from '../utils/to-array'; +import { AbstractServiceOptions, SchemaOverview, Query, Relation, RelationMeta } from '../types'; +import { Accountability } from '@directus/shared/types'; +import { toArray } from '@directus/shared/utils'; import { ItemsService, QueryOptions } from './items'; import { PermissionsService } from './permissions'; import SchemaInspector from '@directus/schema'; @@ -142,6 +143,10 @@ export class RelationsService { ); } + if (relation.related_collection && relation.related_collection in this.schema.collections === false) { + throw new InvalidPayloadException(`Collection "${relation.related_collection}" doesn't exist`); + } + const existingRelation = this.schema.relations.find( (existingRelation) => existingRelation.collection === relation.collection && existingRelation.field === relation.field diff --git a/api/src/services/roles.ts b/api/src/services/roles.ts index 57f7203f7c..eff48f77b4 100644 --- a/api/src/services/roles.ts +++ b/api/src/services/roles.ts @@ -1,6 +1,6 @@ -import { UnprocessableEntityException } from '../exceptions'; -import { AbstractServiceOptions, PrimaryKey } from '../types'; -import { ItemsService } from './items'; +import { ForbiddenException, UnprocessableEntityException } from '../exceptions'; +import { AbstractServiceOptions, PrimaryKey, Query, 
Alterations, Item } from '../types'; +import { ItemsService, MutationOptions } from './items'; import { PermissionsService } from './permissions'; import { PresetsService } from './presets'; import { UsersService } from './users'; @@ -10,21 +10,89 @@ export class RolesService extends ItemsService { super('directus_roles', options); } + private async checkForOtherAdminRoles(excludeKeys: PrimaryKey[]): Promise { + // Make sure there's at least one admin role left after this deletion is done + const otherAdminRoles = await this.knex + .count('*', { as: 'count' }) + .from('directus_roles') + .whereNotIn('id', excludeKeys) + .andWhere({ admin_access: true }) + .first(); + + const otherAdminRolesCount = +(otherAdminRoles?.count || 0); + if (otherAdminRolesCount === 0) throw new UnprocessableEntityException(`You can't delete the last admin role.`); + } + + private async checkForOtherAdminUsers(key: PrimaryKey, users: Alterations | Item[]): Promise { + const role = await this.knex.select('admin_access').from('directus_roles').where('id', '=', key).first(); + + if (!role) throw new ForbiddenException(); + + // The users that will now be in this new non-admin role + let userKeys: PrimaryKey[] = []; + + if (Array.isArray(users)) { + userKeys = users.map((user) => (typeof user === 'string' ? user : user.id)).filter((id) => id); + } else { + userKeys = users.update.map((user) => user.id).filter((id) => id); + } + + const usersThatWereInRoleBefore = (await this.knex.select('id').from('directus_users').where('role', '=', key)).map( + (user) => user.id + ); + const usersThatAreRemoved = usersThatWereInRoleBefore.filter((id) => userKeys.includes(id) === false); + + const usersThatAreAdded = Array.isArray(users) ? users : users.create; + + // If the role the users are moved to is an admin-role, and there's at least 1 (new) admin + // user, we don't have to check for other admin + // users + if ((role.admin_access === true || role.admin_access === 1) && usersThatAreAdded.length > 0) return; + + const otherAdminUsers = await this.knex + .count('*', { as: 'count' }) + .from('directus_users') + .whereNotIn('directus_users.id', [...userKeys, ...usersThatAreRemoved]) + .andWhere({ 'directus_roles.admin_access': true }) + .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') + .first(); + + const otherAdminUsersCount = +(otherAdminUsers?.count || 0); + + if (otherAdminUsersCount === 0) { + throw new UnprocessableEntityException(`You can't remove the last admin user from the admin role.`); + } + + return; + } + + async updateOne(key: PrimaryKey, data: Record, opts?: MutationOptions): Promise { + if ('admin_access' in data && data.admin_access === false) { + await this.checkForOtherAdminRoles([key]); + } + + if ('users' in data) { + await this.checkForOtherAdminUsers(key, data.users); + } + + return super.updateOne(key, data, opts); + } + + async updateMany(keys: PrimaryKey[], data: Record, opts?: MutationOptions): Promise { + if ('admin_access' in data && data.admin_access === false) { + await this.checkForOtherAdminRoles(keys); + } + + return super.updateMany(keys, data, opts); + } + async deleteOne(key: PrimaryKey): Promise { await this.deleteMany([key]); return key; } async deleteMany(keys: PrimaryKey[]): Promise { - // Make sure there's at least one admin role left after this deletion is done - const otherAdminRoles = await this.knex - .count('*', { as: 'count' }) - .from('directus_roles') - .whereNotIn('id', keys) - .andWhere({ admin_access: true }) - .first(); - const otherAdminRolesCount = 
+(otherAdminRoles?.count || 0); - if (otherAdminRolesCount === 0) throw new UnprocessableEntityException(`You can't delete the last admin role.`); + await this.checkForOtherAdminRoles(keys); await this.knex.transaction(async (trx) => { const itemsService = new ItemsService('directus_roles', { @@ -77,6 +145,10 @@ export class RolesService extends ItemsService { return keys; } + deleteByQuery(query: Query, opts?: MutationOptions): Promise { + return super.deleteByQuery(query, opts); + } + /** * @deprecated Use `deleteOne` or `deleteMany` instead */ diff --git a/api/src/services/server.ts b/api/src/services/server.ts index ef81579ce5..c371ddf447 100644 --- a/api/src/services/server.ts +++ b/api/src/services/server.ts @@ -12,8 +12,9 @@ import env from '../env'; import logger from '../logger'; import { rateLimiter } from '../middleware/rate-limiter'; import storage from '../storage'; -import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types'; -import { toArray } from '../utils/to-array'; +import { AbstractServiceOptions, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; +import { toArray } from '@directus/shared/utils'; import getMailer from '../mailer'; import { SettingsService } from './settings'; @@ -208,7 +209,7 @@ export class ServerService { try { await cache!.set(`health-${checkID}`, true, 5); await cache!.delete(`health-${checkID}`); - } catch (err) { + } catch (err: any) { checks['cache:responseTime'][0].status = 'error'; checks['cache:responseTime'][0].output = err; } finally { @@ -248,7 +249,7 @@ export class ServerService { try { await rateLimiter.consume(`health-${checkID}`, 1); await rateLimiter.delete(`health-${checkID}`); - } catch (err) { + } catch (err: any) { checks['rateLimiter:responseTime'][0].status = 'error'; checks['rateLimiter:responseTime'][0].output = err; } finally { @@ -288,7 +289,7 @@ export class ServerService { await disk.put(`health-${checkID}`, 'check'); await disk.get(`health-${checkID}`); await disk.delete(`health-${checkID}`); - } catch (err) { + } catch (err: any) { checks[`storage:${location}:responseTime`][0].status = 'error'; checks[`storage:${location}:responseTime`][0].output = err; } finally { @@ -322,7 +323,7 @@ export class ServerService { try { await mailer.verify(); - } catch (err) { + } catch (err: any) { checks['email:connection'][0].status = 'error'; checks['email:connection'][0].output = err; } diff --git a/api/src/services/specifications.ts b/api/src/services/specifications.ts index c4d21e60ed..a3d1be4107 100644 --- a/api/src/services/specifications.ts +++ b/api/src/services/specifications.ts @@ -7,16 +7,8 @@ import { OpenAPIObject, OperationObject, PathItemObject, SchemaObject, TagObject import { version } from '../../package.json'; import getDatabase from '../database'; import env from '../env'; -import { - AbstractServiceOptions, - Accountability, - Collection, - Field, - Permission, - Relation, - SchemaOverview, - types, -} from '../types'; +import { AbstractServiceOptions, Collection, Permission, Relation, SchemaOverview } from '../types'; +import { Accountability, Field, Type } from '@directus/shared/types'; import { getRelationType } from '../utils/get-relation-type'; import { CollectionsService } from './collections'; import { FieldsService } from './fields'; @@ -459,20 +451,33 @@ class OASSpecsService implements SpecificationSubService { } private fieldTypes: Record< - typeof types[number], + Type, { type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 
'integer' | 'null' | undefined; format?: string; items?: any; } > = { + alias: { + type: 'string', + }, bigInteger: { type: 'integer', format: 'int64', }, + binary: { + type: 'string', + format: 'binary', + }, boolean: { type: 'boolean', }, + csv: { + type: 'array', + items: { + type: 'string', + }, + }, date: { type: 'string', format: 'date', @@ -488,6 +493,9 @@ class OASSpecsService implements SpecificationSubService { type: 'number', format: 'float', }, + hash: { + type: 'string', + }, integer: { type: 'integer', }, @@ -511,21 +519,14 @@ class OASSpecsService implements SpecificationSubService { type: 'string', format: 'timestamp', }, - binary: { - type: 'string', - format: 'binary', + unknown: { + type: undefined, }, uuid: { type: 'string', format: 'uuid', }, - csv: { - type: 'array', - items: { - type: 'string', - }, - }, - hash: { + geometry: { type: 'string', }, }; diff --git a/api/src/services/users.ts b/api/src/services/users.ts index cc80d95992..ffbd864af4 100644 --- a/api/src/services/users.ts +++ b/api/src/services/users.ts @@ -1,11 +1,10 @@ -import argon2 from 'argon2'; import jwt from 'jsonwebtoken'; import { Knex } from 'knex'; -import { clone } from 'lodash'; +import { clone, cloneDeep } from 'lodash'; import getDatabase from '../database'; import env from '../env'; +import { FailedValidationException } from '@directus/shared/exceptions'; import { - FailedValidationException, ForbiddenException, InvalidPayloadException, UnprocessableEntityException, @@ -13,13 +12,18 @@ import { } from '../exceptions'; import { RecordNotUniqueException } from '../exceptions/database/record-not-unique'; import logger from '../logger'; -import { AbstractServiceOptions, Accountability, Item, PrimaryKey, Query, SchemaOverview } from '../types'; +import { AbstractServiceOptions, Item, PrimaryKey, Query, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; import isUrlAllowed from '../utils/is-url-allowed'; -import { toArray } from '../utils/to-array'; +import { toArray } from '@directus/shared/utils'; +import { Url } from '../utils/url'; import { AuthenticationService } from './authentication'; +import { generateHash } from '../utils/generate-hash'; import { ItemsService, MutationOptions } from './items'; import { MailService } from './mail'; import { SettingsService } from './settings'; +import { stall } from '../utils/stall'; +import { performance } from 'perf_hooks'; export class UsersService extends ItemsService { knex: Knex; @@ -99,12 +103,33 @@ export class UsersService extends ItemsService { return true; } + private async checkRemainingAdminExistence(excludeKeys: PrimaryKey[]) { + // Make sure there's at least one admin user left after this deletion is done + const otherAdminUsers = await this.knex + .count('*', { as: 'count' }) + .from('directus_users') + .whereNotIn('directus_users.id', excludeKeys) + .andWhere({ 'directus_roles.admin_access': true }) + .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') + .first(); + + const otherAdminUsersCount = +(otherAdminUsers?.count || 0); + + if (otherAdminUsersCount === 0) { + throw new UnprocessableEntityException(`You can't remove the last admin user from the role.`); + } + } + /** * Create a new user */ async createOne(data: Partial, opts?: MutationOptions): Promise { - const email = data.email.toLowerCase(); - await this.checkUniqueEmails([email]); + const email = data.email?.toLowerCase(); + + if (email) { + await this.checkUniqueEmails([email]); + } + return await 
this.service.createOne(data, opts); } @@ -129,6 +154,14 @@ export class UsersService extends ItemsService { } async updateOne(key: PrimaryKey, data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey> { + if (data.role) { + const newRole = await this.knex.select('admin_access').from('directus_roles').where('id', data.role).first(); + + if (newRole && !newRole.admin_access) { + await this.checkRemainingAdminExistence([key]); + } + } + const email = data.email?.toLowerCase(); if (email) { @@ -147,6 +180,14 @@ export class UsersService extends ItemsService { } async updateMany(keys: PrimaryKey[], data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey[]> { + if (data.role) { + const newRole = await this.knex.select('admin_access').from('directus_roles').where('id', data.role).first(); + + if (newRole && !newRole.admin_access) { + await this.checkRemainingAdminExistence(keys); + } + } + const email = data.email?.toLowerCase(); if (email) { @@ -165,6 +206,29 @@ export class UsersService extends ItemsService { } async updateByQuery(query: Query, data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey[]> { + if (data.role) { + const newRole = await this.knex.select('admin_access').from('directus_roles').where('id', data.role).first(); + + if (newRole && !newRole.admin_access) { + // This is duplicated a touch, but we need to know the keys first + // Not authenticated: + const itemsService = new ItemsService('directus_users', { + knex: this.knex, + schema: this.schema, + }); + + const readQuery = cloneDeep(query); + readQuery.fields = ['id']; + + // We read the IDs of the items based on the query, and then run `updateMany`. `updateMany` does its own + // permissions check for the keys, so we don't have to make this an authenticated read + const itemsToUpdate = await itemsService.readByQuery(readQuery); + const keys = itemsToUpdate.map((item) => item.id); + + await this.checkRemainingAdminExistence(keys); + } + } + const email = data.email?.toLowerCase(); if (email) { @@ -183,20 +247,7 @@ } async deleteOne(key: PrimaryKey, opts?: MutationOptions): Promise<PrimaryKey> { - // Make sure there's at least one admin user left after this deletion is done - const otherAdminUsers = await this.knex - .count('*', { as: 'count' }) - .from('directus_users') - .whereNot('directus_users.id', key) - .andWhere({ 'directus_roles.admin_access': true }) - .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') - .first(); - - const otherAdminUsersCount = +(otherAdminUsers?.count || 0); - - if (otherAdminUsersCount === 0) { - throw new UnprocessableEntityException(`You can't delete the last admin user.`); - } + await this.checkRemainingAdminExistence([key]); await this.service.deleteOne(key, opts); @@ -204,26 +255,32 @@ } async deleteMany(keys: PrimaryKey[], opts?: MutationOptions): Promise<PrimaryKey[]> { - // Make sure there's at least one admin user left after this deletion is done - const otherAdminUsers = await this.knex - .count('*', { as: 'count' }) - .from('directus_users') - .whereNotIn('directus_users.id', keys) - .andWhere({ 'directus_roles.admin_access': true }) - .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') - .first(); - - const otherAdminUsersCount = +(otherAdminUsers?.count || 0); - - if (otherAdminUsersCount === 0) { - throw new UnprocessableEntityException(`You can't delete the last admin user.`); - } + await this.checkRemainingAdminExistence(keys); await this.service.deleteMany(keys, opts); return keys; }
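The `checkRemainingAdminExistence` guard used throughout these methods boils down to a single LEFT JOIN count; as a standalone sketch (the table names are as in Directus, the function name is illustrative):

import { Knex } from 'knex';

async function countOtherAdminUsers(knex: Knex, excludeKeys: (string | number)[]): Promise<number> {
	const result = await knex
		.count('*', { as: 'count' })
		.from('directus_users')
		.whereNotIn('directus_users.id', excludeKeys)
		.andWhere({ 'directus_roles.admin_access': true })
		.leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id')
		.first();

	// Some drivers return the count as a string, so coerce before comparing to 0
	return +(result?.count || 0);
}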
+ async deleteByQuery(query: Query, opts?: MutationOptions): Promise<PrimaryKey[]> { + const primaryKeyField = this.schema.collections[this.collection].primary; + const readQuery = cloneDeep(query); + readQuery.fields = [primaryKeyField]; + + // Not authenticated: + const itemsService = new ItemsService(this.collection, { + knex: this.knex, + schema: this.schema, + }); + + const itemsToDelete = await itemsService.readByQuery(readQuery); + const keys: PrimaryKey[] = itemsToDelete.map((item: Item) => item[primaryKeyField]); + + if (keys.length === 0) return []; + + return await this.deleteMany(keys, opts); + } + async inviteUser(email: string | string[], role: string, url: string | null, subject?: string | null): Promise<void> { const emails = toArray(email); @@ -248,10 +305,10 @@ await service.createOne({ email, role, status: 'invited' }); const payload = { email, scope: 'invite' }; - const token = jwt.sign(payload, env.SECRET as string, { expiresIn: '7d' }); - const inviteURL = url ?? env.PUBLIC_URL + '/admin/accept-invite'; - const acceptURL = inviteURL + '?token=' + token; - const subjectLine = subject ? subject : "You've been invited"; + const token = jwt.sign(payload, env.SECRET as string, { expiresIn: '7d', issuer: 'directus' }); + const subjectLine = subject ?? "You've been invited"; + const inviteURL = url ? new Url(url) : new Url(env.PUBLIC_URL).addPath('admin', 'accept-invite'); + inviteURL.setQuery('token', token); await mailService.send({ to: email, @@ -259,7 +316,7 @@ template: { name: 'user-invitation', data: { - url: acceptURL, + url: inviteURL.toString(), email, }, }, @@ -269,7 +326,7 @@ } async acceptInvite(token: string, password: string): Promise<void> { - const { email, scope } = jwt.verify(token, env.SECRET as string) as { + const { email, scope } = jwt.verify(token, env.SECRET as string, { issuer: 'directus' }) as { email: string; scope: string; }; @@ -282,7 +339,7 @@ throw new InvalidPayloadException(`Email address ${email} hasn't been invited.`); } - const passwordHashed = await argon2.hash(password); + const passwordHashed = await generateHash(password); await this.knex('directus_users').update({ password: passwordHashed, status: 'active' }).where({ id: user.id }); @@ -292,8 +349,14 @@ } async requestPasswordReset(email: string, url: string | null, subject?: string | null): Promise<void> { + const STALL_TIME = 500; + const timeStart = performance.now(); + const user = await this.knex.select('id').from('directus_users').where({ email }).first(); - if (!user) throw new ForbiddenException(); + if (!user) { + await stall(STALL_TIME, timeStart); + throw new ForbiddenException(); + } const mailService = new MailService({ schema: this.schema, }); const payload = { email, scope: 'password-reset' }; - const token = jwt.sign(payload, env.SECRET as string, { expiresIn: '1d' }); + const token = jwt.sign(payload, env.SECRET as string, { expiresIn: '1d', issuer: 'directus' }); if (url && isUrlAllowed(url, env.PASSWORD_RESET_URL_ALLOW_LIST) === false) { throw new InvalidPayloadException(`Url "${url}" can't be used to reset passwords.`); @@ -322,10 +385,12 @@ }, }, }); + + await stall(STALL_TIME, timeStart); } async resetPassword(token: string, password: string): Promise<void> 
{ - const { email, scope } = jwt.verify(token, env.SECRET as string) as { + const { email, scope } = jwt.verify(token, env.SECRET as string, { issuer: 'directus' }) as { email: string; scope: string; }; @@ -338,7 +403,7 @@ export class UsersService extends ItemsService { throw new ForbiddenException(); } - const passwordHashed = await argon2.hash(password); + const passwordHashed = await generateHash(password); await this.knex('directus_users').update({ password: passwordHashed, status: 'active' }).where({ id: user.id }); diff --git a/api/src/services/utils.ts b/api/src/services/utils.ts index d7a8cf6806..4fbb6b4fbe 100644 --- a/api/src/services/utils.ts +++ b/api/src/services/utils.ts @@ -2,7 +2,8 @@ import { Knex } from 'knex'; import getDatabase from '../database'; import { systemCollectionRows } from '../database/system-data/collections'; import { ForbiddenException, InvalidPayloadException } from '../exceptions'; -import { AbstractServiceOptions, Accountability, PrimaryKey, SchemaOverview } from '../types'; +import { AbstractServiceOptions, PrimaryKey, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; export class UtilsService { knex: Knex; diff --git a/api/src/start.ts b/api/src/start.ts index 392567b8ca..9079617f2d 100644 --- a/api/src/start.ts +++ b/api/src/start.ts @@ -1,6 +1,8 @@ import emitter, { emitAsyncSafe } from './emitter'; import env from './env'; import logger from './logger'; +import checkForUpdate from 'update-check'; +import pkg from '../package.json'; // If this file is called directly using node, start the server if (require.main === module) { @@ -18,7 +20,17 @@ export default async function start(): Promise { server .listen(port, () => { - logger.info(`Server started at port ${port}`); + checkForUpdate(pkg) + .then((update) => { + if (update) { + logger.warn(`Update available: ${pkg.version} -> ${update.latest}`); + } + }) + .catch(() => { + // No need to log/warn here. The update message is only an informative nice-to-have + }); + + logger.info(`Server started at http://localhost:${port}`); emitAsyncSafe('server.start'); }) .once('error', (err: any) => { diff --git a/api/src/storage.ts b/api/src/storage.ts index 6864e690c3..f398714abe 100644 --- a/api/src/storage.ts +++ b/api/src/storage.ts @@ -4,7 +4,7 @@ import { GoogleCloudStorage } from '@directus/drive-gcs'; import { AmazonWebServicesS3Storage } from '@directus/drive-s3'; import env from './env'; import { getConfigFromEnv } from './utils/get-config-from-env'; -import { toArray } from './utils/to-array'; +import { toArray } from '@directus/shared/utils'; import { validateEnv } from './utils/validate-env'; validateEnv(['STORAGE_LOCATIONS']); diff --git a/api/src/types/assets.ts b/api/src/types/assets.ts index 8fa6718c59..b97c495527 100644 --- a/api/src/types/assets.ts +++ b/api/src/types/assets.ts @@ -1,10 +1,84 @@ -export type Transformation = { +import { ResizeOptions, Sharp } from 'sharp'; + +// List of allowed sharp methods to expose. +// +// This is a literal, so we can use it to validate request parameters. 
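A sketch of what validating request parameters against such a literal can look like (the short allow-list below is a stand-in, not the full list that follows):

// Narrow an untrusted [method, ...args] tuple from the query string against an allow-list literal
const ALLOWED_METHODS = ['resize', 'rotate', 'blur'] as const;

type AllowedMethod = typeof ALLOWED_METHODS[number];

function isAllowedTransformation(input: unknown): input is [AllowedMethod, ...unknown[]] {
	return (
		Array.isArray(input) &&
		typeof input[0] === 'string' &&
		(ALLOWED_METHODS as readonly string[]).includes(input[0])
	);
}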
+export const TransformationMethods /*: readonly (keyof Sharp)[]*/ = [ + // Output options + // https://sharp.pixelplumbing.com/api-output + 'toFormat', + 'jpeg', + 'png', + 'tiff', + 'webp', + + // Resizing + // https://sharp.pixelplumbing.com/api-resize + 'resize', + 'extend', + 'extract', + 'trim', + + // Image operations + // https://sharp.pixelplumbing.com/api-operation + 'rotate', + 'flip', + 'flop', + 'sharpen', + 'median', + 'blur', + 'flatten', + 'gamma', + 'negate', + 'normalise', + 'normalize', + 'clahe', + 'convolve', + 'threshold', + 'linear', + 'recomb', + 'modulate', + + // Color manipulation + // https://sharp.pixelplumbing.com/api-colour + 'tint', + 'greyscale', + 'grayscale', + 'toColorspace', + 'toColourspace', + + // Channel manipulation + // https://sharp.pixelplumbing.com/api-channel + 'removeAlpha', + 'ensureAlpha', + 'extractChannel', + 'bandbool', +] as const; + +// Helper types +type AllowedSharpMethods = Pick; + +export type TransformationMap = { + [M in keyof AllowedSharpMethods]: readonly [M, ...Parameters]; +}; + +export type Transformation = TransformationMap[keyof TransformationMap]; + +export type TransformationParams = { key?: string; - width?: number; // width - height?: number; // height - fit?: 'cover' | 'contain' | 'inside' | 'outside'; // fit - withoutEnlargement?: boolean; // Without Enlargement + transforms?: Transformation[]; +}; + +// Transformation preset is defined in the admin UI. +export type TransformationPreset = TransformationPresetFormat & + TransformationPresetResize & + TransformationParams & { key: string }; + +export type TransformationPresetFormat = { + format?: 'jpg' | 'jpeg' | 'png' | 'webp' | 'tiff'; quality?: number; }; -// @NOTE Keys used in Transformation should match ASSET_GENERATION_QUERY_KEYS in constants.ts +export type TransformationPresetResize = Pick; + +// @NOTE Keys used in TransformationParams should match ASSET_GENERATION_QUERY_KEYS in constants.ts diff --git a/api/src/types/ast.ts b/api/src/types/ast.ts index 3ab3b302a5..75b32e44dc 100644 --- a/api/src/types/ast.ts +++ b/api/src/types/ast.ts @@ -45,6 +45,7 @@ export type NestedCollectionNode = M2ONode | O2MNode | M2ANode; export type FieldNode = { type: 'field'; name: string; + fieldKey: string; }; export type AST = { diff --git a/api/src/types/collection.ts b/api/src/types/collection.ts index e589efb85e..37362d1789 100644 --- a/api/src/types/collection.ts +++ b/api/src/types/collection.ts @@ -1,5 +1,5 @@ import { Table } from 'knex-schema-inspector/dist/types/table'; -import { Field } from './field'; +import { Field } from '@directus/shared/types'; export type CollectionMeta = { collection: string; diff --git a/api/src/types/deep-partial.d.ts b/api/src/types/deep-partial.d.ts deleted file mode 100644 index bc66618da4..0000000000 --- a/api/src/types/deep-partial.d.ts +++ /dev/null @@ -1,40 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ -/* eslint-disable @typescript-eslint/ban-types */ - -type Primitive = string | number | boolean | bigint | symbol | undefined | null; -type Builtin = Primitive | Function | Date | Error | RegExp; -type IsTuple = T extends [infer A] - ? T - : T extends [infer A, infer B] - ? T - : T extends [infer A, infer B, infer C] - ? T - : T extends [infer A, infer B, infer C, infer D] - ? T - : T extends [infer A, infer B, infer C, infer D, infer E] - ? T - : never; - -type DeepPartial = T extends Primitive | Builtin - ? T - : T extends Map - ? Map, DeepPartial> - : T extends ReadonlyMap - ? 
ReadonlyMap, DeepPartial> - : T extends WeakMap - ? WeakMap, DeepPartial> - : T extends Set - ? Set> - : T extends ReadonlySet - ? ReadonlySet> - : T extends WeakSet - ? WeakSet> - : T extends Array - ? T extends IsTuple - ? { [K in keyof T]?: DeepPartial } - : Array> - : T extends Promise - ? Promise> - : T extends {} - ? { [K in keyof T]?: DeepPartial } - : Partial; diff --git a/api/src/types/express.d.ts b/api/src/types/express.d.ts index 528f44f51a..1c65aa2e4e 100644 --- a/api/src/types/express.d.ts +++ b/api/src/types/express.d.ts @@ -2,7 +2,7 @@ * Custom properties on the req object in express */ -import { Accountability } from './accountability'; +import { Accountability } from '@directus/shared/types'; import { Query } from './query'; import { SchemaOverview } from './schema'; diff --git a/api/src/types/extensions.ts b/api/src/types/extensions.ts index 2250bd89d5..4b5f525e82 100644 --- a/api/src/types/extensions.ts +++ b/api/src/types/extensions.ts @@ -1,6 +1,7 @@ import { ListenerFn } from 'eventemitter2'; import { Router } from 'express'; import { Knex } from 'knex'; +import { Logger } from 'pino'; import env from '../env'; import * as exceptions from '../exceptions'; import * as services from '../services'; @@ -11,8 +12,18 @@ export type ExtensionContext = { exceptions: typeof exceptions; database: Knex; env: typeof env; + logger: Logger; getSchema: typeof getSchema; }; -export type HookRegisterFunction = (context: ExtensionContext) => Record; -export type EndpointRegisterFunction = (router: Router, context: ExtensionContext) => void; +type HookHandlerFunction = (context: ExtensionContext) => Record; + +export type HookConfig = HookHandlerFunction; + +type EndpointHandlerFunction = (router: Router, context: ExtensionContext) => void; +interface EndpointAdvancedConfig { + id: string; + handler: EndpointHandlerFunction; +} + +export type EndpointConfig = EndpointHandlerFunction | EndpointAdvancedConfig; diff --git a/api/src/types/field.ts b/api/src/types/field.ts deleted file mode 100644 index d0b518a89e..0000000000 --- a/api/src/types/field.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { Column } from 'knex-schema-inspector/dist/types/column'; - -export const types = [ - 'bigInteger', - 'boolean', - 'date', - 'dateTime', - 'decimal', - 'float', - 'integer', - 'json', - 'string', - 'text', - 'time', - 'timestamp', - 'binary', - 'uuid', - 'hash', - 'csv', -] as const; - -export type FieldMeta = { - id: number; - collection: string; - field: string; - special: string[] | null; - interface: string | null; - options: Record | null; - readonly: boolean; - hidden: boolean; - sort: number | null; - width: string | null; - group: number | null; - note: string | null; - translations: null; -}; - -export type Field = { - collection: string; - field: string; - type: typeof types[number]; - schema: Column | null; - meta: FieldMeta | null; -}; diff --git a/api/src/types/files.ts b/api/src/types/files.ts index ca5ebef0c5..cc985555d9 100644 --- a/api/src/types/files.ts +++ b/api/src/types/files.ts @@ -1,4 +1,3 @@ -/** @todo finalize */ export type File = { id: string; // uuid storage: string; diff --git a/api/src/types/index.ts b/api/src/types/index.ts index 4fbf2d9510..d789825013 100644 --- a/api/src/types/index.ts +++ b/api/src/types/index.ts @@ -1,14 +1,13 @@ -export * from './accountability'; export * from './activity'; export * from './assets'; export * from './ast'; export * from './collection'; export * from './extensions'; -export * from './field'; export * from './files'; export * from 
diff --git a/api/src/types/field.ts b/api/src/types/field.ts
deleted file mode 100644
index d0b518a89e..0000000000
--- a/api/src/types/field.ts
+++ /dev/null
@@ -1,44 +0,0 @@
-import { Column } from 'knex-schema-inspector/dist/types/column';
-
-export const types = [
-	'bigInteger',
-	'boolean',
-	'date',
-	'dateTime',
-	'decimal',
-	'float',
-	'integer',
-	'json',
-	'string',
-	'text',
-	'time',
-	'timestamp',
-	'binary',
-	'uuid',
-	'hash',
-	'csv',
-] as const;
-
-export type FieldMeta = {
-	id: number;
-	collection: string;
-	field: string;
-	special: string[] | null;
-	interface: string | null;
-	options: Record<string, any> | null;
-	readonly: boolean;
-	hidden: boolean;
-	sort: number | null;
-	width: string | null;
-	group: number | null;
-	note: string | null;
-	translations: null;
-};
-
-export type Field = {
-	collection: string;
-	field: string;
-	type: typeof types[number];
-	schema: Column | null;
-	meta: FieldMeta | null;
-};
diff --git a/api/src/types/files.ts b/api/src/types/files.ts
index ca5ebef0c5..cc985555d9 100644
--- a/api/src/types/files.ts
+++ b/api/src/types/files.ts
@@ -1,4 +1,3 @@
-/** @todo finalize */
 export type File = {
 	id: string; // uuid
 	storage: string;
diff --git a/api/src/types/index.ts b/api/src/types/index.ts
index 4fbf2d9510..d789825013 100644
--- a/api/src/types/index.ts
+++ b/api/src/types/index.ts
@@ -1,14 +1,13 @@
-export * from './accountability';
 export * from './activity';
 export * from './assets';
 export * from './ast';
 export * from './collection';
 export * from './extensions';
-export * from './field';
 export * from './files';
 export * from './graphql';
 export * from './items';
 export * from './meta';
+export * from './migration';
 export * from './permissions';
 export * from './query';
 export * from './relation';
diff --git a/api/src/types/items.ts b/api/src/types/items.ts
index 6c79ddf4e1..17c9abfcf5 100644
--- a/api/src/types/items.ts
+++ b/api/src/types/items.ts
@@ -6,3 +6,13 @@
 export type Item = Record<string, any>;
 
 export type PrimaryKey = string | number;
+
+export type Alterations = {
+	create: {
+		[key: string]: any;
+	}[];
+	update: {
+		[key: string]: any;
+	}[];
+	delete: (number | string)[];
+};
diff --git a/api/src/types/migration.ts b/api/src/types/migration.ts
new file mode 100644
index 0000000000..0819484a83
--- /dev/null
+++ b/api/src/types/migration.ts
@@ -0,0 +1,5 @@
+export type Migration = {
+	version: string;
+	name: string;
+	timestamp: Date;
+};
diff --git a/api/src/types/permissions.ts b/api/src/types/permissions.ts
index 17d97fa1aa..76dddb6f41 100644
--- a/api/src/types/permissions.ts
+++ b/api/src/types/permissions.ts
@@ -9,7 +9,6 @@
 	action: PermissionsAction;
 	permissions: Record<string, any>;
 	validation: Filter | null;
-	limit: number | null;
 	presets: Record<string, any> | null;
 	fields: string[] | null;
 	system?: true;
diff --git a/api/src/types/query.ts b/api/src/types/query.ts
index 319bdee40c..b046afd5c7 100644
--- a/api/src/types/query.ts
+++ b/api/src/types/query.ts
@@ -13,6 +13,7 @@
 	group?: string[];
 	aggregate?: Aggregate;
 	deep?: Record<string, Query>;
+	alias?: Record<string, string>;
 };
 
 export type Sort = {
@@ -37,21 +38,3 @@
 	min?: string[];
 	max?: string[];
 };
-
-export type FilterOperator =
-	| 'eq'
-	| 'neq'
-	| 'contains'
-	| 'ncontains'
-	| 'in'
-	| 'nin'
-	| 'gt'
-	| 'gte'
-	| 'lt'
-	| 'lte'
-	| 'null'
-	| 'nnull'
-	| 'empty'
-	| 'nempty';
-
-export type ValidationOperator = 'required' | 'regex';
diff --git a/api/src/types/schema.ts b/api/src/types/schema.ts
index b5f4b33a60..c0c5eddf2b 100644
--- a/api/src/types/schema.ts
+++ b/api/src/types/schema.ts
@@ -1,8 +1,21 @@
-import { types } from './field';
+import { Type } from '@directus/shared/types';
 import { Permission } from './permissions';
 import { Relation } from './relation';
 
-type CollectionsOverview = {
+export type FieldOverview = {
+	field: string;
+	defaultValue: any;
+	nullable: boolean;
+	type: Type | 'unknown' | 'alias';
+	dbType: string | null;
+	precision: number | null;
+	scale: number | null;
+	special: string[];
+	note: string | null;
+	alias: boolean;
+};
+
+export type CollectionsOverview = {
 	[name: string]: {
 		collection: string;
 		primary: string;
@@ -11,18 +24,7 @@
 		note: string | null;
 		accountability: 'all' | 'activity' | null;
 		fields: {
-			[name: string]: {
-				field: string;
-				defaultValue: any;
-				nullable: boolean;
-				type: typeof types[number] | 'unknown' | 'alias';
-				dbType: string | null;
-				precision: number | null;
-				scale: number | null;
-				special: string[];
-				note: string | null;
-				alias: boolean;
-			};
+			[name: string]: FieldOverview;
 		};
 	};
 };
diff --git a/api/src/types/services.ts b/api/src/types/services.ts
index a38673693a..5794f75e8c 100644
--- a/api/src/types/services.ts
+++ b/api/src/types/services.ts
@@ -1,6 +1,6 @@
 import { Knex } from 'knex';
 import { SchemaOverview } from '../types';
-import { Accountability } from './accountability';
+import { Accountability } from '@directus/shared/types';
 import { Item, PrimaryKey } from './items';
 import { PermissionsAction } from './permissions';
 import { Query } from './query';
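To make the Alterations type added to items.ts above concrete, an illustrative sketch (field names invented) of the create/update/delete sets a relational update might carry:

import { Alterations } from './items';

// One-to-many style alteration set: create two children, retitle one, drop one.
const childEdits: Alterations = {
	create: [{ title: 'New section' }, { title: 'Another section' }],
	update: [{ id: 12, title: 'Renamed section' }],
	delete: [7],
};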
diff --git a/api/src/types/shims.d.ts b/api/src/types/shims.d.ts
index 98fda04c9c..4ed0eeb3e9 100644
--- a/api/src/types/shims.d.ts
+++ b/api/src/types/shims.d.ts
@@ -3,16 +3,6 @@ declare module 'grant' {
 	export default grant;
 }
 
-declare module 'icc' {
-	const parse: (buf: Buffer) => Record<string, any>;
-	export { parse };
-}
-
-declare module 'exif-reader' {
-	const exifReader: (buf: Buffer) => Record<string, any>;
-	export default exifReader;
-}
-
 declare module 'pino-http' {
 	import PinoHttp from '@types/pino-http';
 	const pinoHttp: PinoHttp;
diff --git a/api/src/utils/apply-function-to-column-name.ts b/api/src/utils/apply-function-to-column-name.ts
index e7847950f6..4c8a17bcfa 100644
--- a/api/src/utils/apply-function-to-column-name.ts
+++ b/api/src/utils/apply-function-to-column-name.ts
@@ -1,4 +1,4 @@
-import { REGEX_BETWEEN_PARENS } from '../constants';
+import { REGEX_BETWEEN_PARENS } from '@directus/shared/constants';
 
 /**
  * Takes in a column name, and transforms the original name with the generated column name based on
diff --git a/api/src/utils/apply-query.ts b/api/src/utils/apply-query.ts
index e5d52defa0..d9c4e9c0e3 100644
--- a/api/src/utils/apply-query.ts
+++ b/api/src/utils/apply-query.ts
@@ -7,6 +7,7 @@
 import { Aggregate, Filter, Query, Relation, SchemaOverview } from '../types';
 import { applyFunctionToColumnName } from './apply-function-to-column-name';
 import { getColumn } from './get-column';
 import { getRelationType } from './get-relation-type';
+import { getGeometryHelper } from '../database/helpers/geometry';
 
 const generateAlias = customAlphabet('abcdefghijklmnopqrstuvwxyz', 5);
@@ -25,7 +26,7 @@
 		dbQuery.orderBy(
 			query.sort.map((sort) => ({
 				...sort,
-				column: getColumn(knex, collection, sort.column, false) as any,
+				column: getColumn(knex, collection, sort.column) as any,
 			}))
 		);
 	}
@@ -97,6 +98,7 @@
  * )
  * ```
  */
+
 export function applyFilter(
 	knex: Knex,
 	schema: SchemaOverview,
@@ -304,7 +306,8 @@
 		const [table, column] = key.split('.');
 
 		// Is processed through Knex.Raw, so should be safe to string-inject into these where queries
-		const selectionRaw = getColumn(knex, table, column, false) as any;
+		const selectionRaw = getColumn(knex, table, column) as any;
+
 		// Knex supports "raw" in the columnName parameter, but isn't typed as such. Too bad..
 		// See https://github.com/knex/knex/issues/4518 @TODO remove as any once knex is updated
@@ -319,15 +322,13 @@
 		if (operator === '_empty' || (operator === '_nempty' && compareValue === false)) {
 			dbQuery[logical].andWhere((query) => {
-				query.whereNull(selectionRaw);
-				query.orWhere(selectionRaw, '=', '');
+				query.where(key, '=', '');
 			});
 		}
 
 		if (operator === '_nempty' || (operator === '_empty' && compareValue === false)) {
 			dbQuery[logical].andWhere((query) => {
-				query.whereNotNull(selectionRaw);
-				query.orWhere(selectionRaw, '!=', '');
+				query.where(key, '!=', '');
 			});
 		}
 
@@ -341,8 +342,6 @@
 			// reported as [undefined].
 			// We need to remove any undefined values, as they are useless
 			compareValue = compareValue.filter((val) => val !== undefined);
-
-			// And ignore the result filter if there are no values in it
-			if (compareValue.length === 0) return;
 		}
 
 		if (operator === '_eq') {
@@ -424,6 +423,23 @@
 			dbQuery[logical].whereNotBetween(selectionRaw, value);
 		}
+
+		const geometryHelper = getGeometryHelper();
+
+		if (operator == '_intersects') {
+			dbQuery[logical].whereRaw(geometryHelper.intersects(key, compareValue));
+		}
+
+		if (operator == '_nintersects') {
+			dbQuery[logical].whereRaw(geometryHelper.nintersects(key, compareValue));
+		}
+
+		if (operator == '_intersects_bbox') {
+			dbQuery[logical].whereRaw(geometryHelper.intersects_bbox(key, compareValue));
+		}
+
+		if (operator == '_nintersects_bbox') {
+			dbQuery[logical].whereRaw(geometryHelper.nintersects_bbox(key, compareValue));
+		}
 	}
 
 	function getWhereColumn(path: string[], collection: string) {
@@ -514,39 +530,39 @@ export function applyAggregate(dbQuery: Knex.QueryBuilder, aggregate: Aggregate)
 	for (const field of fields) {
 		if (operation === 'avg') {
-			dbQuery.avg(field, { as: `${field}_avg` });
+			dbQuery.avg(field, { as: `avg->${field}` });
 		}
 
-		if (operation === 'avg_distinct') {
-			dbQuery.avgDistinct(field, { as: `${field}_avg_distinct` });
+		if (operation === 'avgDistinct') {
+			dbQuery.avgDistinct(field, { as: `avgDistinct->${field}` });
 		}
 
 		if (operation === 'count') {
 			if (field === '*') {
 				dbQuery.count('*', { as: 'count' });
 			} else {
-				dbQuery.count(field, { as: `${field}_count` });
+				dbQuery.count(field, { as: `count->${field}` });
 			}
 		}
 
-		if (operation === 'count_distinct') {
-			dbQuery.countDistinct(field, { as: `${field}_count_distinct` });
+		if (operation === 'countDistinct') {
+			dbQuery.countDistinct(field, { as: `countDistinct->${field}` });
 		}
 
 		if (operation === 'sum') {
-			dbQuery.sum(field, { as: `${field}_sum` });
+			dbQuery.sum(field, { as: `sum->${field}` });
 		}
 
 		if (operation === 'sumDistinct') {
-			dbQuery.sum(field, { as: `${field}_sum_distinct` });
+			dbQuery.sumDistinct(field, { as: `sumDistinct->${field}` });
 		}
 
 		if (operation === 'min') {
-			dbQuery.min(field, { as: `${field}_min` });
+			dbQuery.min(field, { as: `min->${field}` });
 		}
 
 		if (operation === 'max') {
-			dbQuery.max(field, { as: `${field}_max` });
+			dbQuery.max(field, { as: `max->${field}` });
 		}
 	}
 }
diff --git a/api/src/utils/generate-hash.ts b/api/src/utils/generate-hash.ts
new file mode 100644
index 0000000000..d474cd170f
--- /dev/null
+++ b/api/src/utils/generate-hash.ts
@@ -0,0 +1,10 @@
+import argon2 from 'argon2';
+import { getConfigFromEnv } from './get-config-from-env';
+
+export function generateHash(stringToHash: string): Promise<string> {
+	const argon2HashConfigOptions = getConfigFromEnv('HASH_', 'HASH_RAW'); // Disallow the HASH_RAW option, see https://github.com/directus/directus/discussions/7670#discussioncomment-1255805
+	// associatedData, if specified, must be passed as a Buffer to argon2.hash, see https://github.com/ranisalt/node-argon2/wiki/Options#associateddata
+	'associatedData' in argon2HashConfigOptions &&
+		(argon2HashConfigOptions.associatedData = Buffer.from(argon2HashConfigOptions.associatedData));
+	return argon2.hash(stringToHash, argon2HashConfigOptions);
+}
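A usage sketch for generateHash (assumption: as with other Directus config prefixes, getConfigFromEnv camel-cases the part after HASH_, so e.g. HASH_MEMORY_COST=8192 would arrive as { memoryCost: 8192 } for argon2):

import { generateHash } from './generate-hash';

async function example(): Promise<void> {
	// Any HASH_* variables present in the environment tune the argon2 options.
	const digest = await generateHash('correct horse battery staple');
	console.log(digest); // e.g. a "$argon2id$v=19$m=8192,t=3,p=4$..." style string
}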
diff --git a/api/src/utils/geometry.ts b/api/src/utils/geometry.ts
new file mode 100644
index 0000000000..3f6ca7956e
--- /dev/null
+++ b/api/src/utils/geometry.ts
@@ -0,0 +1,18 @@
+import { FieldOverview } from '../types';
+const dbGeometricTypes = new Set([
+	'point',
+	'polygon',
+	'linestring',
+	'multipoint',
+	'multipolygon',
+	'multilinestring',
+	'geometry',
+	'geometrycollection',
+	'sdo_geometry',
+	'user-defined',
+]);
+
+export function isNativeGeometry(field: FieldOverview): boolean {
+	const { type, dbType } = field;
+	return type == 'geometry' && dbGeometricTypes.has(dbType!.toLowerCase());
+}
diff --git a/api/src/utils/get-ast-from-query.ts b/api/src/utils/get-ast-from-query.ts
index 06e13e35d6..91be1e5d3f 100644
--- a/api/src/utils/get-ast-from-query.ts
+++ b/api/src/utils/get-ast-from-query.ts
@@ -3,16 +3,9 @@
  */
 
 import { Knex } from 'knex';
-import { cloneDeep, mapKeys, omitBy } from 'lodash';
-import {
-	Accountability,
-	AST,
-	FieldNode,
-	NestedCollectionNode,
-	PermissionsAction,
-	Query,
-	SchemaOverview,
-} from '../types';
+import { cloneDeep, mapKeys, omitBy, uniq } from 'lodash';
+import { Accountability } from '@directus/shared/types';
+import { AST, FieldNode, NestedCollectionNode, PermissionsAction, Query, SchemaOverview } from '../types';
 import { getRelationType } from '../utils/get-relation-type';
 
 type GetASTOptions = {
@@ -72,6 +65,8 @@
 		fields = query.group;
 	}
 
+	fields = uniq(fields);
+
 	const deep = query.deep || {};
 
 	// Prevent fields/deep from showing up in the query object in further use
@@ -116,34 +111,42 @@
 		const relationalStructure: Record<string, string[] | anyNested> = {};
 
-		for (const field of fields) {
+		for (const fieldKey of fields) {
+			let name = fieldKey;
+
+			const isAlias = (query.alias && name in query.alias) ?? false;
+
+			if (isAlias) {
+				name = query.alias![fieldKey];
+			}
+
 			const isRelational =
-				field.includes('.') ||
+				name.includes('.') ||
 				// We'll always treat top level o2m fields as a related item. This is an alias field, otherwise it won't return
 				// anything
 				!!schema.relations.find(
-					(relation) => relation.related_collection === parentCollection && relation.meta?.one_field === field
+					(relation) => relation.related_collection === parentCollection && relation.meta?.one_field === name
 				);
 
 			if (isRelational) {
 				// field is relational
-				const parts = field.split('.');
+				const parts = name.split('.');
 
-				let fieldKey = parts[0];
+				let rootField = parts[0];
 				let collectionScope: string | null = null;
 
 				// m2a related collection scoped field selector `fields=sections.section_id:headings.title`
-				if (fieldKey.includes(':')) {
-					const [key, scope] = fieldKey.split(':');
-					fieldKey = key;
+				if (rootField.includes(':')) {
+					const [key, scope] = rootField.split(':');
+					rootField = key;
 					collectionScope = scope;
 				}
 
-				if (fieldKey in relationalStructure === false) {
+				if (rootField in relationalStructure === false) {
 					if (collectionScope) {
-						relationalStructure[fieldKey] = { [collectionScope]: [] };
+						relationalStructure[rootField] = { [collectionScope]: [] };
 					} else {
-						relationalStructure[fieldKey] = [];
+						relationalStructure[rootField] = [];
 					}
 				}
 
@@ -151,30 +154,36 @@
 				const childKey = parts.slice(1).join('.');
 
 				if (collectionScope) {
-					if (collectionScope in relationalStructure[fieldKey] === false) {
-						(relationalStructure[fieldKey] as anyNested)[collectionScope] = [];
+					if (collectionScope in relationalStructure[rootField] === false) {
+						(relationalStructure[rootField] as anyNested)[collectionScope] = [];
 					}
 
-					(relationalStructure[fieldKey] as anyNested)[collectionScope].push(childKey);
+					(relationalStructure[rootField] as anyNested)[collectionScope].push(childKey);
 				} else {
-					(relationalStructure[fieldKey] as
string[]).push(childKey); + (relationalStructure[rootField] as string[]).push(childKey); } } } else { - children.push({ type: 'field', name: field }); + children.push({ type: 'field', name, fieldKey }); } } - for (const [relationalField, nestedFields] of Object.entries(relationalStructure)) { - const relatedCollection = getRelatedCollection(parentCollection, relationalField); - const relation = getRelation(parentCollection, relationalField); + for (const [fieldKey, nestedFields] of Object.entries(relationalStructure)) { + let fieldName = fieldKey; + + if (query.alias && fieldKey in query.alias) { + fieldName = query.alias[fieldKey]; + } + + const relatedCollection = getRelatedCollection(parentCollection, fieldName); + const relation = getRelation(parentCollection, fieldName); if (!relation) continue; const relationType = getRelationType({ relation, collection: parentCollection, - field: relationalField, + field: fieldName, }); if (!relationType) continue; @@ -194,7 +203,7 @@ export default async function getASTFromQuery( query: {}, relatedKey: {}, parentKey: schema.collections[parentCollection].primary, - fieldKey: relationalField, + fieldKey: fieldKey, relation: relation, }; @@ -202,10 +211,10 @@ export default async function getASTFromQuery( child.children[relatedCollection] = await parseFields( relatedCollection, Array.isArray(nestedFields) ? nestedFields : (nestedFields as anyNested)[relatedCollection] || ['*'], - deep?.[`${relationalField}:${relatedCollection}`] + deep?.[`${fieldKey}:${relatedCollection}`] ); - child.query[relatedCollection] = getDeepQuery(deep?.[`${relationalField}:${relatedCollection}`] || {}); + child.query[relatedCollection] = getDeepQuery(deep?.[`${fieldKey}:${relatedCollection}`] || {}); child.relatedKey[relatedCollection] = schema.collections[relatedCollection].primary; } @@ -217,12 +226,12 @@ export default async function getASTFromQuery( child = { type: relationType, name: relatedCollection, - fieldKey: relationalField, + fieldKey: fieldKey, parentKey: schema.collections[parentCollection].primary, relatedKey: schema.collections[relatedCollection].primary, relation: relation, - query: getDeepQuery(deep?.[relationalField] || {}), - children: await parseFields(relatedCollection, nestedFields as string[], deep?.[relationalField] || {}), + query: getDeepQuery(deep?.[fieldKey] || {}), + children: await parseFields(relatedCollection, nestedFields as string[], deep?.[fieldKey] || {}), }; if (relationType === 'o2m' && !child!.query.sort) { @@ -237,7 +246,18 @@ export default async function getASTFromQuery( } } - return children; + // Deduplicate any children fields that are included both as a regular field, and as a nested m2o field + const nestedCollectionNodes = children.filter((childNode) => childNode.type !== 'field'); + + return children.filter((childNode) => { + const existsAsNestedRelational = !!nestedCollectionNodes.find( + (nestedCollectionNode) => childNode.fieldKey === nestedCollectionNode.fieldKey + ); + + if (childNode.type === 'field' && existsAsNestedRelational) return false; + + return true; + }); } async function convertWildcards(parentCollection: string, fields: string[]) { @@ -263,12 +283,18 @@ export default async function getASTFromQuery( if (fieldKey.includes('*') === false) continue; if (fieldKey === '*') { + const aliases = Object.keys(query.alias ?? 
{}); // Set to all fields in collection if (allowedFields.includes('*')) { - fields.splice(index, 1, ...fieldsInCollection); + fields.splice(index, 1, ...fieldsInCollection, ...aliases); } else { // Set to all allowed fields - fields.splice(index, 1, ...allowedFields); + const allowedAliases = aliases.filter((fieldKey) => { + const name = query.alias![fieldKey]; + return allowedFields!.includes(name); + }); + + fields.splice(index, 1, ...allowedFields, ...allowedAliases); } } @@ -290,6 +316,16 @@ export default async function getASTFromQuery( const nonRelationalFields = allowedFields.filter((fieldKey) => relationalFields.includes(fieldKey) === false); + const aliasFields = Object.keys(query.alias ?? {}).map((fieldKey) => { + const name = query.alias![fieldKey]; + + if (relationalFields.includes(name)) { + return `${fieldKey}.${parts.slice(1).join('.')}`; + } + + return fieldKey; + }); + fields.splice( index, 1, @@ -298,6 +334,7 @@ export default async function getASTFromQuery( return `${relationalField}.${parts.slice(1).join('.')}`; }), ...nonRelationalFields, + ...aliasFields, ] ); } diff --git a/api/src/utils/get-cache-key.test.ts b/api/src/utils/get-cache-key.test.ts new file mode 100644 index 0000000000..82f528a20f --- /dev/null +++ b/api/src/utils/get-cache-key.test.ts @@ -0,0 +1,60 @@ +import { Request } from 'express'; +import { getCacheKey } from './get-cache-key'; + +const restUrl = 'http://localhost/items/example'; +const graphQlUrl = 'http://localhost/graphql'; +const accountability = { user: '00000000-0000-0000-0000-000000000000' }; + +const requests = [ + { + name: 'as unauthenticated request', + params: { originalUrl: restUrl }, + key: '17da8272c9a0ec6eea38a37d6d78bddeb7c79045', + }, + { + name: 'as authenticated request', + params: { originalUrl: restUrl, accountability }, + key: '99a6394222a3d7d149ac1662fc2fff506932db58', + }, + { + name: 'a request with a fields query', + params: { originalUrl: restUrl, sanitizedQuery: { fields: ['id', 'name'] } }, + key: 'aa6e2d8a78de4dfb4af6eaa230d1cd9b7d31ed19', + }, + { + name: 'a request with a filter query', + params: { originalUrl: restUrl, sanitizedQuery: { filter: { name: { _eq: 'test' } } } }, + key: 'd7eb8970f0429e1cf85e12eb5bb8669f618b09d3', + }, + { + name: 'a GraphQL query request', + params: { originalUrl: graphQlUrl, query: { query: 'query { test { id } }' } }, + key: '201731b75c627c60554512d819b6935b54c73814', + }, +]; + +const cases = requests.map(({ name, params, key }) => [name, params, key]); + +describe('get cache key', () => { + test.each(cases)('should create a cache key for %s', (_, params, key) => { + expect(getCacheKey(params as unknown as Request)).toEqual(key); + }); + + test('should create a unique key for each request', () => { + const keys = requests.map((r) => r.key); + const hasDuplicate = keys.some((key) => keys.indexOf(key) !== keys.lastIndexOf(key)); + + expect(hasDuplicate).toBeFalsy(); + }); + + test('should create a unique key for GraphQL requests with different variables', () => { + const query = 'query Test ($name: String) { test (filter: { name: { _eq: $name } }) { id } }'; + const operationName = 'test'; + const variables1 = JSON.stringify({ name: 'test 1' }); + const variables2 = JSON.stringify({ name: 'test 2' }); + const req1: any = { originalUrl: graphQlUrl, query: { query, operationName, variables: variables1 } }; + const req2: any = { originalUrl: graphQlUrl, query: { query, operationName, variables: variables2 } }; + + expect(getCacheKey(req1)).not.toEqual(getCacheKey(req2)); + }); +}); 
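Why the expected keys above are stable 40-character hex strings: object-hash (used by the rewritten implementation below) SHA1-hashes a plain descriptor object, so structurally equal inputs always collapse to the same key. An illustrative sketch:

import hash from 'object-hash';

// Structurally identical descriptors produce the same key...
const a = hash({ user: null, path: '/items/example', query: {} });
const b = hash({ user: null, path: '/items/example', query: {} });
console.log(a === b); // true

// ...while any differing property yields a different key.
const c = hash({ user: '00000000-0000-0000-0000-000000000000', path: '/items/example', query: {} });
console.log(a === c); // false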
diff --git a/api/src/utils/get-cache-key.ts b/api/src/utils/get-cache-key.ts index 7f0990ed43..99087518bd 100644 --- a/api/src/utils/get-cache-key.ts +++ b/api/src/utils/get-cache-key.ts @@ -1,16 +1,18 @@ import { Request } from 'express'; import url from 'url'; +import hash from 'object-hash'; +import { pick } from 'lodash'; export function getCacheKey(req: Request): string { const path = url.parse(req.originalUrl).pathname; + const isGraphQl = path?.includes('/graphql'); - let key: string; - - if (path?.includes('/graphql')) { - key = `${req.accountability?.user || 'null'}-${path}-${JSON.stringify(req.params.query)}`; - } else { - key = `${req.accountability?.user || 'null'}-${path}-${JSON.stringify(req.sanitizedQuery)}`; - } + const info = { + user: req.accountability?.user || null, + path, + query: isGraphQl ? pick(req.query, ['query', 'variables']) : req.sanitizedQuery, + }; + const key = hash(info); return key; } diff --git a/api/src/utils/get-column.ts b/api/src/utils/get-column.ts index 998839f1ca..1404aa3c11 100644 --- a/api/src/utils/get-column.ts +++ b/api/src/utils/get-column.ts @@ -1,6 +1,6 @@ import { Knex } from 'knex'; import { FunctionsHelper } from '../database/functions'; -import { REGEX_BETWEEN_PARENS } from '../constants'; +import { REGEX_BETWEEN_PARENS } from '@directus/shared/constants'; import { applyFunctionToColumnName } from './apply-function-to-column-name'; /** @@ -28,15 +28,15 @@ export function getColumn( if (functionName in fn) { const result = fn[functionName as keyof typeof fn](table, columnName); - if (alias) { - return knex.raw(result + ' AS ??', [alias]); - } - - return result; + return knex.raw(result + ' AS ??', [alias]); } else { throw new Error(`Invalid function specified "${functionName}"`); } } - return knex.raw('??.??', [table, column]); + if (column !== alias) { + return knex.ref(`${table}.${column}`).as(alias); + } + + return knex.ref(`${table}.${column}`); } diff --git a/api/src/utils/get-default-value.ts b/api/src/utils/get-default-value.ts index afb8d60217..ad33f029fd 100644 --- a/api/src/utils/get-default-value.ts +++ b/api/src/utils/get-default-value.ts @@ -5,7 +5,7 @@ import getLocalType from './get-local-type'; export default function getDefaultValue( column: SchemaOverview[string]['columns'][string] | Column ): string | boolean | null { - const type = getLocalType(column); + const { type } = getLocalType(column); let defaultValue = column.default_value ?? 
null;
 
 	if (defaultValue === null) return null;
diff --git a/api/src/utils/get-email-from-profile.ts b/api/src/utils/get-email-from-profile.ts
index 42da9e1fd0..2ecd4e1470 100644
--- a/api/src/utils/get-email-from-profile.ts
+++ b/api/src/utils/get-email-from-profile.ts
@@ -21,10 +21,15 @@ export default function getEmailFromProfile(provider: string, profile: Record<string, any>
 	switch (localType) {
 		case 'boolean':
 			return GraphQLBoolean;
@@ -14,8 +22,11 @@
 		case 'float':
 			return GraphQLFloat;
 		case 'csv':
+			return new GraphQLList(GraphQLString);
 		case 'json':
 			return GraphQLJSON;
+		case 'geometry':
+			return GraphQLGeoJSON;
 		case 'timestamp':
 		case 'dateTime':
 		case 'date':
diff --git a/api/src/utils/get-local-type.ts b/api/src/utils/get-local-type.ts
index f5759267ff..6c4932f191 100644
--- a/api/src/utils/get-local-type.ts
+++ b/api/src/utils/get-local-type.ts
@@ -1,14 +1,17 @@
 import { SchemaOverview } from '@directus/schema/dist/types/overview';
 import { Column } from 'knex-schema-inspector/dist/types/column';
-import { FieldMeta, types } from '../types';
+import { FieldMeta, Type } from '@directus/shared/types';
+import getDatabase from '../database';
 
-/**
- * Typemap graciously provided by @gpetrov
- */
-const localTypeMap: Record<string, { type: typeof types[number] }> = {
+type LocalTypeEntry = {
+	type: Type | 'unknown';
+	geometry_type?: 'Point' | 'LineString' | 'Polygon' | 'MultiPoint' | 'MultiLineString' | 'MultiPolygon';
+};
+
+const localTypeMap: Record<string, LocalTypeEntry> = {
 	// Shared
 	boolean: { type: 'boolean' },
-	tinyint: { type: 'boolean' },
+	tinyint: { type: 'integer' },
 	smallint: { type: 'integer' },
 	mediumint: { type: 'integer' },
 	int: { type: 'integer' },
@@ -40,6 +43,15 @@ const localTypeMap: Record
diff --git a/api/src/utils/get-module-default.ts b/api/src/utils/get-module-default.ts
new file mode 100644
--- /dev/null
+++ b/api/src/utils/get-module-default.ts
+export function getModuleDefault<T>(mod: T | { default: T }): T {
+	if ('default' in mod) {
+		return mod.default;
+	}
+	return mod;
+}
diff --git a/api/src/utils/get-schema.ts b/api/src/utils/get-schema.ts
index f4102510f0..f3dcd0484b 100644
--- a/api/src/utils/get-schema.ts
+++ b/api/src/utils/get-schema.ts
@@ -6,8 +6,9 @@
 import { systemCollectionRows } from '../database/system-data/collections';
 import { systemFieldRows } from '../database/system-data/fields';
 import logger from '../logger';
 import { RelationsService } from '../services';
-import { Accountability, Permission, SchemaOverview } from '../types';
-import { toArray } from '../utils/to-array';
+import { Permission, SchemaOverview } from '../types';
+import { Accountability } from '@directus/shared/types';
+import { toArray } from '@directus/shared/utils';
 import getDefaultValue from './get-default-value';
 import getLocalType from './get-local-type';
 import { mergePermissions } from './merge-permissions';
@@ -27,13 +28,28 @@
 	let result: SchemaOverview;
 
 	if (env.CACHE_SCHEMA !== false && schemaCache) {
-		const cachedSchema = (await schemaCache.get('schema')) as SchemaOverview;
+		let cachedSchema;
+
+		try {
+			cachedSchema = (await schemaCache.get('schema')) as SchemaOverview;
+		} catch (err: any) {
+			logger.warn(err, `[schema-cache] Couldn't retrieve cache. ${err}`);
+		}
 
 		if (cachedSchema) {
 			result = cachedSchema;
 		} else {
 			result = await getDatabaseSchema(database, schemaInspector);
-			await schemaCache.set('schema', result, typeof env.CACHE_SCHEMA === 'string' ? ms(env.CACHE_SCHEMA) : undefined);
+
+			try {
+				await schemaCache.set(
+					'schema',
+					result,
+					typeof env.CACHE_SCHEMA === 'string' ?
ms(env.CACHE_SCHEMA) : undefined + ); + } catch (err: any) { + logger.warn(err, `[schema-cache] Couldn't save cache. ${err}`); + } } } else { result = await getDatabaseSchema(database, schemaInspector); @@ -100,11 +116,21 @@ async function getDatabaseSchema( ]; for (const [collection, info] of Object.entries(schemaOverview)) { + if (toArray(env.DB_EXCLUDE_TABLES).includes(collection)) { + logger.trace(`Collection "${collection}" is configured to be excluded and will be ignored`); + continue; + } + if (!info.primary) { logger.warn(`Collection "${collection}" doesn't have a primary key column and will be ignored`); continue; } + if (collection.includes(' ')) { + logger.warn(`Collection "${collection}" has a space in the name and will be ignored`); + continue; + } + const collectionMeta = collections.find((collectionMeta) => collectionMeta.collection === collection); result.collections[collection] = { @@ -115,18 +141,20 @@ async function getDatabaseSchema( note: collectionMeta?.note || null, sortField: collectionMeta?.sort_field || null, accountability: collectionMeta ? collectionMeta.accountability : 'all', - fields: mapValues(schemaOverview[collection].columns, (column) => ({ - field: column.column_name, - defaultValue: getDefaultValue(column) ?? null, - nullable: column.is_nullable ?? true, - type: getLocalType(column) || 'alias', - dbType: column.data_type, - precision: column.numeric_precision || null, - scale: column.numeric_scale || null, - special: [], - note: null, - alias: false, - })), + fields: mapValues(schemaOverview[collection].columns, (column) => { + return { + field: column.column_name, + defaultValue: getDefaultValue(column) ?? null, + nullable: column.is_nullable ?? true, + type: getLocalType(column).type, + dbType: column.data_type, + precision: column.numeric_precision || null, + scale: column.numeric_scale || null, + special: [], + note: null, + alias: false, + }; + }), }; } @@ -147,20 +175,19 @@ async function getDatabaseSchema( if (!result.collections[field.collection]) continue; const existing = result.collections[field.collection].fields[field.field]; + const column = schemaOverview[field.collection].columns[field.field]; + const special = field.special ? toArray(field.special) : []; + const { type = 'alias' } = existing && column ? getLocalType(column, { special }) : {}; result.collections[field.collection].fields[field.field] = { field: field.field, defaultValue: existing?.defaultValue ?? null, nullable: existing?.nullable ?? true, - type: existing - ? getLocalType(schemaOverview[field.collection].columns[field.field], { - special: field.special ? toArray(field.special) : [], - }) - : 'alias', + type: type, dbType: existing?.dbType || null, precision: existing?.precision || null, scale: existing?.scale || null, - special: field.special ? toArray(field.special) : [], + special: special, note: field.note, alias: existing?.alias ?? true, }; diff --git a/api/src/utils/is-jwt.ts b/api/src/utils/is-directus-jwt.ts similarity index 62% rename from api/src/utils/is-jwt.ts rename to api/src/utils/is-directus-jwt.ts index 819d7405be..4dc7ceae52 100644 --- a/api/src/utils/is-jwt.ts +++ b/api/src/utils/is-directus-jwt.ts @@ -2,9 +2,10 @@ import atob from 'atob'; import logger from '../logger'; /** - * Check if a given string conforms to the structure of a JWT. + * Check if a given string conforms to the structure of a JWT + * and whether it is issued by Directus. 
 */
-export default function isJWT(string: string): boolean {
+export default function isDirectusJWT(string: string): boolean {
 	const parts = string.split('.');
 
 	// JWTs have the structure header.payload.signature
@@ -15,7 +16,7 @@
 		atob(parts[0]);
 		atob(parts[1]);
 		atob(parts[2]);
-	} catch (err) {
+	} catch (err: any) {
 		logger.error(err);
 		return false;
 	}
@@ -23,7 +24,8 @@
 	// Check if the header and payload are valid JSON
 	try {
 		JSON.parse(atob(parts[0]));
-		JSON.parse(atob(parts[1]));
+		const payload = JSON.parse(atob(parts[1]));
+		if (payload.iss !== 'directus') return false;
 	} catch {
 		return false;
 	}
diff --git a/api/src/utils/is-url-allowed.ts b/api/src/utils/is-url-allowed.ts
index 3604ebdbb7..b07ca72951 100644
--- a/api/src/utils/is-url-allowed.ts
+++ b/api/src/utils/is-url-allowed.ts
@@ -1,5 +1,6 @@
-import { toArray } from './to-array';
+import { toArray } from '@directus/shared/utils';
 import logger from '../logger';
+import { URL } from 'url';
 
 /**
  * Check if url matches allow list either exactly or by domain+path
diff --git a/api/src/utils/merge-permissions.ts b/api/src/utils/merge-permissions.ts
index 1af0ba1b8e..5b5f029898 100644
--- a/api/src/utils/merge-permissions.ts
+++ b/api/src/utils/merge-permissions.ts
@@ -25,7 +25,6 @@ function mergePerm(currentPerm: Permission, newPerm: Permission) {
 	let validation = currentPerm.validation;
 	let fields = currentPerm.fields;
 	let presets = currentPerm.presets;
-	let limit = currentPerm.limit;
 
 	if (newPerm.permissions) {
 		if (currentPerm.permissions && Object.keys(currentPerm.permissions)[0] === '_or') {
@@ -73,16 +72,11 @@
 		presets = merge({}, presets, newPerm.presets);
 	}
 
-	if (newPerm.limit && newPerm.limit > (currentPerm.limit || 0)) {
-		limit = newPerm.limit;
-	}
-
 	return {
 		...currentPerm,
 		permissions,
 		validation,
 		fields,
 		presets,
-		limit,
 	};
 }
diff --git a/api/src/utils/parse-iptc.ts b/api/src/utils/parse-iptc.ts
deleted file mode 100644
index 054ef007e3..0000000000
--- a/api/src/utils/parse-iptc.ts
+++ /dev/null
@@ -1,51 +0,0 @@
-const IPTC_ENTRY_TYPES = new Map([
-	[0x78, 'caption'],
-	[0x6e, 'credit'],
-	[0x19, 'keywords'],
-	[0x37, 'dateCreated'],
-	[0x50, 'byline'],
-	[0x55, 'bylineTitle'],
-	[0x7a, 'captionWriter'],
-	[0x69, 'headline'],
-	[0x74, 'copyright'],
-	[0x0f, 'category'],
-]);
-
-const IPTC_ENTRY_MARKER = Buffer.from([0x1c, 0x02]);
-
-export default function parseIPTC(buffer: Buffer): Record<string, any> {
-	if (!Buffer.isBuffer(buffer)) return {};
-
-	const iptc: Record<string, any> = {};
-	let lastIptcEntryPos = buffer.indexOf(IPTC_ENTRY_MARKER);
-
-	while (lastIptcEntryPos !== -1) {
-		lastIptcEntryPos = buffer.indexOf(IPTC_ENTRY_MARKER, lastIptcEntryPos + IPTC_ENTRY_MARKER.byteLength);
-
-		const iptcBlockTypePos = lastIptcEntryPos + IPTC_ENTRY_MARKER.byteLength;
-		const iptcBlockSizePos = iptcBlockTypePos + 1;
-		const iptcBlockDataPos = iptcBlockSizePos + 2;
-
-		const iptcBlockType = buffer.readUInt8(iptcBlockTypePos);
-		const iptcBlockSize = buffer.readUInt16BE(iptcBlockSizePos);
-
-		if (!IPTC_ENTRY_TYPES.has(iptcBlockType)) {
-			continue;
-		}
-
-		const iptcBlockTypeId = IPTC_ENTRY_TYPES.get(iptcBlockType);
-		const iptcData = buffer.slice(iptcBlockDataPos, iptcBlockDataPos + iptcBlockSize).toString();
-
-		if (iptcBlockTypeId) {
-			if (iptc[iptcBlockTypeId] == null) {
-				iptc[iptcBlockTypeId] = iptcData;
-			} else if (Array.isArray(iptc[iptcBlockTypeId])) {
-				iptc[iptcBlockTypeId].push(iptcData);
-			} else {
-				iptc[iptcBlockTypeId] = [iptc[iptcBlockTypeId], iptcData];
-			}
-		}
-	}
-
-	return iptc;
-}
diff --git a/api/src/utils/sanitize-query.ts b/api/src/utils/sanitize-query.ts
index 41f2942b46..14d4100780 100644
--- a/api/src/utils/sanitize-query.ts
+++ b/api/src/utils/sanitize-query.ts
@@ -1,7 +1,8 @@
 import { flatten, get, merge, set } from 'lodash';
 import logger from '../logger';
-import { Accountability, Aggregate, Filter, Meta, Query, Sort } from '../types';
-import { parseFilter } from '../utils/parse-filter';
+import { Aggregate, Filter, Meta, Query, Sort } from '../types';
+import { Accountability } from '@directus/shared/types';
+import { parseFilter, deepMap } from '@directus/shared/utils';
 
 export function sanitizeQuery(rawQuery: Record<string, any>, accountability?: Accountability | null): Query {
 	const query: Query = {};
@@ -18,8 +19,8 @@
 		query.fields = sanitizeFields(rawQuery.fields);
 	}
 
-	if (rawQuery.group) {
-		query.group = sanitizeFields(rawQuery.group);
+	if (rawQuery.groupBy) {
+		query.group = sanitizeFields(rawQuery.groupBy);
 	}
 
 	if (rawQuery.aggregate) {
@@ -60,6 +61,10 @@
 		query.deep = sanitizeDeep(rawQuery.deep, accountability);
 	}
 
+	if (rawQuery.alias) {
+		query.alias = sanitizeAlias(rawQuery.alias);
+	}
+
 	return query;
 }
@@ -122,6 +127,14 @@ function sanitizeFilter(rawFilter: any, accountability: Accountability | null) {
 		}
 	}
 
+	filters = deepMap(filters, (val) => {
+		try {
+			return JSON.parse(val);
+		} catch {
+			return val;
+		}
+	});
+
 	filters = parseFilter(filters, accountability);
 
 	return filters;
@@ -193,3 +206,17 @@
 		}
 	}
 }
+
+function sanitizeAlias(rawAlias: any) {
+	let alias: Record<string, string> = rawAlias;
+
+	if (typeof rawAlias === 'string') {
+		try {
+			alias = JSON.parse(rawAlias);
+		} catch (err) {
+			logger.warn('Invalid value passed for alias query parameter.');
+		}
+	}
+
+	return alias;
+}
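Since alias usually arrives on the query string as JSON, the sanitizer accepts both forms; an illustrative sketch (field names invented):

import { sanitizeQuery } from './sanitize-query';

// ?alias={"headline":"title"} reaches the API as a JSON string and is parsed...
const fromString = sanitizeQuery({ alias: '{"headline":"title"}' });
console.log(fromString.alias); // { headline: 'title' }

// ...while an already-parsed object passes through untouched.
const fromObject = sanitizeQuery({ alias: { headline: 'title' } });
console.log(fromObject.alias); // { headline: 'title' }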
diff --git a/api/src/utils/stall.ts b/api/src/utils/stall.ts
new file mode 100644
index 0000000000..7dd55a4765
--- /dev/null
+++ b/api/src/utils/stall.ts
@@ -0,0 +1,36 @@
+import { performance } from 'perf_hooks';
+
+/**
+ * Wait a specific time to meet the stall ms. Useful in cases where you need to make sure that every
+ * path in a function takes at least X ms (for example authenticate).
+ *
+ * @param {number} ms - Stall time to wait until
+ * @param {number} start - Current start time of the function
+ *
+ * @example
+ *
+ * ```js
+ * const STALL_TIME = 100;
+ *
+ * // Function will always take (at least) 100ms
+ * async function doSomething() {
+ * 	const timeStart = performance.now();
+ *
+ * 	if (something === true) {
+ * 		await heavy();
+ * 	}
+ *
+ * 	await stall(STALL_TIME, timeStart);
+ * 	return 'result';
+ * }
+ * ```
+ */
+export async function stall(ms: number, start: number): Promise<void> {
+	const now = performance.now();
+	const timeElapsed = now - start;
+	const timeRemaining = ms - timeElapsed;
+
+	if (timeRemaining <= 0) return;
+
+	return new Promise((resolve) => setTimeout(resolve, timeRemaining));
+}
diff --git a/api/src/utils/strip-function.ts b/api/src/utils/strip-function.ts
index fab6f7327b..4ebe7a3629 100644
--- a/api/src/utils/strip-function.ts
+++ b/api/src/utils/strip-function.ts
@@ -1,4 +1,4 @@
-import { REGEX_BETWEEN_PARENS } from '../constants';
+import { REGEX_BETWEEN_PARENS } from '@directus/shared/constants';
 
 /**
  * Strip the function declarations from a list of fields
diff --git a/api/src/utils/track.ts b/api/src/utils/track.ts
index 078e15c74e..c0bccc57c0 100644
--- a/api/src/utils/track.ts
+++ b/api/src/utils/track.ts
@@ -13,8 +13,8 @@
 	try {
 		await axios.post('https://telemetry.directus.io/', info);
-	} catch (err) {
-		if ('DIRECTUS_DEV' in process.env) {
+	} catch (err: any) {
+		if (env.NODE_ENV === 'development') {
 			logger.error(err);
 		}
 	}
@@ -27,7 +27,7 @@ async function getEnvInfo(event: string) {
 		event: event,
 		project_id: env.KEY,
 		machine_id: await machineId(),
-		environment: process.env.NODE_ENV,
+		environment: env.NODE_ENV,
 		stack: 'node',
 		os: {
 			arch: os.arch(),
diff --git a/api/src/utils/transformations.ts b/api/src/utils/transformations.ts
new file mode 100644
index 0000000000..75426e5cc6
--- /dev/null
+++ b/api/src/utils/transformations.ts
@@ -0,0 +1,77 @@
+import { isNil } from 'lodash';
+import {
+	File,
+	Transformation,
+	TransformationParams,
+	TransformationPreset,
+	TransformationPresetFormat,
+	TransformationPresetResize,
+} from '../types';
+
+// Extract transforms from a preset
+export function resolvePreset(input: TransformationParams | TransformationPreset, file: File): Transformation[] {
+	// Do the format conversion last
+	return [extractResize(input), ...(input.transforms ?? []), extractToFormat(input, file)].filter(
+		(transform): transform is Transformation => transform !== undefined
+	);
+}
+
+function extractOptions<T extends Record<string, any>>(
+	keys: (keyof T)[],
+	numberKeys: (keyof T)[] = [],
+	booleanKeys: (keyof T)[] = []
+) {
+	return function (input: TransformationParams | TransformationPreset): T {
+		return Object.entries(input).reduce(
+			(config, [key, value]) =>
+				keys.includes(key as any) && isNil(value) === false
+					? {
+							...config,
+							[key]: numberKeys.includes(key as any)
+								? +value
+								: booleanKeys.includes(key as any)
+								? Boolean(value)
+								: value,
+					  }
+					: config,
+			{} as T
+		);
+	};
+}
+
+// Extract format transform from a preset
+function extractToFormat(input: TransformationParams | TransformationPreset, file: File): Transformation | undefined {
+	const options = extractOptions<TransformationPresetFormat>(['format', 'quality'], ['quality'])(input);
+	return Object.keys(options).length > 0
+		? [
+				'toFormat',
+				options.format || (file.type!.split('/')[1] as any),
+				{
+					quality: options.quality,
+				},
+		  ]
+		: undefined;
+}
+
+function extractResize(input: TransformationParams | TransformationPreset): Transformation | undefined {
+	const resizable = ['width', 'height'].some((key) => key in input);
+	if (!resizable) return undefined;
+
+	return [
+		'resize',
+		extractOptions<TransformationPresetResize>(
+			['width', 'height', 'fit', 'withoutEnlargement'],
+			['width', 'height'],
+			['withoutEnlargement']
+		)(input),
+	];
+}
+
+/**
+ * Try to extract a file format from an array of `Transformation`'s.
+ */
+export function maybeExtractFormat(transforms: Transformation[]): string | undefined {
+	const toFormats = transforms.filter((t) => t[0] === 'toFormat');
+	const lastToFormat = toFormats[toFormats.length - 1];
+	return lastToFormat ? lastToFormat[1]?.toString() : undefined;
+}
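A sketch of resolvePreset in action (preset and file are invented, and the file object is trimmed to the single field the function reads):

import { resolvePreset } from './transformations';
import { File, TransformationPreset } from '../types';

const preset = {
	key: 'thumb',
	width: 200,
	height: 200,
	fit: 'cover',
	format: 'webp',
	quality: 80,
} as TransformationPreset;

// Resize is extracted first and the format conversion is appended last, yielding:
// [['resize', { width: 200, height: 200, fit: 'cover' }], ['toFormat', 'webp', { quality: 80 }]]
const transforms = resolvePreset(preset, { type: 'image/jpeg' } as File);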
diff --git a/api/src/utils/url.ts b/api/src/utils/url.ts
new file mode 100644
index 0000000000..a084a26757
--- /dev/null
+++ b/api/src/utils/url.ts
@@ -0,0 +1,78 @@
+import { URL } from 'url';
+
+export class Url {
+	protocol: string | null;
+	host: string | null;
+	port: string | null;
+	path: string[];
+	query: Record<string, string>;
+	hash: string | null;
+
+	constructor(url: string) {
+		const parsedUrl = new URL(url, 'http://localhost');
+
+		const isProtocolRelative = /^\/\//.test(url);
+		const isRootRelative = /^\/$|^\/[^/]/.test(url);
+		const isPathRelative = /^\./.test(url);
+
+		this.protocol =
+			!isProtocolRelative && !isRootRelative && !isPathRelative
+				? parsedUrl.protocol.substring(0, parsedUrl.protocol.length - 1)
+				: null;
+		this.host = !isRootRelative && !isPathRelative ? parsedUrl.host : null;
+		this.port = parsedUrl.port !== '' ? parsedUrl.port : null;
+		this.path = parsedUrl.pathname.split('/').filter((p) => p !== '');
+		this.query = Object.fromEntries(parsedUrl.searchParams.entries());
+		this.hash = parsedUrl.hash !== '' ? parsedUrl.hash.substring(1) : null;
+	}
+
+	public isAbsolute(): boolean {
+		return this.protocol !== null && this.host !== null;
+	}
+
+	public isProtocolRelative(): boolean {
+		return this.protocol === null && this.host !== null;
+	}
+
+	public isRootRelative(): boolean {
+		return this.protocol === null && this.host === null;
+	}
+
+	public addPath(...paths: string[]): Url {
+		const pathToAdd = paths.flatMap((p) => p.split('/')).filter((p) => p !== '');
+
+		for (const pathSegment of pathToAdd) {
+			if (pathSegment === '..') {
+				this.path.pop();
+			} else if (pathSegment !== '.') {
+				this.path.push(pathSegment);
+			}
+		}
+
+		return this;
+	}
+
+	public setQuery(key: string, value: string): Url {
+		this.query[key] = value;
+
+		return this;
+	}
+
+	public toString({ rootRelative } = { rootRelative: false }): string {
+		const protocol = this.protocol !== null ? `${this.protocol}:` : '';
+		const host = this.host ?? '';
+		const port = this.port !== null ? `:${this.port}` : '';
+		const origin = `${this.host !== null ? `${protocol}//` : ''}${host}${port}`;
+
+		const path = `/${this.path.join('/')}`;
+		const query =
+			Object.keys(this.query).length !== 0
+				? `?${Object.entries(this.query)
+						.map(([k, v]) => `${k}=${v}`)
+						.join('&')}`
+				: '';
+		const hash = this.hash !== null ? `#${this.hash}` : '';
+
+		return `${!rootRelative ? origin : ''}${path}${query}${hash}`;
+	}
+}
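A usage sketch for the Url builder above (host and paths invented):

import { Url } from './url';

const assetUrl = new Url('https://example.directus.app/admin')
	.addPath('..', 'assets', 'some-file-id') // '..' pops "admin" back off the path
	.setQuery('access_token', 'abc123');

console.log(assetUrl.toString());
// https://example.directus.app/assets/some-file-id?access_token=abc123

console.log(assetUrl.toString({ rootRelative: true }));
// /assets/some-file-id?access_token=abc123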
"${typeof key}" given.`); + } + + if (key.includes('.') || value.includes('.')) { + throw new InvalidQueryException(`"alias" key/value can't contain a period character \`.\``); + } + } +} diff --git a/api/src/utils/validate-storage.ts b/api/src/utils/validate-storage.ts new file mode 100644 index 0000000000..12478c1dce --- /dev/null +++ b/api/src/utils/validate-storage.ts @@ -0,0 +1,31 @@ +import env from '../env'; +import logger from '../logger'; +import { access } from 'fs-extra'; +import { constants } from 'fs'; +import path from 'path'; + +export async function validateStorage(): Promise { + if (env.DB_CLIENT === 'sqlite3') { + try { + await access(path.dirname(env.DB_FILENAME), constants.R_OK | constants.W_OK); + } catch { + logger.warn( + `Directory for SQLite database file (${path.resolve(path.dirname(env.DB_FILENAME))}) is not read/writeable!` + ); + } + } + + if (env.STORAGE_LOCATIONS.split(',').includes('local')) { + try { + await access(env.STORAGE_LOCAL_ROOT, constants.R_OK | constants.W_OK); + } catch { + logger.warn(`Upload directory (${path.resolve(env.STORAGE_LOCAL_ROOT)}) is not read/writeable!`); + } + } + + try { + await access(env.EXTENSIONS_PATH, constants.R_OK); + } catch { + logger.warn(`Extensions directory (${path.resolve(env.EXTENSIONS_PATH)}) is not readable!`); + } +} diff --git a/api/src/webhooks.ts b/api/src/webhooks.ts index d39d42be80..12f33f0a55 100644 --- a/api/src/webhooks.ts +++ b/api/src/webhooks.ts @@ -61,7 +61,7 @@ function createHandler(webhook: Webhook): ListenerFn { method: webhook.method, data: webhook.data ? webhookPayload : null, }); - } catch (error) { + } catch (error: any) { logger.warn(`Webhook "${webhook.name}" (id: ${webhook.id}) failed`); logger.warn(error); } diff --git a/api/tsconfig.json b/api/tsconfig.json index 0d81c4580c..cf93da6c53 100644 --- a/api/tsconfig.json +++ b/api/tsconfig.json @@ -10,7 +10,8 @@ "strict": true, "lib": ["es2019"], "skipLibCheck": true, - "declaration": true + "declaration": true, + "resolveJsonModule": true }, "exclude": ["node_modules", "dist"] } diff --git a/app/package.json b/app/package.json index 236a38a7ea..8acb19216d 100644 --- a/app/package.json +++ b/app/package.json @@ -1,6 +1,6 @@ { "name": "@directus/app", - "version": "9.0.0-rc.83", + "version": "9.0.0-rc.92", "private": false, "description": "Directus is an Open-Source Headless CMS & API for Managing Custom Databases", "author": "Rijk van Zanten ", @@ -23,72 +23,84 @@ "serve": "vite preview", "copy-docs-images": "rimraf public/img/docs && copyfiles -u 3 \"../docs/assets/**/*\" \"public/img/docs\" --verbose", "predev": "npm run copy-docs-images", - "prebuild": "npm run copy-docs-images", - "prepublishOnly": "npm run build" + "prebuild": "npm run copy-docs-images" }, "gitHead": "24621f3934dc77eb23441331040ed13c676ceffd", "devDependencies": { - "@directus/docs": "9.0.0-rc.83", - "@directus/extension-sdk": "9.0.0-rc.83", - "@directus/format-title": "9.0.0-rc.83", - "@directus/shared": "9.0.0-rc.83", - "@fullcalendar/core": "5.8.0", - "@fullcalendar/daygrid": "5.8.0", - "@fullcalendar/interaction": "5.8.0", - "@fullcalendar/list": "5.8.0", - "@fullcalendar/timegrid": "5.8.0", - "@popperjs/core": "2.9.2", - "@rollup/plugin-yaml": "3.0.0", + "@directus/docs": "9.0.0-rc.92", + "@directus/extensions-sdk": "9.0.0-rc.92", + "@directus/format-title": "9.0.0-rc.92", + "@directus/shared": "9.0.0-rc.92", + "@fullcalendar/core": "5.9.0", + "@fullcalendar/daygrid": "5.9.0", + "@fullcalendar/interaction": "5.9.0", + "@fullcalendar/list": "5.9.0", + 
"@fullcalendar/timegrid": "5.9.0", + "@mapbox/mapbox-gl-draw": "1.3.0", + "@mapbox/mapbox-gl-draw-static-mode": "1.0.1", + "@mapbox/mapbox-gl-geocoder": "4.7.3", + "@popperjs/core": "2.9.3", + "@rollup/plugin-yaml": "3.1.0", "@sindresorhus/slugify": "2.1.0", - "@tinymce/tinymce-vue": "4.0.3", + "@tinymce/tinymce-vue": "4.0.4", + "@turf/meta": "6.5.0", "@types/base-64": "1.0.0", - "@types/bytes": "3.1.0", - "@types/codemirror": "5.60.1", - "@types/color": "3.0.1", - "@types/diff": "5.0.0", - "@types/dompurify": "2.2.2", - "@types/lodash": "4.14.170", - "@types/markdown-it": "12.0.2", - "@types/marked": "2.0.3", - "@types/mime-types": "2.1.0", + "@types/bytes": "3.1.1", + "@types/codemirror": "5.60.2", + "@types/color": "3.0.2", + "@types/diacritics": "1.3.1", + "@types/diff": "5.0.1", + "@types/dompurify": "2.2.3", + "@types/geojson": "7946.0.8", + "@types/lodash": "4.14.172", + "@types/mapbox__mapbox-gl-draw": "1.2.3", + "@types/mapbox__mapbox-gl-geocoder": "4.7.1", + "@types/markdown-it": "12.2.1", + "@types/marked": "2.0.5", + "@types/mime-types": "2.1.1", "@types/ms": "0.7.31", - "@types/qrcode": "1.4.0", - "@vitejs/plugin-vue": "1.2.4", + "@types/qrcode": "1.4.1", + "@types/wellknown": "0.5.1", + "@vitejs/plugin-vue": "1.6.2", "@vue/cli-plugin-babel": "4.5.13", "@vue/cli-plugin-router": "4.5.13", "@vue/cli-plugin-typescript": "4.5.13", "@vue/cli-plugin-vuex": "4.5.13", "@vue/cli-service": "4.5.13", - "@vue/compiler-sfc": "3.1.2", + "@vue/compiler-sfc": "3.2.11", "apexcharts": "3.26.3", - "axios": "0.21.1", + "axios": "0.21.4", "base-64": "1.0.0", - "codemirror": "5.62.0", + "codemirror": "5.62.3", "copyfiles": "2.4.1", "cropperjs": "1.5.12", - "date-fns": "2.22.1", - "dompurify": "2.2.9", + "date-fns": "2.23.0", + "diacritics": "1.3.0", + "dompurify": "2.3.1", "escape-string-regexp": "5.0.0", "front-matter": "4.0.2", "html-entities": "2.3.2", "jsonlint-mod": "1.7.6", - "marked": "2.1.3", + "maplibre-gl": "1.15.2", + "marked": "3.0.0", "micromustache": "8.0.3", "mime": "2.5.2", "mitt": "3.0.0", - "nanoid": "3.1.23", - "pinia": "2.0.0-beta.3", - "prettier": "2.3.2", + "nanoid": "3.1.25", + "p-queue": "7.1.0", + "pinia": "2.0.0-rc.9", + "prettier": "2.4.0", "pretty-ms": "7.0.1", "qrcode": "1.4.4", "rimraf": "3.0.2", - "sass": "1.35.1", - "tinymce": "5.8.2", - "typescript": "4.3.4", - "vite": "2.3.8", - "vue": "3.1.2", - "vue-i18n": "9.1.6", - "vue-router": "4.0.10", - "vuedraggable": "4.0.3" + "sass": "1.39.2", + "tinymce": "5.9.2", + "typescript": "4.4.3", + "vite": "2.5.7", + "vue": "3.2.11", + "vue-i18n": "9.1.7", + "vue-router": "4.0.11", + "vuedraggable": "4.1.0", + "wellknown": "0.5.0" } } diff --git a/app/src/api.ts b/app/src/api.ts index 017b715330..5c380b1a01 100644 --- a/app/src/api.ts +++ b/app/src/api.ts @@ -3,6 +3,7 @@ import { useRequestsStore } from '@/stores/'; import { getRootPath } from '@/utils/get-root-path'; import axios, { AxiosError, AxiosRequestConfig, AxiosResponse } from 'axios'; import { addQueryToPath } from './utils/add-query-to-path'; +import PQueue from 'p-queue'; const api = axios.create({ baseURL: getRootPath(), @@ -12,6 +13,8 @@ const api = axios.create({ }, }); +const queue = new PQueue({ concurrency: 5, intervalCap: 5, interval: 500, carryoverConcurrencyCount: true }); + interface RequestConfig extends AxiosRequestConfig { id: string; } @@ -24,7 +27,7 @@ export interface RequestError extends AxiosError { response: Response; } -export const onRequest = (config: AxiosRequestConfig): RequestConfig => { +export const onRequest = (config: 
AxiosRequestConfig): Promise<RequestConfig> => {
 	const requestsStore = useRequestsStore();
 	const id = requestsStore.startRequest();
@@ -33,7 +36,9 @@
 		...config,
 	};
 
-	return requestConfig;
+	return new Promise((resolve) => {
+		queue.add(() => resolve(requestConfig));
+	});
 };
 
 export const onResponse = (response: AxiosResponse | Response): AxiosResponse | Response => {
@@ -90,13 +95,13 @@ api.interceptors.response.use(onResponse, onError);
 
 export default api;
 
-function getToken() {
+export function getToken(): string | null {
 	return api.defaults.headers?.['Authorization']?.split(' ')[1] || null;
 }
 
 export function addTokenToURL(url: string, token?: string): string {
-	token = token || getToken();
-	if (!token) return url;
+	const accessToken = token || getToken();
+	if (!accessToken) return url;
 
-	return addQueryToPath(url, { access_token: token });
+	return addQueryToPath(url, { access_token: accessToken });
 }
diff --git a/app/src/app.vue b/app/src/app.vue
index de65fffe6e..93a5ec8f56 100644
--- a/app/src/app.vue
+++ b/app/src/app.vue
@@ -1,7 +1,7 @@
@@ -24,7 +24,6 @@
 import { defineComponent, computed } from 'vue';
 import useSync from '@/composables/use-sync';
 
 export default defineComponent({
-	emits: ['update:indeterminate', 'update:modelValue', 'update:value'],
 	props: {
 		value: {
 			type: String,
@@ -71,6 +70,7 @@
 			default: null,
 		},
 	},
+	emits: ['update:indeterminate', 'update:modelValue', 'update:value'],
 	setup(props, { emit }) {
 		const internalValue = useSync(props, 'value', emit);
@@ -224,10 +224,6 @@
 			opacity: 0.1;
 		}
 	}
-
-	input {
-		//
-	}
 }
 
 .prepend,
diff --git a/app/src/components/v-chip/v-chip.vue b/app/src/components/v-chip/v-chip.vue
index e5be0ac9fa..011f2870e3 100644
--- a/app/src/components/v-chip/v-chip.vue
+++ b/app/src/components/v-chip/v-chip.vue
@@ -19,7 +19,6 @@
 import { defineComponent, ref, computed } from 'vue';
 import useSizeClass, { sizeProps } from '@/composables/size-class';
 
 export default defineComponent({
-	emits: ['update:active', 'click', 'close'],
 	props: {
 		active: {
 			type: Boolean,
@@ -47,6 +46,7 @@
 		},
 		...sizeProps,
 	},
+	emits: ['update:active', 'click', 'close'],
 	setup(props, { emit }) {
 		const internalLocalActive = ref(true);
diff --git a/app/src/components/v-detail/v-detail.vue b/app/src/components/v-detail/v-detail.vue
index d3eb612a6c..03167c464c 100644
--- a/app/src/components/v-detail/v-detail.vue
+++ b/app/src/components/v-detail/v-detail.vue
@@ -2,7 +2,7 @@
- + {{ label }} @@ -19,7 +19,6 @@ import { defineComponent, computed, ref } from 'vue'; import { i18n } from '@/lang'; export default defineComponent({ - emits: ['update:modelValue'], props: { modelValue: { type: Boolean, @@ -38,7 +37,7 @@ export default defineComponent({ default: false, }, }, - + emits: ['update:modelValue'], setup(props, { emit }) { const localActive = ref(props.startOpen); diff --git a/app/src/components/v-dialog/v-dialog.vue b/app/src/components/v-dialog/v-dialog.vue index 820d3a33f9..953dae9d55 100644 --- a/app/src/components/v-dialog/v-dialog.vue +++ b/app/src/components/v-dialog/v-dialog.vue @@ -20,7 +20,6 @@ import useShortcut from '@/composables/use-shortcut'; import { useDialogRouteLeave } from '@/composables/use-dialog-route'; export default defineComponent({ - emits: ['esc', 'update:modelValue'], props: { modelValue: { type: Boolean, @@ -36,6 +35,7 @@ export default defineComponent({ validator: (val: string) => ['center', 'right'].includes(val), }, }, + emits: ['esc', 'update:modelValue'], setup(props, { emit }) { useShortcut('escape', (event, cancelNext) => { if (internalActive.value) { diff --git a/app/src/components/v-drawer/v-drawer.vue b/app/src/components/v-drawer/v-drawer.vue index 8aa2ac2bcf..c204f26165 100644 --- a/app/src/components/v-drawer/v-drawer.vue +++ b/app/src/components/v-drawer/v-drawer.vue @@ -1,5 +1,5 @@