diff --git a/.eslintignore b/.eslintignore
index f06235c460..ae44c032fc 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -1,2 +1,3 @@
 node_modules
 dist
+templates
diff --git a/.eslintrc.js b/.eslintrc.js
index d641f4a30e..e338ba96cf 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -38,10 +38,9 @@ module.exports = {
 				parser: '@typescript-eslint/parser',
 			},
 			extends: [
-				'plugin:vue/vue3-essential',
+				'plugin:vue/vue3-recommended',
 				'eslint:recommended',
 				'plugin:@typescript-eslint/recommended',
-				'plugin:prettier-vue/recommended',
 				'prettier',
 			],
 			rules: {
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index e181d3dd09..8de2b788b6 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -5,12 +5,17 @@ body:
   - type: markdown
     attributes:
       value: Hi, thank you for taking the time to create an issue!
-  - type: markdown
+  - type: checkboxes
+    id: troubleshooting
     attributes:
-      value: 'Before continuing, you must first have completed all [Troubleshooting Steps](https://docs.directus.io/getting-started/support/#troubleshooting-steps)'
-  - type: markdown
-    attributes:
-      value: Please confirm that an issue describing this problem doesn't exist already.
+      label: Preflight Checklist
+      options:
+        - label: I have completed all [Troubleshooting Steps](https://docs.directus.io/getting-started/support/#troubleshooting-steps).
+          required: true
+        - label: I'm on [the latest version of Directus](https://github.com/directus/directus/releases).
+          required: true
+        - label: There's [no other issue](https://github.com/directus/directus/issues) that already describes my problem.
+          required: true
   - type: textarea
     attributes:
       label: Describe the Bug
diff --git a/.github/actions/build-images/Dockerfile b/.github/actions/build-images/Dockerfile
deleted file mode 100644
index b9e58e9666..0000000000
--- a/.github/actions/build-images/Dockerfile
+++ /dev/null
@@ -1,15 +0,0 @@
-FROM docker:stable
-
-RUN \
-  apk update && \
-  apk upgrade && \
-  apk add bash
-
-COPY ./rootfs/ /
-
-RUN \
-  chmod +x /usr/bin/lib/argsf && \
-  chmod +x /usr/bin/entrypoint && \
-  chmod +x /usr/bin/semver
-
-ENTRYPOINT ["entrypoint"]
diff --git a/.github/actions/build-images/action.yml b/.github/actions/build-images/action.yml
deleted file mode 100644
index b15148ebc1..0000000000
--- a/.github/actions/build-images/action.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-name: "Build and publish Directus images"
-description: "GitHub Action to publish Directus container images."
-branding: - icon: archive - color: gray-dark -inputs: - repository: - description: "Repository name" - required: true - registry: - description: "Registry" - required: true - username: - description: "Registry user" - required: true - password: - description: "Registry password" - required: true - version: - description: "Version" - required: true - push: - description: "Push" - required: false - default: "false" - latest: - description: "Latest" - required: false - default: "false" -runs: - using: "docker" - image: "Dockerfile" - args: - - --registry - - ${{ inputs.registry }} - - --repository - - ${{ inputs.repository }} - - --username - - ${{ inputs.username }} - - --password - - ${{ inputs.password }} - - --version - - ${{ inputs.version }} - - --push - - ${{ inputs.push }} - - --latest - - ${{ inputs.latest }} diff --git a/.github/actions/build-images/rootfs/directus/images/main/.editorconfig b/.github/actions/build-images/rootfs/directus/images/main/.editorconfig deleted file mode 100644 index 071b6ae2a1..0000000000 --- a/.github/actions/build-images/rootfs/directus/images/main/.editorconfig +++ /dev/null @@ -1,13 +0,0 @@ -root = true - -[*] -charset = utf-8 -end_of_line = lf -indent_size = 2 -indent_style = space -insert_final_newline = true -tab_width = 2 -trim_trailing_whitespace = true - -[Makefile] -indent_style = tab diff --git a/.github/actions/build-images/rootfs/directus/images/main/Dockerfile b/.github/actions/build-images/rootfs/directus/images/main/Dockerfile deleted file mode 100644 index 42b6478af6..0000000000 --- a/.github/actions/build-images/rootfs/directus/images/main/Dockerfile +++ /dev/null @@ -1,106 +0,0 @@ -# Builder image -FROM alpine:latest AS builder - -ARG VERSION -ARG REPOSITORY=directus/directus - -# Get runtime dependencies from optional dependencies -# defined in package.json of Directus API package -WORKDIR /directus -RUN apk add --no-cache jq \ - && wget -O directus-api-package.json "https://raw.githubusercontent.com/${REPOSITORY}/${VERSION}/api/package.json" \ - && jq '{ \ - name: "directus-project", \ - version: "1.0.0", \ - description: "Directus Project", \ - dependencies: .optionalDependencies \ - }' \ - directus-api-package.json > package.json - -# Directus image -FROM node:16-alpine - -ARG VERSION -ARG REPOSITORY=directus/directus - -LABEL directus.version="${VERSION}" -LABEL org.opencontainers.image.source https://github.com/${REPOSITORY} - -# Default environment variables -# (see https://docs.directus.io/reference/environment-variables/) -ENV \ - PORT="8055" \ - PUBLIC_URL="/" \ - DB_CLIENT="sqlite3" \ - DB_FILENAME="/directus/database/database.sqlite" \ - RATE_LIMITER_ENABLED="false" \ - RATE_LIMITER_STORE="memory" \ - RATE_LIMITER_POINTS="25" \ - RATE_LIMITER_DURATION="1" \ - CACHE_ENABLED="false" \ - STORAGE_LOCATIONS="local" \ - STORAGE_LOCAL_PUBLIC_URL="/uploads" \ - STORAGE_LOCAL_DRIVER="local" \ - STORAGE_LOCAL_ROOT="/directus/uploads" \ - ACCESS_TOKEN_TTL="15m" \ - REFRESH_TOKEN_TTL="7d" \ - REFRESH_TOKEN_COOKIE_SECURE="false" \ - REFRESH_TOKEN_COOKIE_SAME_SITE="lax" \ - OAUTH_PROVIDERS="" \ - EXTENSIONS_PATH="/directus/extensions" \ - EMAIL_FROM="no-reply@directus.io" \ - EMAIL_TRANSPORT="sendmail" \ - EMAIL_SENDMAIL_NEW_LINE="unix" \ - EMAIL_SENDMAIL_PATH="/usr/sbin/sendmail" - -RUN \ - # Install system dependencies - # - 'bash' for entrypoint script - # - 'ssmtp' to be able to send mails - # - 'util-linux' not sure if this is required - apk upgrade --no-cache && apk add --no-cache \ - bash \ - ssmtp \ - util-linux \ - # Install 
global node dependencies - && npm install -g \ - yargs \ - pino \ - pino-colada \ - # Create directory for Directus with corresponding ownership - # (can be omitted on newer Docker versions since WORKDIR below will do the same) - && mkdir /directus && chown node:node /directus - -# Switch to user 'node' and directory '/directus' -USER node -WORKDIR /directus - -# Get package.json from builder image -COPY --from=builder --chown=node:node /directus/package.json . - -RUN \ - # Install Directus and runtime dependencies - # (retry if it fails for some reason, e.g. release not published yet) - for i in $(seq 10); do npm install "directus@${VERSION}" && break || sleep 30; done && \ - npm install \ - # Create data directories - && mkdir -p \ - database \ - extensions/displays \ - extensions/interfaces \ - extensions/layouts \ - extensions/modules \ - uploads - -# Expose data directories as volumes -VOLUME \ - /directus/database \ - /directus/extensions \ - /directus/uploads - -# Copy rootfs files -COPY ./rootfs / - -EXPOSE 8055 -SHELL ["/bin/bash", "-c"] -ENTRYPOINT ["entrypoint"] diff --git a/.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/local/bin/entrypoint b/.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/local/bin/entrypoint deleted file mode 100755 index 7b2d1a6298..0000000000 --- a/.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/local/bin/entrypoint +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env bash - -set -e - -function bootstrap() { - local warn=false - - if [ "${KEY}" == "" ] ; then - export KEY=$(uuidgen) - warn=true - fi - - if [ "${SECRET}" == "" ] ; then - export SECRET=$(node -e 'console.log(require("nanoid").nanoid(32))') - warn=true - fi - - if [ "${warn}" == "true" ] ; then - print --level=warn --stdin < -> WARNING! -> -> The KEY and SECRET environment variables are not set. Some -> temporary variables were generated to fill the gap, but in -> production this is going to cause problems. -> -> Reference: -> https://docs.directus.io/reference/environment-variables.html -> -> -WARN - fi - - # Create folder if using sqlite and file doesn't exist - if [ "${DB_CLIENT}" == "sqlite3" ] ; then - if [ "${DB_FILENAME}" == "" ] ; then - print --level=error "Missing DB_FILENAME environment variable" - exit 1 - fi - - if [ ! -f "${DB_FILENAME}" ] ; then - mkdir -p $(dirname ${DB_FILENAME}) - fi - fi - - npx directus bootstrap -} - -command="" -if [ $# -eq 0 ] ; then - command="start" -elif [ "${1}" == "bash" ] || [ "${1}" == "shell" ] ; then - shift - exec bash $@ -elif [ "${1}" == "command" ] ; then - shift - exec $@ -else - command="${1}" - shift -fi - -bootstrap -exec npx directus "${command}" $@ diff --git a/.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/local/bin/print b/.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/local/bin/print deleted file mode 100755 index 037ecd8f0f..0000000000 --- a/.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/local/bin/print +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env node - -// Workarounds? 
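The entrypoint above falls back to generated credentials when KEY or SECRET are unset. A minimal TypeScript sketch of that fallback, reusing the same uuid and nanoid primitives the image installs (the function name and warning text are illustrative assumptions, not code from this patch):

```typescript
import { randomUUID } from 'crypto';
import { nanoid } from 'nanoid';

// Generate throwaway KEY/SECRET values when none are configured, as the
// entrypoint does with uuidgen and require("nanoid").nanoid(32).
function ensureAuthSecrets(env: NodeJS.ProcessEnv): boolean {
	let generated = false;
	if (!env.KEY) {
		env.KEY = randomUUID();
		generated = true;
	}
	if (!env.SECRET) {
		env.SECRET = nanoid(32);
		generated = true;
	}
	return generated;
}

if (ensureAuthSecrets(process.env)) {
	console.warn('KEY and/or SECRET were generated at runtime; set them explicitly in production.');
}
```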
-process.env.NODE_PATH = "/usr/local/lib/node_modules"; -require("module").Module._initPaths(); - -/** - * Read lines from stdin - */ -async function readlines() { - const chunks = []; - for await (const chunk of process.stdin) { - chunks.push(chunk); - } - - const lines = chunks.join("").split("\n"); - lines.pop(); - return lines; -} - -(async function () { - // Logger - const yargs = require("yargs"); - const logger = require("pino")({ - prettyPrint: process.env.LOG_STYLE !== "raw", - prettifier: require("pino-colada"), - level: process.env.LOG_LEVEL || "info", - }); - - function write(...message) { - if (level in logger) { - logger[level](...message); - } else { - logger.info(...message); - } - } - - const args = yargs.argv; - const level = args.level || "info"; - const stdin = args.stdin || false; - - if (stdin) { - const lines = await readlines(); - lines.forEach((line) => write(line)); - } else { - write(...args._); - } -})(); diff --git a/.github/actions/build-images/rootfs/usr/bin/entrypoint b/.github/actions/build-images/rootfs/usr/bin/entrypoint deleted file mode 100644 index d08bc87bb2..0000000000 --- a/.github/actions/build-images/rootfs/usr/bin/entrypoint +++ /dev/null @@ -1,138 +0,0 @@ -#!/usr/bin/env bash - -set -e - -root=$(dirname ${0}) -source ${root}/lib/argsf - -# -# Makes a set of tags -# -function make_tags() { - local prefix="" - local version=${1} - - semver get major ${version} > /dev/null 2>&1 - if [ "$?" != "0" ]; then - echo "${version}" - else - if [ "${version:0:1}" == "v" ]; then - prefix="v" - fi - - major="$(semver get major ${version})" - minor="${major}.$(semver get minor ${version})" - patch="${minor}.$(semver get patch ${version})" - - prerel="$(semver get prerel ${version})" - if [ "${prerel}" == "" ]; then - is_prerel=false - else - is_prerel=true - fi - - build="$(semver get build ${version})" - if [ "${build}" == "" ]; then - is_build=false - else - is_build=true - fi - - if [ "${is_prerel}" == "true" ]; then - echo "${prefix}${major}-${prerel}" - echo "${prefix}${minor}-${prerel}" - echo "${prefix}${patch}-${prerel}" - if [ "${is_build}" == "true" ]; then - echo "${prefix}${major}-${prerel}-${build}" - fi - else - echo "${prefix}${major}" - echo "${prefix}${minor}" - echo "${prefix}${patch}" - if [ "${is_build}" == "true" ]; then - echo "${prefix}${patch}-${build}" - fi - fi - fi -} - -# -# Build script -# -function main() { - username=$(argument username) - password=$(argument password) - - push=$(argument push "false") - latest=$(argument latest "false") - - registry=$(argument registry "") - registry=$(echo "${registry}" | tr '[:upper:]' '[:lower:]') - - repository=$(argument repository "directus/directus") - repository=$(echo "${repository}" | tr '[:upper:]' '[:lower:]') - - version=$(argument version "") - context=$(argument context ".") - - image="${repository}" - if [ "${registry}" != "" ]; then - image="${registry}/${image}" - fi - - # Normalize tag - if [ "${version}" == "" ]; then - version=${GITHUB_REF##*/} - else - version=${version##*/} - fi - - if [ "${version}" == "" ]; then - version=$(echo ${GITHUB_SHA:-"000000000000"} | cut -c1-12) - fi - - tags=$(make_tags ${version}) - echo "Tags = ${tags}" - - # build image - docker build \ - -t directus:main \ - --build-arg VERSION=${version} \ - --build-arg REPOSITORY=${repository} \ - /directus/images/main - - # login into registry - docker login -u "${username}" -p "${password}" "${registry}" - - # Push latest - # TODO: check if it's really the latest - if [ "${latest}" == "true" ]; then - 
fqin="${image}:latest" - echo "Tagging ${fqin}" - docker tag directus:main ${fqin} - if [ "${push}" == "true" ]; then - echo "Pushing tag ${fqin}" - docker push "${fqin}" - fi - fi - - # Push tags - for tag in $tags - do - tag=$(echo "${tag}" | tr '[:upper:]' '[:lower:]') - fqin="${image}:${tag}" - echo "Tagging ${fqin}" - docker tag directus:main "${fqin}" - if [ "${push}" == "true" ]; then - echo "Pushing tag ${fqin}" - docker push "${fqin}" - fi - done - - echo "Finished." - - exit $? -} - -main -exit $? diff --git a/.github/actions/build-images/rootfs/usr/bin/lib/argsf b/.github/actions/build-images/rootfs/usr/bin/lib/argsf deleted file mode 100644 index 0869fa25bd..0000000000 --- a/.github/actions/build-images/rootfs/usr/bin/lib/argsf +++ /dev/null @@ -1,98 +0,0 @@ -# -# Arguments and Flags (argsf) -# This is meant to work with bash shell -# To use, source this file into your bash scripts -# -# Implemented by João Biondo -# https://github.com/WoLfulus/argsf -# - -declare _ARGCOUNT=$# -declare _ARGDATA=("$@") -declare -A _ARGMAP -declare -A _FLAGMAP - -for ((_arg_index_key=1;_arg_index_key<=$#;_arg_index_key++)) -do - _arg_index_value=$(expr $_arg_index_key + 1) - _arg_key=${!_arg_index_key} - _arg_value=${!_arg_index_value} - if [[ $_arg_key == *"--"* ]]; then - if [[ $_arg_key == *" "* ]]; then - continue - fi - _arg_name="${_arg_key:2}" - _FLAGMAP[${_arg_name}]=1 - if [[ $_arg_value != *"--"* ]] || [[ $_arg_value == *" "* ]] ; then - _ARGMAP[${_arg_name}]="$_arg_value" - else - _ARGMAP[${_arg_name}]="" - fi - fi -done - -function _argument() { - if test "${_ARGMAP[${ARG_NAME}]+isset}" ; then - echo ${_ARGMAP[${ARG_NAME}]} - else - if [ ${ARG_DEFAULT} -eq 0 ]; then - echo "Error: required argument '--${ARG_NAME}' not specified" 1>&2 - exit 1 - else - echo ${ARG_DEFAULT_VALUE} - fi - fi -} - -function argument() { - if [ $# -eq 1 ]; then - ARG_NAME="$1" ARG_DEFAULT=0 ARG_DEFAULT_VALUE= _argument "${_ARGUMENT_DATA}" - elif [ $# -eq 2 ]; then - ARG_NAME="$1" ARG_DEFAULT=1 ARG_DEFAULT_VALUE="$2" _argument "${_ARGUMENT_DATA}" - else - echo "argument: invalid number of arguments" 1>&2 - return 1 - fi - return 0 -} - -function flage() { - if [ $# -eq 1 ]; then - if [[ ${_FLAGMAP[$1]} ]] ; then - echo "true" - return 0 - elif [[ ${_FLAGMAP[no-$1]} ]] ; then - echo "false" - return 0 - else - echo "true" - return 0 - fi - else - echo "flag: invalid number of arguments" 1>&2 - return 1 - fi -} - -function flagd() { - if [ $# -eq 1 ]; then - if [[ ${_FLAGMAP[$1]} ]] ; then - echo "true" - return 0 - elif [[ ${_FLAGMAP[no-$1]} ]] ; then - echo "false" - return 0 - else - echo "false" - return 0 - fi - else - echo "flag: invalid number of arguments" 1>&2 - return 1 - fi -} - -function flag() { - flagd $1 - return $? -} diff --git a/.github/actions/build-images/rootfs/usr/bin/semver b/.github/actions/build-images/rootfs/usr/bin/semver deleted file mode 100644 index c3d5075162..0000000000 --- a/.github/actions/build-images/rootfs/usr/bin/semver +++ /dev/null @@ -1,284 +0,0 @@ -#!/usr/bin/env bash - -# -# Copyright (c) 2014-2015 François Saint-Jacques -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 3, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. 
See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# this program. If not, see <http://www.gnu.org/licenses/>.
-#
-
-set -o errexit -o nounset -o pipefail
-
-NAT='0|[1-9][0-9]*'
-ALPHANUM='[0-9]*[A-Za-z-][0-9A-Za-z-]*'
-IDENT="$NAT|$ALPHANUM"
-FIELD='[0-9A-Za-z-]+'
-
-SEMVER_REGEX="\
-^[vV]?\
-($NAT)\\.($NAT)\\.($NAT)\
-(\\-(${IDENT})(\\.(${IDENT}))*)?\
-(\\+${FIELD}(\\.${FIELD})*)?$"
-
-PROG=semver
-PROG_VERSION="3.0.0"
-
-USAGE="\
-Usage:
-  $PROG bump (major|minor|patch|release|prerel <prerel>|build <build>) <version>
-  $PROG compare <version> <other_version>
-  $PROG get (major|minor|patch|release|prerel|build) <version>
-  $PROG --help
-  $PROG --version
-Arguments:
-  <version>  A version must match the following regular expression:
-             \"${SEMVER_REGEX}\"
-             In English:
-             -- The version must match X.Y.Z[-PRERELEASE][+BUILD]
-                where X, Y and Z are non-negative integers.
-             -- PRERELEASE is a dot separated sequence of non-negative integers and/or
-                identifiers composed of alphanumeric characters and hyphens (with
-                at least one non-digit). Numeric identifiers must not have leading
-                zeros. A hyphen (\"-\") introduces this optional part.
-             -- BUILD is a dot separated sequence of identifiers composed of alphanumeric
-                characters and hyphens. A plus (\"+\") introduces this optional part.
-                See <version> definition.
-  <prerel>   A string as defined by PRERELEASE above.
-  <build>    A string as defined by BUILD above.
-Options:
-  -v, --version  Print the version of this tool.
-  -h, --help     Print this help message.
-Commands:
-  bump     Bump by one of major, minor, patch; zeroing or removing
-           subsequent parts. \"bump prerel\" sets the PRERELEASE part and
-           removes any BUILD part. \"bump build\" sets the BUILD part.
-           \"bump release\" removes any PRERELEASE or BUILD parts.
-           The bumped version is written to stdout.
-  compare  Compare <version> with <other_version>, output to stdout the
-           following values: -1 if <other_version> is newer, 0 if equal, 1 if
-           older. The BUILD part is not used in comparisons.
-  get      Extract given part of <version>, where part is one of major, minor,
-           patch, prerel, build, or release.
-See also:
-  https://semver.org -- Semantic Versioning 2.0.0"
-
-function error {
-  echo -e "$1" >&2
-  exit 1
-}
-
-function usage-help {
-  error "$USAGE"
-}
-
-function usage-version {
-  echo -e "${PROG}: $PROG_VERSION"
-  exit 0
-}
-
-function validate-version {
-  local version=$1
-  if [[ "$version" =~ $SEMVER_REGEX ]]; then
-    # if a second argument is passed, store the result in var named by $2
-    if [ "$#" -eq "2" ]; then
-      local major=${BASH_REMATCH[1]}
-      local minor=${BASH_REMATCH[2]}
-      local patch=${BASH_REMATCH[3]}
-      local prere=${BASH_REMATCH[4]}
-      local build=${BASH_REMATCH[8]}
-      eval "$2=(\"$major\" \"$minor\" \"$patch\" \"$prere\" \"$build\")"
-    else
-      echo "$version"
-    fi
-  else
-    error "version $version does not match the semver scheme 'X.Y.Z(-PRERELEASE)(+BUILD)'. See help for more information."
-  fi
-}
-
-function is-nat {
-  [[ "$1" =~ ^($NAT)$ ]]
-}
-
-function is-null {
-  [ -z "$1" ]
-}
-
-function order-nat {
-  [ "$1" -lt "$2" ] && { echo -1 ; return ; }
-  [ "$1" -gt "$2" ] && { echo 1 ; return ; }
-  echo 0
-}
-
-function order-string {
-  [[ $1 < $2 ]] && { echo -1 ; return ; }
-  [[ $1 > $2 ]] && { echo 1 ; return ; }
-  echo 0
-}
-
-# given two (named) arrays containing NAT and/or ALPHANUM fields, compare them
-# one by one according to semver 2.0.0 spec. Return -1, 0, 1 if left array ($1)
-# is less-than, equal, or greater-than the right array ($2). The longer array
-# is considered greater-than the shorter if the shorter is a prefix of the longer.
-# -function compare-fields { - local l="$1[@]" - local r="$2[@]" - local leftfield=( "${!l}" ) - local rightfield=( "${!r}" ) - local left - local right - - local i=$(( -1 )) - local order=$(( 0 )) - - while true - do - [ $order -ne 0 ] && { echo $order ; return ; } - - : $(( i++ )) - left="${leftfield[$i]}" - right="${rightfield[$i]}" - - is-null "$left" && is-null "$right" && { echo 0 ; return ; } - is-null "$left" && { echo -1 ; return ; } - is-null "$right" && { echo 1 ; return ; } - - is-nat "$left" && is-nat "$right" && { order=$(order-nat "$left" "$right") ; continue ; } - is-nat "$left" && { echo -1 ; return ; } - is-nat "$right" && { echo 1 ; return ; } - { order=$(order-string "$left" "$right") ; continue ; } - done -} - -# shellcheck disable=SC2206 # checked by "validate"; ok to expand prerel id's into array -function compare-version { - local order - validate-version "$1" V - validate-version "$2" V_ - - # compare major, minor, patch - - local left=( "${V[0]}" "${V[1]}" "${V[2]}" ) - local right=( "${V_[0]}" "${V_[1]}" "${V_[2]}" ) - - order=$(compare-fields left right) - [ "$order" -ne 0 ] && { echo "$order" ; return ; } - - # compare pre-release ids when M.m.p are equal - - local prerel="${V[3]:1}" - local prerel_="${V_[3]:1}" - local left=( ${prerel//./ } ) - local right=( ${prerel_//./ } ) - - # if left and right have no pre-release part, then left equals right - # if only one of left/right has pre-release part, that one is less than simple M.m.p - - [ -z "$prerel" ] && [ -z "$prerel_" ] && { echo 0 ; return ; } - [ -z "$prerel" ] && { echo 1 ; return ; } - [ -z "$prerel_" ] && { echo -1 ; return ; } - - # otherwise, compare the pre-release id's - - compare-fields left right -} - -function command-bump { - local new; local version; local sub_version; local command; - - case $# in - 2) case $1 in - major|minor|patch|release) command=$1; version=$2;; - *) usage-help;; - esac ;; - 3) case $1 in - prerel|build) command=$1; sub_version=$2 version=$3 ;; - *) usage-help;; - esac ;; - *) usage-help;; - esac - - validate-version "$version" parts - # shellcheck disable=SC2154 - local major="${parts[0]}" - local minor="${parts[1]}" - local patch="${parts[2]}" - local prere="${parts[3]}" - local build="${parts[4]}" - - case "$command" in - major) new="$((major + 1)).0.0";; - minor) new="${major}.$((minor + 1)).0";; - patch) new="${major}.${minor}.$((patch + 1))";; - release) new="${major}.${minor}.${patch}";; - prerel) new=$(validate-version "${major}.${minor}.${patch}-${sub_version}");; - build) new=$(validate-version "${major}.${minor}.${patch}${prere}+${sub_version}");; - *) usage-help ;; - esac - - echo "$new" - exit 0 -} - -function command-compare { - local v; local v_; - - case $# in - 2) v=$(validate-version "$1"); v_=$(validate-version "$2") ;; - *) usage-help ;; - esac - - set +u # need unset array element to evaluate to null - compare-version "$v" "$v_" - exit 0 -} - - -# shellcheck disable=SC2034 -function command-get { - local part version - - if [[ "$#" -ne "2" ]] || [[ -z "$1" ]] || [[ -z "$2" ]]; then - usage-help - exit 0 - fi - - part="$1" - version="$2" - - validate-version "$version" parts - local major="${parts[0]}" - local minor="${parts[1]}" - local patch="${parts[2]}" - local prerel="${parts[3]:1}" - local build="${parts[4]:1}" - local release="${major}.${minor}.${patch}" - - case "$part" in - major|minor|patch|release|prerel|build) echo "${!part}" ;; - *) usage-help ;; - esac - - exit 0 -} - -case $# in - 0) echo "Unknown command: $*"; usage-help;; -esac - 
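The compare-fields and compare-version routines above implement SemVer 2.0.0 precedence: numeric identifiers compare numerically and rank below alphanumeric ones, strings compare in ASCII order, and a pre-release list that is a prefix of a longer one ranks lower. A compact TypeScript sketch of the pre-release rule (names are illustrative assumptions):

```typescript
// SemVer 2.0.0 pre-release precedence, mirroring compare-fields above:
// numeric identifiers compare numerically and rank below alphanumeric ones;
// a list that is a prefix of a longer list ranks lower.
function compareIdentifiers(a: string, b: string): number {
	const aNum = /^\d+$/.test(a);
	const bNum = /^\d+$/.test(b);
	if (aNum && bNum) return Math.sign(Number(a) - Number(b));
	if (aNum) return -1;
	if (bNum) return 1;
	return a < b ? -1 : a > b ? 1 : 0;
}

function comparePrerelease(left: string[], right: string[]): number {
	for (let i = 0; i < Math.max(left.length, right.length); i++) {
		if (left[i] === undefined) return -1; // left is a prefix of right
		if (right[i] === undefined) return 1;
		const order = compareIdentifiers(left[i], right[i]);
		if (order !== 0) return order;
	}
	return 0;
}

// comparePrerelease(['rc', '83'], ['rc', '88']) === -1
```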
-case $1 in - --help|-h) echo -e "$USAGE"; exit 0;; - --version|-v) usage-version ;; - bump) shift; command-bump "$@";; - get) shift; command-get "$@";; - compare) shift; command-compare "$@";; - *) echo "Unknown arguments: $*"; usage-help;; -esac diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml deleted file mode 100644 index 4ecc900801..0000000000 --- a/.github/workflows/build-images.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: build-images -on: - release: - types: - - published - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Sleep for 30 seconds - uses: jakejarvis/wait-action@master - with: - time: '30s' - - - name: Checkout - uses: actions/checkout@v2 - - - name: Build GitHub Container Registry - uses: ./.github/actions/build-images - with: - registry: "ghcr.io" - repository: "${{ github.repository }}" - username: "${{ secrets.REGISTRY_USERNAME }}" - password: "${{ secrets.REGISTRY_PASSWORD }}" - version: "${{ github.ref }}" - latest: "true" - push: "true" - - - name: Build Docker Hub - uses: ./.github/actions/build-images - with: - registry: "docker.io" - repository: "${{ github.repository }}" - username: "${{ secrets.DOCKERHUB_USERNAME }}" - password: "${{ secrets.DOCKERHUB_PASSWORD }}" - version: "${{ github.ref }}" - latest: "true" - push: "true" diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml deleted file mode 100644 index 5071a2aa14..0000000000 --- a/.github/workflows/create-release.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: create-release -on: - push: - tags: - - 'v*' -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Create Release - id: create_release - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.REPOSITORY_TOKEN }} - with: - tag_name: ${{ github.ref }} - release_name: ${{ github.ref }} - body: | - Directus ${{ github.ref }} - draft: false - prerelease: false diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-full.yml similarity index 91% rename from .github/workflows/e2e-tests.yml rename to .github/workflows/e2e-full.yml index f4db1a211b..abfe21f7cd 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-full.yml @@ -1,4 +1,4 @@ -name: Run e2e tests +name: E2E on: push: branches: @@ -10,12 +10,13 @@ jobs: fail-fast: false matrix: db: ['mssql', 'mysql', 'postgres', 'maria', 'sqlite3'] - node-version: ['12-alpine', '14-alpine', '16-alpine'] + # node-version: ['12-alpine', '14-alpine', '16-alpine'] + node-version: ['16-alpine'] env: CACHED_IMAGE: ghcr.io/directus/directus-e2e-test-cache:${{ matrix.node-version }} steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.9.0 + uses: styfle/cancel-workflow-action@0.9.1 with: access_token: ${{ secrets.GITHUB_TOKEN }} - name: Login to GitHub Container Registry diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml new file mode 100644 index 0000000000..da384331de --- /dev/null +++ b/.github/workflows/e2e.yml @@ -0,0 +1,42 @@ +name: E2E +on: + pull_request: + branches: + - main +jobs: + tests: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + db: ['postgres'] + node-version: ['16-alpine'] + env: + CACHED_IMAGE: ghcr.io/directus/directus-e2e-test-cache:${{ matrix.node-version }} + steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.9.1 + with: + access_token: ${{ secrets.GITHUB_TOKEN }} + - uses: actions/checkout@v2 + - uses: actions/setup-node@v2 + with: + 
node-version: '16' + - name: restore node_modules cache + uses: actions/cache@v2 + with: + path: | + node_modules + **/node_modules + key: ${{ runner.os }}-${{ hashFiles('**/package-lock.json') }} + - name: Install dependencies + run: | + npm install + - name: Build + run: | + npm run build + - name: Run tests + env: + TEST_NODE_VERSION: ${{ matrix.node-version }} + TEST_DB: ${{ matrix.db }} + run: npm run test:e2e diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index a09b6fa102..2911c4a036 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -14,7 +14,7 @@ jobs: steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.9.0 + uses: styfle/cancel-workflow-action@0.9.1 with: access_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000000..e623131c5b --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,148 @@ +name: Release + +on: + push: + tags: + - 'v*' + +env: + GHCR_IMAGE: ghcr.io/${{ github.repository }} + DOCKERHUB_IMAGE: ${{ github.repository }} + +jobs: + create-release: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Create Release + id: create_release + uses: actions/create-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: ${{ github.ref }} + release_name: ${{ github.ref }} + body: | + Directus ${{ github.ref }} + draft: false + prerelease: false + + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Use Node.js + uses: actions/setup-node@v2 + with: + node-version: '16.x' + + - uses: c-hive/gha-npm-cache@v1 + - run: npm ci + - run: npm run build + - run: node docker/pack + + - name: Cache build artifacts + uses: actions/cache@v2 + with: + path: '**/dist' + key: build-artifacts-${{ github.sha }} + + publish-npm: + runs-on: ubuntu-latest + needs: build + steps: + - uses: actions/checkout@v2 + - name: Restore build artifacts + uses: actions/cache@v2 + with: + path: '**/dist' + key: build-artifacts-${{ github.sha }} + - name: Use Node.js + uses: actions/setup-node@v2 + with: + node-version: '16.x' + registry-url: 'https://registry.npmjs.org' + + - run: npm ci + + - run: npx lerna publish from-git --no-verify-access --yes + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + + build-images: + runs-on: ubuntu-latest + needs: build + steps: + - uses: actions/checkout@v2 + + - name: Restore build artifacts + uses: actions/cache@v2 + with: + path: '**/dist' + key: build-artifacts-${{ github.sha }} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + + - name: Cache Docker layers + uses: actions/cache@v2 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx- + + - name: Docker meta + id: meta + uses: docker/metadata-action@v3 + with: + images: | + ${{ env.DOCKERHUB_IMAGE }} + ${{ env.GHCR_IMAGE }} + # Remove this once v9 is released + flavor: | + latest=true + tags: | + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + + - name: Login to DockerHub + uses: docker/login-action@v1 + if: ${{ env.DOCKERHUB_IMAGE }} + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: Login to GHCR + uses: docker/login-action@v1 + if: ${{ env.GHCR_IMAGE }} + with: + registry: 
ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push + uses: docker/build-push-action@v2 + with: + context: . + file: './docker/Dockerfile' + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + platforms: linux/amd64,linux/arm64 + push: true + cache-from: type=local,src=/tmp/.buildx-cache + cache-to: type=local,dest=/tmp/.buildx-cache-new + + # Temp fix + # https://github.com/docker/build-push-action/issues/252 + # https://github.com/moby/buildkit/issues/1896 + - name: Move cache + run: | + rm -rf /tmp/.buildx-cache + mv /tmp/.buildx-cache-new /tmp/.buildx-cache diff --git a/Dockerfile b/Dockerfile index 1c12ca1d99..4e488976fc 100644 --- a/Dockerfile +++ b/Dockerfile @@ -26,32 +26,12 @@ FROM node:${NODE_VERSION} #ENV TNS_ADMIN /usr/lib/instantclient #ENV ORACLE_HOME /usr/lib/instantclient -RUN npm i -g lerna - WORKDIR /directus -COPY package*.json ./ -COPY lerna.json ./ -COPY api/package.json api/ -COPY api/cli.js api/ -COPY app/package.json app/ -COPY docs/package.json docs/ -COPY packages/create-directus-project/package.json packages/create-directus-project/ -COPY packages/create-directus-project/lib/index.js packages/create-directus-project/lib/ -COPY packages/drive/package.json packages/drive/ -COPY packages/drive-azure/package.json packages/drive-azure/ -COPY packages/drive-gcs/package.json packages/drive-gcs/ -COPY packages/drive-s3/package.json packages/drive-s3/ -COPY packages/format-title/package.json packages/format-title/ -COPY packages/gatsby-source-directus/package.json packages/gatsby-source-directus/ -COPY packages/schema/package.json packages/schema/ -COPY packages/sdk/package.json packages/sdk/ -COPY packages/specs/package.json packages/specs/ - -RUN npx lerna bootstrap - COPY . . 
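The release workflow above lets docker/metadata-action expand a vX.Y.Z git tag into version, major.minor, and major image tags, plus latest while the flavor override is in place. A rough TypeScript equivalent of that expansion, assuming a plain release tag with no pre-release suffix:

```typescript
// Fan a release tag out into the image tags the metadata-action patterns
// ({{version}}, {{major}}.{{minor}}, {{major}}) would generate.
function imageTags(gitTag: string): string[] {
	const match = /^v?(\d+)\.(\d+)\.(\d+)$/.exec(gitTag);
	if (!match) return []; // pre-release tags are out of scope for this sketch
	const [, major, minor, patch] = match;
	return [`${major}.${minor}.${patch}`, `${major}.${minor}`, major, 'latest'];
}

// imageTags('v9.0.0') -> ['9.0.0', '9.0', '9', 'latest']
```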
+RUN npm install + WORKDIR /directus/api CMD ["sh", "-c", "node ./dist/cli/index.js bootstrap; node ./dist/start.js;"] diff --git a/api/example.env b/api/example.env index 9249c9483c..9ca6f4b637 100644 --- a/api/example.env +++ b/api/example.env @@ -103,6 +103,7 @@ ACCESS_TOKEN_TTL="15m" REFRESH_TOKEN_TTL="7d" REFRESH_TOKEN_COOKIE_SECURE="false" REFRESH_TOKEN_COOKIE_SAME_SITE="lax" +REFRESH_TOKEN_COOKIE_NAME="directus_refresh_token" CORS_ENABLED="true" CORS_ORIGIN="true" diff --git a/api/package.json b/api/package.json index 1d28e0ae3e..cb365eb377 100644 --- a/api/package.json +++ b/api/package.json @@ -1,6 +1,6 @@ { "name": "directus", - "version": "9.0.0-rc.83", + "version": "9.0.0-rc.88", "license": "GPL-3.0-only", "homepage": "https://github.com/directus/directus#readme", "description": "Directus is a real-time API and App dashboard for managing SQL database content.", @@ -55,9 +55,8 @@ "prebuild": "npm run cleanup", "build": "tsc --build && copyfiles \"src/**/*.*\" -e \"src/**/*.ts\" -u 1 dist", "cleanup": "rimraf dist", - "dev": "cross-env DIRECTUS_DEV=true NODE_ENV=development ts-node-dev --files --transpile-only --respawn --watch \".env\" --inspect --exit-child -- src/start.ts", - "cli": "cross-env DIRECTUS_DEV=true NODE_ENV=development ts-node --script-mode --transpile-only src/cli/index.ts", - "prepublishOnly": "npm run build" + "dev": "cross-env NODE_ENV=development SERVE_APP=false ts-node-dev --files --transpile-only --respawn --watch \".env\" --inspect --exit-child -- src/start.ts", + "cli": "cross-env NODE_ENV=development SERVE_APP=false ts-node --script-mode --transpile-only src/cli/index.ts" }, "engines": { "node": ">=12.20.0" @@ -69,15 +68,15 @@ "example.env" ], "dependencies": { - "@directus/app": "9.0.0-rc.83", - "@directus/drive": "9.0.0-rc.83", - "@directus/drive-azure": "9.0.0-rc.83", - "@directus/drive-gcs": "9.0.0-rc.83", - "@directus/drive-s3": "9.0.0-rc.83", - "@directus/format-title": "9.0.0-rc.83", - "@directus/schema": "9.0.0-rc.83", - "@directus/shared": "9.0.0-rc.83", - "@directus/specs": "9.0.0-rc.83", + "@directus/app": "9.0.0-rc.88", + "@directus/drive": "9.0.0-rc.88", + "@directus/drive-azure": "9.0.0-rc.88", + "@directus/drive-gcs": "9.0.0-rc.88", + "@directus/drive-s3": "9.0.0-rc.88", + "@directus/format-title": "9.0.0-rc.88", + "@directus/schema": "9.0.0-rc.88", + "@directus/shared": "9.0.0-rc.88", + "@directus/specs": "9.0.0-rc.88", "@godaddy/terminus": "^4.9.0", "@rollup/plugin-alias": "^3.1.2", "@rollup/plugin-virtual": "^2.0.3", @@ -99,14 +98,13 @@ "dotenv": "^10.0.0", "eventemitter2": "^6.4.3", "execa": "^5.1.1", - "exif-reader": "^1.0.3", + "exifr": "^7.1.2", "express": "^4.17.1", "express-session": "^1.17.2", "fs-extra": "^10.0.0", "grant": "^5.4.14", "graphql": "^15.5.0", "graphql-compose": "^9.0.1", - "icc": "^2.0.0", "inquirer": "^8.1.1", "joi": "^17.3.0", "js-yaml": "^4.1.0", @@ -115,7 +113,7 @@ "jsonwebtoken": "^8.5.1", "keyv": "^4.0.3", "knex": "^0.95.6", - "knex-schema-inspector": "^1.5.7", + "knex-schema-inspector": "1.5.13", "liquidjs": "^9.25.0", "lodash": "^4.17.21", "macos-release": "^2.4.1", @@ -125,12 +123,13 @@ "node-cron": "^3.0.0", "node-machine-id": "^1.1.12", "nodemailer": "^6.6.1", + "object-hash": "^2.2.0", "openapi3-ts": "^2.0.0", "ora": "^5.4.0", "otplib": "^12.0.1", - "pino": "^6.11.3", + "pino": "6.13.0", "pino-colada": "^2.1.0", - "pino-http": "^5.5.0", + "pino-http": "5.6.0", "prettier": "^2.3.1", "qs": "^6.9.4", "rate-limiter-flexible": "^2.2.2", @@ -138,6 +137,7 @@ "rollup": "^2.52.1", "sharp": "^0.28.3", 
"stream-json": "^1.7.1", + "update-check": "^1.5.4", "uuid": "^8.3.2", "uuid-validate": "0.0.3" }, @@ -151,43 +151,43 @@ "memcached": "^2.2.2", "mysql": "^2.18.1", "nodemailer-mailgun-transport": "^2.1.3", - "oracledb": "^5.0.0", "pg": "^8.6.0", "sqlite3": "^5.0.2", "tedious": "^11.0.8" }, "gitHead": "24621f3934dc77eb23441331040ed13c676ceffd", "devDependencies": { - "@types/async": "3.2.6", + "@types/async": "3.2.7", "@types/atob": "2.1.2", - "@types/body-parser": "1.19.0", - "@types/busboy": "0.2.3", + "@types/body-parser": "1.19.1", + "@types/busboy": "0.2.4", "@types/cookie-parser": "1.4.2", - "@types/cors": "2.8.10", + "@types/cors": "2.8.12", "@types/destroy": "1.0.0", - "@types/express": "4.17.12", + "@types/express": "4.17.13", "@types/express-pino-logger": "4.0.2", - "@types/express-session": "1.17.3", - "@types/fs-extra": "9.0.11", - "@types/inquirer": "7.3.2", - "@types/js-yaml": "4.0.1", - "@types/json2csv": "5.0.2", - "@types/jsonwebtoken": "8.5.2", - "@types/keyv": "3.1.1", - "@types/lodash": "4.14.170", + "@types/express-session": "1.17.4", + "@types/fs-extra": "9.0.12", + "@types/inquirer": "7.3.3", + "@types/js-yaml": "4.0.2", + "@types/json2csv": "5.0.3", + "@types/jsonwebtoken": "8.5.4", + "@types/keyv": "3.1.2", + "@types/lodash": "4.14.172", "@types/mime-types": "2.1.0", "@types/ms": "0.7.31", "@types/node": "15.12.2", - "@types/node-cron": "2.0.3", - "@types/nodemailer": "6.4.2", - "@types/qs": "6.9.6", - "@types/sharp": "0.28.3", - "@types/stream-json": "1.7.0", - "@types/uuid": "8.3.0", + "@types/node-cron": "2.0.4", + "@types/nodemailer": "6.4.4", + "@types/object-hash": "2.1.1", + "@types/qs": "6.9.7", + "@types/sharp": "0.28.5", + "@types/stream-json": "1.7.1", + "@types/uuid": "8.3.1", "@types/uuid-validate": "0.0.1", "copyfiles": "2.4.1", "cross-env": "7.0.3", - "ts-node-dev": "1.1.7", - "typescript": "4.3.4" + "ts-node-dev": "1.1.8", + "typescript": "4.3.5" } } diff --git a/api/src/app.ts b/api/src/app.ts index 5a470f813a..3f05840818 100644 --- a/api/src/app.ts +++ b/api/src/app.ts @@ -24,7 +24,7 @@ import settingsRouter from './controllers/settings'; import usersRouter from './controllers/users'; import utilsRouter from './controllers/utils'; import webhooksRouter from './controllers/webhooks'; -import { isInstalled, validateDBConnection } from './database'; +import { isInstalled, validateDBConnection, validateMigrations } from './database'; import { emitAsyncSafe } from './emitter'; import env from './env'; import { InvalidPayloadException } from './exceptions'; @@ -47,6 +47,12 @@ import { session } from './middleware/session'; export default async function createApp(): Promise { validateEnv(['KEY', 'SECRET']); + try { + new URL(env.PUBLIC_URL); + } catch { + logger.warn('PUBLIC_URL is not a valid URL'); + } + await validateDBConnection(); if ((await isInstalled()) === false) { @@ -54,6 +60,10 @@ export default async function createApp(): Promise { process.exit(1); } + if ((await validateMigrations()) === false) { + logger.warn(`Database migrations have not all been run`); + } + await initializeExtensions(); registerExtensionHooks(); @@ -99,7 +109,15 @@ export default async function createApp(): Promise { app.use(cors); } - if (!('DIRECTUS_DEV' in process.env)) { + app.get('/', (req, res, next) => { + if (env.ROOT_REDIRECT) { + res.redirect(env.ROOT_REDIRECT); + } else { + next(); + } + }); + + if (env.SERVE_APP) { const adminPath = require.resolve('@directus/app/dist/index.html'); const publicUrl = env.PUBLIC_URL.endsWith('/') ? 
env.PUBLIC_URL : env.PUBLIC_URL + '/'; @@ -107,14 +125,6 @@ export default async function createApp(): Promise { let html = fse.readFileSync(adminPath, 'utf-8'); html = html.replace(//, `\n\t\t`); - app.get('/', (req, res, next) => { - if (env.ROOT_REDIRECT) { - res.redirect(env.ROOT_REDIRECT); - } else { - next(); - } - }); - app.get('/admin', (req, res) => res.send(html)); app.use('/admin', express.static(path.join(adminPath, '..'))); app.use('/admin/*', (req, res) => { diff --git a/api/src/cache.ts b/api/src/cache.ts index 423a28b323..f19a92d9e1 100644 --- a/api/src/cache.ts +++ b/api/src/cache.ts @@ -12,12 +12,12 @@ export function getCache(): { cache: Keyv | null; schemaCache: Keyv | null } { if (env.CACHE_ENABLED === true && cache === null) { validateEnv(['CACHE_NAMESPACE', 'CACHE_TTL', 'CACHE_STORE']); cache = getKeyvInstance(ms(env.CACHE_TTL as string)); - cache.on('error', (err) => logger.error(err)); + cache.on('error', (err) => logger.warn(err, `[cache] ${err}`)); } if (env.CACHE_SCHEMA !== false && schemaCache === null) { schemaCache = getKeyvInstance(typeof env.CACHE_SCHEMA === 'string' ? ms(env.CACHE_SCHEMA) : undefined); - schemaCache.on('error', (err) => logger.error(err)); + schemaCache.on('error', (err) => logger.warn(err, `[cache] ${err}`)); } return { cache, schemaCache }; @@ -43,7 +43,11 @@ function getConfig(store: 'memory' | 'redis' | 'memcache' = 'memory', ttl: numbe if (store === 'redis') { const KeyvRedis = require('@keyv/redis'); - config.store = new KeyvRedis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_')); + + config.store = new KeyvRedis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'), { + commandTimeout: 500, + retryStrategy: false, + }); } if (store === 'memcache') { diff --git a/api/src/cli/commands/bootstrap/index.ts b/api/src/cli/commands/bootstrap/index.ts index af305fcc6b..75ebd55a55 100644 --- a/api/src/cli/commands/bootstrap/index.ts +++ b/api/src/cli/commands/bootstrap/index.ts @@ -1,3 +1,4 @@ +import { Knex } from 'knex'; import { nanoid } from 'nanoid'; import runMigrations from '../../../database/migrations/run'; import installDatabase from '../../../database/seeds/run'; @@ -5,19 +6,16 @@ import env from '../../../env'; import logger from '../../../logger'; import { getSchema } from '../../../utils/get-schema'; import { RolesService, UsersService, SettingsService } from '../../../services'; -import getDatabase, { isInstalled, hasDatabaseConnection } from '../../../database'; +import getDatabase, { isInstalled, validateDBConnection, hasDatabaseConnection } from '../../../database'; import { SchemaOverview } from '../../../types'; export default async function bootstrap({ skipAdminInit }: { skipAdminInit?: boolean }): Promise { logger.info('Initializing bootstrap...'); - if ((await isDatabaseAvailable()) === false) { - logger.error(`Can't connect to the database`); - process.exit(1); - } - const database = getDatabase(); + await waitForDatabase(database); + if ((await isInstalled()) === false) { logger.info('Installing Directus system tables...'); @@ -48,19 +46,20 @@ export default async function bootstrap({ skipAdminInit }: { skipAdminInit?: boo process.exit(0); } -async function isDatabaseAvailable() { +async function waitForDatabase(database: Knex) { const tries = 5; const secondsBetweenTries = 5; for (let i = 0; i < tries; i++) { - if (await hasDatabaseConnection()) { + if (await hasDatabaseConnection(database)) { return true; } await new Promise((resolve) => setTimeout(resolve, secondsBetweenTries * 1000)); } - return false; + // This 
will throw and exit the process if the database is not available + await validateDBConnection(database); } async function createDefaultAdmin(schema: SchemaOverview) { diff --git a/api/src/cli/commands/init/questions.ts b/api/src/cli/commands/init/questions.ts index 26516b8667..3ab6468786 100644 --- a/api/src/cli/commands/init/questions.ts +++ b/api/src/cli/commands/init/questions.ts @@ -50,6 +50,13 @@ const password = (): Record => ({ mask: '*', }); +const encrypt = (): Record => ({ + type: 'confirm', + name: 'options__encrypt', + message: 'Encrypt Connection:', + default: false, +}); + const ssl = (): Record => ({ type: 'confirm', name: 'ssl', @@ -62,5 +69,5 @@ export const databaseQuestions = { mysql: [host, port, database, user, password], pg: [host, port, database, user, password, ssl], oracledb: [host, port, database, user, password], - mssql: [host, port, database, user, password], + mssql: [host, port, database, user, password, encrypt], }; diff --git a/api/src/cli/utils/create-db-connection.ts b/api/src/cli/utils/create-db-connection.ts index 13d6273381..2eac7fc6df 100644 --- a/api/src/cli/utils/create-db-connection.ts +++ b/api/src/cli/utils/create-db-connection.ts @@ -9,6 +9,7 @@ export type Credentials = { user?: string; password?: string; ssl?: boolean; + options__encrypt?: boolean; }; export default function createDBConnection( client: 'sqlite3' | 'mysql' | 'pg' | 'oracledb' | 'mssql', @@ -23,26 +24,26 @@ export default function createDBConnection( filename: filename as string, }; } else { - if (client !== 'pg') { - const { host, port, database, user, password } = credentials as Credentials; + const { host, port, database, user, password } = credentials as Credentials; - connection = { - host: host, - port: Number(port), - database: database, - user: user, - password: password, - }; - } else { - const { host, port, database, user, password, ssl } = credentials as Credentials; + connection = { + host: host, + port: Number(port), + database: database, + user: user, + password: password, + }; - connection = { - host: host, - port: Number(port), - database: database, - user: user, - password: password, - ssl: ssl, + if (client === 'pg') { + const { ssl } = credentials as Credentials; + connection['ssl'] = ssl; + } + + if (client === 'mssql') { + const { options__encrypt } = credentials as Credentials; + + (connection as Knex.MsSqlConnectionConfig)['options'] = { + encrypt: options__encrypt, }; } } diff --git a/api/src/cli/utils/create-env/env-stub.liquid b/api/src/cli/utils/create-env/env-stub.liquid index 21ca7e197b..d381a60185 100644 --- a/api/src/cli/utils/create-env/env-stub.liquid +++ b/api/src/cli/utils/create-env/env-stub.liquid @@ -38,6 +38,7 @@ ACCESS_TOKEN_TTL="15m" REFRESH_TOKEN_TTL="7d" REFRESH_TOKEN_COOKIE_SECURE=false REFRESH_TOKEN_COOKIE_SAME_SITE="lax" +REFRESH_TOKEN_COOKIE_NAME="directus_refresh_token" #################################################################################################### ## SSO (OAuth) Providers diff --git a/api/src/constants.ts b/api/src/constants.ts index 2fdbd23fb0..889a792792 100644 --- a/api/src/constants.ts +++ b/api/src/constants.ts @@ -1,42 +1,42 @@ -import { Transformation } from './types'; +import { TransformationParams } from './types'; -export const SYSTEM_ASSET_ALLOW_LIST: Transformation[] = [ +export const SYSTEM_ASSET_ALLOW_LIST: TransformationParams[] = [ { key: 'system-small-cover', - width: 64, - height: 64, - fit: 'cover', + transforms: [['resize', { width: 64, height: 64, fit: 'cover' }]], }, { key: 
'system-small-contain', - width: 64, - fit: 'contain', + transforms: [['resize', { width: 64, fit: 'contain' }]], }, { key: 'system-medium-cover', - width: 300, - height: 300, - fit: 'cover', + transforms: [['resize', { width: 300, height: 300, fit: 'cover' }]], }, { key: 'system-medium-contain', - width: 300, - fit: 'contain', + transforms: [['resize', { width: 300, fit: 'contain' }]], }, { key: 'system-large-cover', - width: 800, - height: 600, - fit: 'cover', + transforms: [['resize', { width: 800, height: 800, fit: 'cover' }]], }, { key: 'system-large-contain', - width: 800, - fit: 'contain', + transforms: [['resize', { width: 800, fit: 'contain' }]], }, ]; -export const ASSET_TRANSFORM_QUERY_KEYS = ['key', 'width', 'height', 'fit', 'withoutEnlargement', 'quality']; +export const ASSET_TRANSFORM_QUERY_KEYS = [ + 'key', + 'transforms', + 'width', + 'height', + 'format', + 'fit', + 'quality', + 'withoutEnlargement', +]; export const FILTER_VARIABLES = ['$NOW', '$CURRENT_USER', '$CURRENT_ROLE']; diff --git a/api/src/controllers/assets.ts b/api/src/controllers/assets.ts index b64775af98..b48e939aa8 100644 --- a/api/src/controllers/assets.ts +++ b/api/src/controllers/assets.ts @@ -10,7 +10,7 @@ import { ForbiddenException, InvalidQueryException, RangeNotSatisfiableException import useCollection from '../middleware/use-collection'; import { AssetsService, PayloadService } from '../services'; import storage from '../storage'; -import { Transformation } from '../types/assets'; +import { TransformationParams, TransformationMethods, TransformationPreset } from '../types/assets'; import asyncHandler from '../utils/async-handler'; const router = Router(); @@ -68,26 +68,63 @@ router.get( if ('key' in transformation && Object.keys(transformation).length > 1) { throw new InvalidQueryException(`You can't combine the "key" query parameter with any other transformation.`); } - if ('quality' in transformation && (Number(transformation.quality) < 1 || Number(transformation.quality) > 100)) { - throw new InvalidQueryException(`"quality" Parameter has to between 1 to 100`); + + if ('transforms' in transformation) { + let transforms: unknown; + + // Try parse the JSON array + try { + transforms = JSON.parse(transformation['transforms'] as string); + } catch { + throw new InvalidQueryException(`"transforms" Parameter needs to be a JSON array of allowed transformations.`); + } + + // Check if it is actually an array. 
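Seen from a client, the transforms parameter being validated here is a JSON-encoded array of [method, options] tuples appended to the asset URL. A hypothetical request-building sketch (base URL and file id are placeholders, not values from this patch); the array check named in the comment above continues directly below:

```typescript
// Build an asset URL with the new "transforms" parameter: a JSON array of
// [method, options] tuples. Base URL and file id below are placeholders.
const transforms: [string, Record<string, unknown>][] = [
	['resize', { width: 300, height: 300, fit: 'cover' }],
];

const url = new URL('https://example.directus.app/assets/some-file-id');
url.searchParams.set('transforms', JSON.stringify(transforms));

// -> https://example.directus.app/assets/some-file-id?transforms=%5B%5B%22resize%22...
console.log(url.toString());
```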
+ if (!Array.isArray(transforms)) { + throw new InvalidQueryException(`"transforms" Parameter needs to be a JSON array of allowed transformations.`); + } + + // Check against ASSETS_TRANSFORM_MAX_OPERATIONS + if (transforms.length > Number(env.ASSETS_TRANSFORM_MAX_OPERATIONS)) { + throw new InvalidQueryException( + `"transforms" Parameter is only allowed ${env.ASSETS_TRANSFORM_MAX_OPERATIONS} transformations.` + ); + } + + // Check the transformations are valid + transforms.forEach((transform) => { + const name = transform[0]; + + if (!TransformationMethods.includes(name)) { + throw new InvalidQueryException(`"transforms" Parameter does not allow "${name}" as a transformation.`); + } + }); + + transformation.transforms = transforms; } - const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key); + const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key!); const allKeys: string[] = [ ...systemKeys, - ...(assetSettings.storage_asset_presets || []).map((transformation: Transformation) => transformation.key), + ...(assetSettings.storage_asset_presets || []).map((transformation: TransformationParams) => transformation.key), ]; // For use in the next request handler res.locals.shortcuts = [...SYSTEM_ASSET_ALLOW_LIST, ...(assetSettings.storage_asset_presets || [])]; res.locals.transformation = transformation; - if (Object.keys(transformation).length === 0) { + if ( + Object.keys(transformation).length === 0 || + ('transforms' in transformation && transformation.transforms!.length === 0) + ) { return next(); } + if (assetSettings.storage_asset_transform === 'all') { - if (transformation.key && allKeys.includes(transformation.key as string) === false) + if (transformation.key && allKeys.includes(transformation.key as string) === false) { throw new InvalidQueryException(`Key "${transformation.key}" isn't configured.`); + } + return next(); } else if (assetSettings.storage_asset_transform === 'presets') { if (allKeys.includes(transformation.key as string)) return next(); @@ -107,9 +144,9 @@ router.get( schema: req.schema, }); - const transformation: Transformation = res.locals.transformation.key - ? res.locals.shortcuts.find( - (transformation: Transformation) => transformation.key === res.locals.transformation.key + const transformation: TransformationParams | TransformationPreset = res.locals.transformation.key + ? 
(res.locals.shortcuts as TransformationPreset[]).find( + (transformation) => transformation.key === res.locals.transformation.key ) : res.locals.transformation; diff --git a/api/src/controllers/auth.ts b/api/src/controllers/auth.ts index 2f6229a9fd..4c37c1654d 100644 --- a/api/src/controllers/auth.ts +++ b/api/src/controllers/auth.ts @@ -11,7 +11,8 @@ import { respond } from '../middleware/respond'; import { AuthenticationService, UsersService } from '../services'; import asyncHandler from '../utils/async-handler'; import getEmailFromProfile from '../utils/get-email-from-profile'; -import { toArray } from '../utils/to-array'; +import { toArray } from '@directus/shared/utils'; +import logger from '../logger'; const router = Router(); @@ -59,7 +60,7 @@ router.post( } if (mode === 'cookie') { - res.cookie('directus_refresh_token', refreshToken, { + res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, { httpOnly: true, domain: env.REFRESH_TOKEN_COOKIE_DOMAIN, maxAge: ms(env.REFRESH_TOKEN_TTL as string), @@ -88,7 +89,7 @@ router.post( schema: req.schema, }); - const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token; + const currentRefreshToken = req.body.refresh_token || req.cookies[env.REFRESH_TOKEN_COOKIE_NAME]; if (!currentRefreshToken) { throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`); @@ -107,7 +108,7 @@ router.post( } if (mode === 'cookie') { - res.cookie('directus_refresh_token', refreshToken, { + res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, { httpOnly: true, domain: env.REFRESH_TOKEN_COOKIE_DOMAIN, maxAge: ms(env.REFRESH_TOKEN_TTL as string), @@ -136,7 +137,7 @@ router.post( schema: req.schema, }); - const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token; + const currentRefreshToken = req.body.refresh_token || req.cookies[env.REFRESH_TOKEN_COOKIE_NAME]; if (!currentRefreshToken) { throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`); @@ -144,8 +145,8 @@ router.post( await authenticationService.logout(currentRefreshToken); - if (req.cookies.directus_refresh_token) { - res.clearCookie('directus_refresh_token', { + if (req.cookies[env.REFRESH_TOKEN_COOKIE_NAME]) { + res.clearCookie(env.REFRESH_TOKEN_COOKIE_NAME, { httpOnly: true, domain: env.REFRESH_TOKEN_COOKIE_DOMAIN, secure: env.REFRESH_TOKEN_COOKIE_SECURE ?? 
false, @@ -161,7 +162,7 @@ router.post( router.post( '/password/request', asyncHandler(async (req, res, next) => { - if (!req.body.email) { + if (typeof req.body.email !== 'string') { throw new InvalidPayloadException(`"email" field is required.`); } @@ -180,6 +181,7 @@ router.post( if (err instanceof InvalidPayloadException) { throw err; } else { + logger.warn(err, `[email] ${err}`); return next(); } } @@ -190,11 +192,11 @@ router.post( router.post( '/password/reset', asyncHandler(async (req, res, next) => { - if (!req.body.token) { + if (typeof req.body.token !== 'string') { throw new InvalidPayloadException(`"token" field is required.`); } - if (!req.body.password) { + if (typeof req.body.password !== 'string') { throw new InvalidPayloadException(`"password" field is required.`); } @@ -320,6 +322,9 @@ router.get( }); } catch (error) { emitStatus('fail'); + + logger.warn(error); + if (redirect) { let reason = 'UNKNOWN_EXCEPTION'; @@ -340,7 +345,7 @@ router.get( emitStatus('success'); if (redirect) { - res.cookie('directus_refresh_token', refreshToken, { + res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, { httpOnly: true, domain: env.REFRESH_TOKEN_COOKIE_DOMAIN, maxAge: ms(env.REFRESH_TOKEN_TTL as string), diff --git a/api/src/controllers/extensions.ts b/api/src/controllers/extensions.ts index 7bfa287604..72a1708815 100644 --- a/api/src/controllers/extensions.ts +++ b/api/src/controllers/extensions.ts @@ -3,18 +3,17 @@ import asyncHandler from '../utils/async-handler'; import { RouteNotFoundException } from '../exceptions'; import { listExtensions, getAppExtensionSource } from '../extensions'; import { respond } from '../middleware/respond'; -import { depluralize } from '@directus/shared/utils'; -import { AppExtensionType, Plural } from '@directus/shared/types'; -import { APP_EXTENSION_TYPES } from '@directus/shared/constants'; +import { depluralize, isAppExtension } from '@directus/shared/utils'; +import { Plural } from '@directus/shared/types'; const router = Router(); router.get( '/:type', asyncHandler(async (req, res, next) => { - const type = depluralize(req.params.type as Plural); + const type = depluralize(req.params.type as Plural); - if (APP_EXTENSION_TYPES.includes(type) === false) { + if (!isAppExtension(type)) { throw new RouteNotFoundException(req.path); } @@ -32,9 +31,9 @@ router.get( router.get( '/:type/index.js', asyncHandler(async (req, res) => { - const type = depluralize(req.params.type as Plural); + const type = depluralize(req.params.type as Plural); - if (APP_EXTENSION_TYPES.includes(type) === false) { + if (!isAppExtension(type)) { throw new RouteNotFoundException(req.path); } diff --git a/api/src/controllers/fields.ts b/api/src/controllers/fields.ts index 2d45235b2b..732e6a589c 100644 --- a/api/src/controllers/fields.ts +++ b/api/src/controllers/fields.ts @@ -6,7 +6,8 @@ import validateCollection from '../middleware/collection-exists'; import { respond } from '../middleware/respond'; import useCollection from '../middleware/use-collection'; import { FieldsService } from '../services/fields'; -import { Field, types } from '../types'; +import { Field, Type } from '@directus/shared/types'; +import { TYPES } from '@directus/shared/constants'; import asyncHandler from '../utils/async-handler'; const router = Router(); @@ -65,7 +66,7 @@ const newFieldSchema = Joi.object({ collection: Joi.string().optional(), field: Joi.string().required(), type: Joi.string() - .valid(...types, ...ALIAS_TYPES) + .valid(...TYPES, ...ALIAS_TYPES) .allow(null) .optional(), schema: 
diff --git a/api/src/controllers/fields.ts b/api/src/controllers/fields.ts
index 2d45235b2b..732e6a589c 100644
--- a/api/src/controllers/fields.ts
+++ b/api/src/controllers/fields.ts
@@ -6,7 +6,8 @@ import validateCollection from '../middleware/collection-exists';
import { respond } from '../middleware/respond';
import useCollection from '../middleware/use-collection';
import { FieldsService } from '../services/fields';
-import { Field, types } from '../types';
+import { Field, Type } from '@directus/shared/types';
+import { TYPES } from '@directus/shared/constants';
import asyncHandler from '../utils/async-handler';

const router = Router();

@@ -65,7 +66,7 @@ const newFieldSchema = Joi.object({
  collection: Joi.string().optional(),
  field: Joi.string().required(),
  type: Joi.string()
-   .valid(...types, ...ALIAS_TYPES)
+   .valid(...TYPES, ...ALIAS_TYPES)
    .allow(null)
    .optional(),
  schema: Joi.object({
@@ -93,7 +94,7 @@ router.post(
    throw new InvalidPayloadException(error.message);
  }

- const field: Partial<Field> & { field: string; type: typeof types[number] | null } = req.body;
+ const field: Partial<Field> & { field: string; type: Type | null } = req.body;

  await service.createField(req.params.collection, field);
@@ -152,7 +153,7 @@ router.patch(
  const updateSchema = Joi.object({
    type: Joi.string()
-     .valid(...types, ...ALIAS_TYPES)
+     .valid(...TYPES, ...ALIAS_TYPES)
      .allow(null),
    schema: Joi.object({
      default_value: Joi.any(),
@@ -183,7 +184,7 @@ router.patch(
    throw new InvalidPayloadException(`You need to provide "type" when providing "schema".`);
  }

- const fieldData: Partial<Field> & { field: string; type: typeof types[number] } = req.body;
+ const fieldData: Partial<Field> & { field: string; type: Type } = req.body;

  if (!fieldData.field) fieldData.field = req.params.field;
diff --git a/api/src/controllers/files.ts b/api/src/controllers/files.ts
index 88982d5b55..3538a94937 100644
--- a/api/src/controllers/files.ts
+++ b/api/src/controllers/files.ts
@@ -11,7 +11,7 @@ import { validateBatch } from '../middleware/validate-batch';
import { FilesService, MetaService } from '../services';
import { File, PrimaryKey } from '../types';
import asyncHandler from '../utils/async-handler';
-import { toArray } from '../utils/to-array';
+import { toArray } from '@directus/shared/utils';

const router = express.Router();
diff --git a/api/src/controllers/utils.ts b/api/src/controllers/utils.ts
index 1edf1e0ae7..a4414aebef 100644
--- a/api/src/controllers/utils.ts
+++ b/api/src/controllers/utils.ts
@@ -2,12 +2,13 @@ import argon2 from 'argon2';
import { Router } from 'express';
import Joi from 'joi';
import { nanoid } from 'nanoid';
-import { InvalidPayloadException, InvalidQueryException } from '../exceptions';
+import { ForbiddenException, InvalidPayloadException, InvalidQueryException } from '../exceptions';
import collectionExists from '../middleware/collection-exists';
import { respond } from '../middleware/respond';
import { RevisionsService, UtilsService, ImportService } from '../services';
import asyncHandler from '../utils/async-handler';
import Busboy from 'busboy';
+import { getCache } from '../cache';

const router = Router();

@@ -115,4 +116,20 @@ router.post(
  })
);

+router.post(
+  '/cache/clear',
+  asyncHandler(async (req, res) => {
+    if (req.accountability?.admin !== true) {
+      throw new ForbiddenException();
+    }
+
+    const { cache, schemaCache } = getCache();
+
+    await cache?.clear();
+    await schemaCache?.clear();
+
+    res.status(200).end();
+  })
+);
+
export default router;
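The new `POST /utils/cache/clear` endpoint lets admins flush both the data and schema caches without restarting the process. A hypothetical invocation (base URL and token are placeholders; any HTTP client works):

```ts
// The endpoint takes no body; expect 200 on success, 403 for non-admin tokens.
const response = await fetch('http://localhost:8055/utils/cache/clear', {
  method: 'POST',
  headers: { Authorization: 'Bearer <admin-token>' },
});

console.log(response.status); // 200
```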
diff --git a/api/src/database/index.ts b/api/src/database/index.ts
index 61824a64bd..14d31b1a4b 100644
--- a/api/src/database/index.ts
+++ b/api/src/database/index.ts
@@ -5,6 +5,9 @@ import env from '../env';
import logger from '../logger';
import { getConfigFromEnv } from '../utils/get-config-from-env';
import { validateEnv } from '../utils/validate-env';
+import fse from 'fs-extra';
+import path from 'path';
+import { merge } from 'lodash';

let database: Knex | null = null;
let inspector: ReturnType<typeof SchemaInspector> | null = null;
@@ -65,6 +68,13 @@ export default function getDatabase(): Knex {
    };
  }

+ if (env.DB_CLIENT === 'mssql') {
+   // This brings MS SQL in line with the other DB vendors. We shouldn't do any automatic
+   // timezone conversion on the database level, especially not when other database vendors don't
+   // act the same
+   merge(knexConfig, { connection: { options: { useUTC: false } } });
+ }
+
  database = knex(knexConfig);

  const times: Record<string, number> = {};
@@ -94,8 +104,8 @@ export function getSchemaInspector(): ReturnType<typeof SchemaInspector> {
  return inspector;
}

-export async function hasDatabaseConnection(): Promise<boolean> {
- const database = getDatabase();
+export async function hasDatabaseConnection(database?: Knex): Promise<boolean> {
+ database = database ?? getDatabase();

  try {
    if (env.DB_CLIENT === 'oracledb') {
@@ -103,15 +113,22 @@ export async function hasDatabaseConnection(): Promise<boolean> {
    } else {
      await database.raw('SELECT 1');
    }
+
    return true;
  } catch {
    return false;
  }
}

-export async function validateDBConnection(): Promise<void> {
+export async function validateDBConnection(database?: Knex): Promise<void> {
+ database = database ?? getDatabase();
+
  try {
-   await hasDatabaseConnection();
+   if (env.DB_CLIENT === 'oracledb') {
+     await database.raw('select 1 from DUAL');
+   } else {
+     await database.raw('SELECT 1');
+   }
  } catch (error) {
    logger.error(`Can't connect to the database.`);
    logger.error(error);
@@ -127,3 +144,35 @@ export async function isInstalled(): Promise<boolean> {
  // exists when using the installer CLI.
  return await inspector.hasTable('directus_collections');
}
+
+export async function validateMigrations(): Promise<boolean> {
+  const database = getDatabase();
+
+  try {
+    let migrationFiles = await fse.readdir(path.join(__dirname, 'migrations'));
+
+    const customMigrationsPath = path.resolve(env.EXTENSIONS_PATH, 'migrations');
+
+    let customMigrationFiles =
+      ((await fse.pathExists(customMigrationsPath)) && (await fse.readdir(customMigrationsPath))) || [];
+
+    migrationFiles = migrationFiles.filter(
+      (file: string) => file.startsWith('run') === false && file.endsWith('.d.ts') === false
+    );
+
+    customMigrationFiles = customMigrationFiles.filter((file: string) => file.endsWith('.js'));
+
+    migrationFiles.push(...customMigrationFiles);
+
+    const requiredVersions = migrationFiles.map((filePath) => filePath.split('-')[0]);
+    const completedVersions = (await database.select('version').from('directus_migrations')).map(
+      ({ version }) => version
+    );
+
+    return requiredVersions.every((version) => completedVersions.includes(version));
+  } catch (error) {
+    logger.error(`Database migrations cannot be found`);
+    logger.error(error);
+    throw process.exit(1);
+  }
+}
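`validateMigrations()` boils down to a set comparison: every migration file's version prefix (the part of the filename before the first `-`) must already appear in `directus_migrations`. A standalone sketch of that check, using hypothetical file names:

```ts
// Filenames follow the '<version>-<description>.js' convention used above.
const migrationFiles = ['20210716A-add-conditions-to-fields.js', '20210805B-change-image-metadata-structure.js'];

// Versions already recorded in the directus_migrations table.
const completedVersions = ['20210716A'];

const requiredVersions = migrationFiles.map((file) => file.split('-')[0]);
const upToDate = requiredVersions.every((version) => completedVersions.includes(version));

console.log(upToDate); // false: 20210805B still has to run
```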
diff --git a/api/src/database/migrations/20201105B-change-webhook-url-type.ts b/api/src/database/migrations/20201105B-change-webhook-url-type.ts
index 3107d4d9dd..b733d5a21b 100644
--- a/api/src/database/migrations/20201105B-change-webhook-url-type.ts
+++ b/api/src/database/migrations/20201105B-change-webhook-url-type.ts
@@ -1,7 +1,6 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
-import env from '../../env';

async function oracleAlterUrl(knex: Knex, type: string): Promise<void> {
  await knex.raw('ALTER TABLE "directus_webhooks" ADD "url__temp" ?', [knex.raw(type)]);
@@ -23,7 +22,7 @@ export async function up(knex: Knex): Promise<void> {
}

export async function down(knex: Knex): Promise<void> {
- if (env.DB_CLIENT === 'oracledb') {
+ if (knex.client instanceof Client_Oracledb) {
    await oracleAlterUrl(knex, 'VARCHAR2(255)');
    return;
  }
diff --git a/api/src/database/migrations/20210312A-webhooks-collections-text.ts b/api/src/database/migrations/20210312A-webhooks-collections-text.ts
index 489e0253f5..4e93c092d1 100644
--- a/api/src/database/migrations/20210312A-webhooks-collections-text.ts
+++ b/api/src/database/migrations/20210312A-webhooks-collections-text.ts
@@ -1,7 +1,6 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
-import env from '../../env';

async function oracleAlterCollections(knex: Knex, type: string): Promise<void> {
  await knex.raw('ALTER TABLE "directus_webhooks" ADD "collections__temp" ?', [knex.raw(type)]);
@@ -23,7 +22,7 @@ export async function up(knex: Knex): Promise<void> {
}

export async function down(knex: Knex): Promise<void> {
- if (env.DB_CLIENT === 'oracledb') {
+ if (knex.client instanceof Client_Oracledb) {
    await oracleAlterCollections(knex, 'VARCHAR2(255)');
    return;
  }
diff --git a/api/src/database/migrations/20210626A-change-filesize-bigint.ts b/api/src/database/migrations/20210626A-change-filesize-bigint.ts
index 53544c34c5..03b2a8b420 100644
--- a/api/src/database/migrations/20210626A-change-filesize-bigint.ts
+++ b/api/src/database/migrations/20210626A-change-filesize-bigint.ts
@@ -1,12 +1,22 @@
import { Knex } from 'knex';
+// @ts-ignore
+import Client_Oracledb from 'knex/lib/dialects/oracledb';

export async function up(knex: Knex): Promise<void> {
+ if (knex.client instanceof Client_Oracledb) {
+   return;
+ }
+
  await knex.schema.alterTable('directus_files', (table) => {
    table.bigInteger('filesize').nullable().defaultTo(null).alter();
  });
}

export async function down(knex: Knex): Promise<void> {
+ if (knex.client instanceof Client_Oracledb) {
+   return;
+ }
+
  await knex.schema.alterTable('directus_files', (table) => {
    table.integer('filesize').nullable().defaultTo(null).alter();
  });
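Replacing the `env.DB_CLIENT === 'oracledb'` check with `knex.client instanceof Client_Oracledb` makes these migrations depend only on the connection they are handed, not on Directus' environment handling. A sketch of the idea:

```ts
import { Knex } from 'knex';
// @ts-ignore - the dialect ships without type declarations
import Client_Oracledb from 'knex/lib/dialects/oracledb';

// The knex instance knows which dialect it was created with, so the check stays
// correct even when the migration runs against a connection that wasn't
// configured through Directus' env variables.
function isOracle(knex: Knex): boolean {
  return knex.client instanceof Client_Oracledb;
}
```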
diff --git a/api/src/database/migrations/20210716A-add-conditions-to-fields.ts b/api/src/database/migrations/20210716A-add-conditions-to-fields.ts
new file mode 100644
index 0000000000..efead1ddf5
--- /dev/null
+++ b/api/src/database/migrations/20210716A-add-conditions-to-fields.ts
@@ -0,0 +1,13 @@
+import { Knex } from 'knex';
+
+export async function up(knex: Knex): Promise<void> {
+  await knex.schema.alterTable('directus_fields', (table) => {
+    table.json('conditions');
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await knex.schema.alterTable('directus_fields', (table) => {
+    table.dropColumn('conditions');
+  });
+}
diff --git a/api/src/database/migrations/20210721A-add-default-folder.ts b/api/src/database/migrations/20210721A-add-default-folder.ts
new file mode 100644
index 0000000000..5bf17ef17a
--- /dev/null
+++ b/api/src/database/migrations/20210721A-add-default-folder.ts
@@ -0,0 +1,22 @@
+import { Knex } from 'knex';
+import { getDefaultIndexName } from '../../utils/get-default-index-name';
+
+const indexName = getDefaultIndexName('foreign', 'directus_settings', 'storage_default_folder');
+
+export async function up(knex: Knex): Promise<void> {
+  await knex.schema.alterTable('directus_settings', (table) => {
+    table
+      .uuid('storage_default_folder')
+      .references('id')
+      .inTable('directus_folders')
+      .withKeyName(indexName)
+      .onDelete('SET NULL');
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await knex.schema.alterTable('directus_settings', (table) => {
+    table.dropForeign(['storage_default_folder'], indexName);
+    table.dropColumn('storage_default_folder');
+  });
+}
diff --git a/api/src/database/migrations/20210802A-replace-groups.ts b/api/src/database/migrations/20210802A-replace-groups.ts
new file mode 100644
index 0000000000..7ae9e05373
--- /dev/null
+++ b/api/src/database/migrations/20210802A-replace-groups.ts
@@ -0,0 +1,49 @@
+import { Knex } from 'knex';
+import logger from '../../logger';
+
+export async function up(knex: Knex): Promise<void> {
+  const dividerGroups = await knex.select('*').from('directus_fields').where('interface', '=', 'group-divider');
+
+  for (const dividerGroup of dividerGroups) {
+    const newOptions: { showHeader: true; headerIcon?: string; headerColor?: string } = { showHeader: true };
+
+    if (dividerGroup.options) {
+      try {
+        const options =
+          typeof dividerGroup.options === 'string' ? JSON.parse(dividerGroup.options) : dividerGroup.options;
+
+        if (options.icon) newOptions.headerIcon = options.icon;
+        if (options.color) newOptions.headerColor = options.color;
+      } catch (err) {
+        logger.warn(`Couldn't convert previous options from field ${dividerGroup.collection}.${dividerGroup.field}`);
+        logger.warn(err);
+      }
+    }
+
+    try {
+      await knex('directus_fields')
+        .update({
+          interface: 'group-standard',
+          options: JSON.stringify(newOptions),
+        })
+        .where('id', '=', dividerGroup.id);
+    } catch (err) {
+      logger.warn(`Couldn't update ${dividerGroup.collection}.${dividerGroup.field} to new group interface`);
+      logger.warn(err);
+    }
+  }
+
+  await knex('directus_fields')
+    .update({
+      interface: 'group-standard',
+    })
+    .where({ interface: 'group-raw' });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await knex('directus_fields')
+    .update({
+      interface: 'group-raw',
+    })
+    .where('interface', '=', 'group-standard');
+}
diff --git a/api/src/database/migrations/20210803A-add-required-to-fields.ts b/api/src/database/migrations/20210803A-add-required-to-fields.ts
new file mode 100644
index 0000000000..352496d07a
--- /dev/null
+++ b/api/src/database/migrations/20210803A-add-required-to-fields.ts
@@ -0,0 +1,13 @@
+import { Knex } from 'knex';
+
+export async function up(knex: Knex): Promise<void> {
+  await knex.schema.alterTable('directus_fields', (table) => {
+    table.boolean('required').defaultTo(false);
+  });
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await knex.schema.alterTable('directus_fields', (table) => {
+    table.dropColumn('required');
+  });
+}
diff --git a/api/src/database/migrations/20210805A-update-groups.ts b/api/src/database/migrations/20210805A-update-groups.ts
new file mode 100644
index 0000000000..350268a52f
--- /dev/null
+++ b/api/src/database/migrations/20210805A-update-groups.ts
@@ -0,0 +1,35 @@
+import { Knex } from 'knex';
+
+export async function up(knex: Knex): Promise<void> {
+  const groups = await knex.select('*').from('directus_fields').where({ interface: 'group-standard' });
+
+  const raw = [];
+  const detail = [];
+
+  for (const group of groups) {
+    const options = typeof group.options === 'string' ? JSON.parse(group.options) : group.options || {};
+
+    if (options.showHeader === true) {
+      detail.push(group);
+    } else {
+      raw.push(group);
+    }
+  }
+
+  for (const field of raw) {
+    await knex('directus_fields').update({ interface: 'group-raw' }).where({ id: field.id });
+  }
+
+  for (const field of detail) {
+    await knex('directus_fields').update({ interface: 'group-detail' }).where({ id: field.id });
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  await knex('directus_fields')
+    .update({
+      interface: 'group-standard',
+    })
+    .where({ interface: 'group-detail' })
+    .orWhere({ interface: 'group-raw' });
+}
diff --git a/api/src/database/migrations/20210805B-change-image-metadata-structure.ts b/api/src/database/migrations/20210805B-change-image-metadata-structure.ts
new file mode 100644
index 0000000000..952b4db42b
--- /dev/null
+++ b/api/src/database/migrations/20210805B-change-image-metadata-structure.ts
@@ -0,0 +1,94 @@
+import { Knex } from 'knex';
+
+// Change image metadata structure to match the output from 'exifr'
+export async function up(knex: Knex): Promise<void> {
+  const files = await knex
+    .select<{ id: number; metadata: string }[]>('id', 'metadata')
+    .from('directus_files')
+    .whereNotNull('metadata');
+
+  for (const { id, metadata } of files) {
+    let prevMetadata;
+
+    try {
+      prevMetadata = JSON.parse(metadata);
+    } catch {
+      continue;
+    }
+
+    // Update only required if metadata has 'exif' data
+    if (prevMetadata.exif) {
+      // Get all data from 'exif' and rename the following keys:
+      // - 'image' to 'ifd0'
+      // - 'thumbnail' to 'ifd1'
+      // - 'interoperability' to 'interop'
+      const newMetadata = prevMetadata.exif;
+
+      if (newMetadata.image) {
+        newMetadata.ifd0 = newMetadata.image;
+        delete newMetadata.image;
+      }
+      if (newMetadata.thumbnail) {
+        newMetadata.ifd1 = newMetadata.thumbnail;
+        delete newMetadata.thumbnail;
+      }
+      if (newMetadata.interoperability) {
+        newMetadata.interop = newMetadata.interoperability;
+        delete newMetadata.interoperability;
+      }
+      if (prevMetadata.icc) {
+        newMetadata.icc = prevMetadata.icc;
+      }
+      if (prevMetadata.iptc) {
+        newMetadata.iptc = prevMetadata.iptc;
+      }
+
+      await knex('directus_files')
+        .update({ metadata: JSON.stringify(newMetadata) })
+        .where({ id });
+    }
+  }
+}
+
+export async function down(knex: Knex): Promise<void> {
+  const files = await knex
+    .select<{ id: number; metadata: string }[]>('id', 'metadata')
+    .from('directus_files')
+    .whereNotNull('metadata')
+    .whereNot('metadata', '{}');
+
+  for (const { id, metadata } of files) {
+    const prevMetadata = JSON.parse(metadata);
+
+    // Update only required if metadata has keys other than 'icc' and 'iptc'
+    if (Object.keys(prevMetadata).filter((key) => key !== 'icc' && key !== 'iptc').length > 0) {
+      // Put all data under 'exif' and rename/move keys afterwards
+      const newMetadata: { exif: Record<string, unknown>; icc?: unknown; iptc?: unknown } = { exif: prevMetadata };
+
+      if (newMetadata.exif.ifd0) {
+        newMetadata.exif.image = newMetadata.exif.ifd0;
+        delete newMetadata.exif.ifd0;
+      }
+      if (newMetadata.exif.ifd1) {
+        newMetadata.exif.thumbnail = newMetadata.exif.ifd1;
+        delete newMetadata.exif.ifd1;
+      }
+      if (newMetadata.exif.interop) {
+        newMetadata.exif.interoperability = newMetadata.exif.interop;
+        delete newMetadata.exif.interop;
+      }
+      if (newMetadata.exif.icc) {
+        newMetadata.icc = newMetadata.exif.icc;
+        delete newMetadata.exif.icc;
+      }
+      if (newMetadata.exif.iptc) {
+        newMetadata.iptc = newMetadata.exif.iptc;
+        delete newMetadata.exif.iptc;
+      }
+
+      await knex('directus_files')
+        .update({ metadata: JSON.stringify(newMetadata) })
+        .where({ id });
+    }
+  }
+}
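The shape change the 20210805B migration performs is easiest to see on a concrete record. The keys below are illustrative only; real payloads depend on what `exifr` extracts:

```ts
// Stored before the migration: exif data nested under an 'exif' key.
const before = {
  exif: {
    image: { Make: 'Canon' },
    thumbnail: { Compression: 6 },
    interoperability: { InteropIndex: 'R98' },
  },
  icc: { description: 'sRGB' },
  iptc: { keywords: ['directus'] },
};

// Stored after the migration: the exif contents are hoisted to the top level,
// with 'image'/'thumbnail'/'interoperability' renamed to exifr's 'ifd0'/'ifd1'/'interop'.
const after = {
  ifd0: { Make: 'Canon' },
  ifd1: { Compression: 6 },
  interop: { InteropIndex: 'R98' },
  icc: { description: 'sRGB' },
  iptc: { keywords: ['directus'] },
};
```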
diff --git a/api/src/database/migrations/run.ts b/api/src/database/migrations/run.ts
index 5b3566fd1d..fb9c20bc6a 100644
--- a/api/src/database/migrations/run.ts
+++ b/api/src/database/migrations/run.ts
@@ -5,12 +5,7 @@ import fse from 'fs-extra';
import { Knex } from 'knex';
import path from 'path';
import env from '../../env';
-
-type Migration = {
- version: string;
- name: string;
- timestamp: Date;
-};
+import { Migration } from '../../types';

export default async function run(database: Knex, direction: 'up' | 'down' | 'latest'): Promise<void> {
  let migrationFiles = await fse.readdir(__dirname);
diff --git a/api/src/database/run-ast.ts b/api/src/database/run-ast.ts
index 57aec476e4..0421e0d516 100644
--- a/api/src/database/run-ast.ts
+++ b/api/src/database/run-ast.ts
@@ -7,7 +7,7 @@ import { applyFunctionToColumnName } from '../utils/apply-function-to-column-name';
import applyQuery from '../utils/apply-query';
import { getColumn } from '../utils/get-column';
import { stripFunction } from '../utils/strip-function';
-import { toArray } from '../utils/to-array';
+import { toArray } from '@directus/shared/utils';
import getDatabase from './index';

type RunASTOptions = {
diff --git a/api/src/database/seeds/run.ts b/api/src/database/seeds/run.ts
index 2ad2eb0916..cf86c8eddf 100644
--- a/api/src/database/seeds/run.ts
+++ b/api/src/database/seeds/run.ts
@@ -3,13 +3,13 @@ import yaml from 'js-yaml';
import { Knex } from 'knex';
import { isObject } from 'lodash';
import path from 'path';
-import { types } from '../../types';
+import { Type } from '@directus/shared/types';

type TableSeed = {
  table: string;
  columns: {
    [column: string]: {
-     type?: typeof types[number];
+     type?: Type;
      primary?: boolean;
      nullable?: boolean;
      default?: any;
@@ -45,6 +45,8 @@ export default async function runSeed(database: Knex): Promise<void> {
    for (const [columnName, columnInfo] of Object.entries(seedData.columns)) {
      let column: Knex.ColumnBuilder;

+     if (columnInfo.type === 'alias' || columnInfo.type === 'unknown') return;
+
      if (columnInfo.type === 'string') {
        column = tableBuilder.string(columnName, columnInfo.length);
      } else if (columnInfo.increments) {
diff --git a/api/src/database/system-data/collections/collections.yaml b/api/src/database/system-data/collections/collections.yaml
index 5086cda422..7cc7270f5f 100644
--- a/api/src/database/system-data/collections/collections.yaml
+++ b/api/src/database/system-data/collections/collections.yaml
@@ -8,6 +8,7 @@ defaults:
  note: null
  translations: null
  display_template: null
+  accountability: 'all'

data:
  - collection: directus_activity
diff --git a/api/src/database/system-data/fields/fields.yaml b/api/src/database/system-data/fields/fields.yaml
index 5dfc5fcb25..82dd60a3ca 100644
--- a/api/src/database/system-data/fields/fields.yaml
+++ b/api/src/database/system-data/fields/fields.yaml
@@ -73,3 +73,8 @@ fields:
  - collection: directus_fields
    field: note
    width: half
+
+  - collection: directus_fields
+    field: conditions
+    hidden: true
+    special: json
diff --git a/api/src/database/system-data/fields/index.ts b/api/src/database/system-data/fields/index.ts
index b085e359e2..0f27da3436 100644
--- a/api/src/database/system-data/fields/index.ts
+++ b/api/src/database/system-data/fields/index.ts
@@ -1,7 +1,7 @@
import fse from 'fs-extra';
import { merge } from 'lodash';
import path from 'path';
-import { FieldMeta } from '../../../types';
+import { FieldMeta } from '@directus/shared/types';
import { requireYAML } from
'../../../utils/require-yaml'; const defaults = requireYAML(require.resolve('./_defaults.yaml')); diff --git a/api/src/database/system-data/fields/settings.yaml b/api/src/database/system-data/fields/settings.yaml index 12f5f99944..aba76bc0ae 100644 --- a/api/src/database/system-data/fields/settings.yaml +++ b/api/src/database/system-data/fields/settings.yaml @@ -124,7 +124,7 @@ fields: options: slug: true onlyOnCreate: false - width: half + width: full - field: fit name: Fit type: string @@ -173,6 +173,7 @@ fields: step: 1 width: half - field: withoutEnlargement + name: Upscaling type: boolean schema: default_value: false @@ -181,6 +182,51 @@ fields: width: half options: label: Don't upscale images + - field: format + name: Format + type: string + schema: + is_nullable: false + default_value: '' + meta: + interface: select-dropdown + options: + allowNone: true + choices: + - value: jpeg + text: JPEG + - value: png + text: PNG + - value: webp + text: WebP + - value: tiff + text: Tiff + width: half + - field: transforms + name: Additional Transformations + type: json + schema: + is_nullable: false + default_value: [] + meta: + note: + The Sharp method name and its arguments. See https://sharp.pixelplumbing.com/api-constructor for more + information. + interface: json + options: + template: > + [ + ["blur", 45], + ["grayscale"], + ["extend", { "right": 500, "background": "rgb(255, 0, 0)" }] + ] + placeholder: > + [ + ["blur", 45], + ["grayscale"], + ["extend", { "right": 500, "background": "rgb(255, 0, 0)" }] + ] + width: full template: '{{key}}' special: json width: full @@ -197,6 +243,11 @@ fields: text: Presets Only width: half + - field: storage_default_folder + interface: system-folder + width: half + note: Default folder where new files are uploaded + - field: overrides_divider interface: presentation-divider options: diff --git a/api/src/env.ts b/api/src/env.ts index 9b50dc1901..2dff4b2de1 100644 --- a/api/src/env.ts +++ b/api/src/env.ts @@ -8,7 +8,7 @@ import fs from 'fs'; import { clone, toNumber, toString } from 'lodash'; import path from 'path'; import { requireYAML } from './utils/require-yaml'; -import { toArray } from './utils/to-array'; +import { toArray } from '@directus/shared/utils'; const acceptedEnvTypes = ['string', 'number', 'regex', 'array']; @@ -16,7 +16,7 @@ const defaults: Record = { CONFIG_PATH: path.resolve(process.cwd(), '.env'), PORT: 8055, - PUBLIC_URL: 'http://localhost:8055', + PUBLIC_URL: '/', MAX_PAYLOAD_SIZE: '100kb', STORAGE_LOCATIONS: 'local', @@ -34,6 +34,7 @@ const defaults: Record = { REFRESH_TOKEN_TTL: '7d', REFRESH_TOKEN_COOKIE_SECURE: false, REFRESH_TOKEN_COOKIE_SAME_SITE: 'lax', + REFRESH_TOKEN_COOKIE_NAME: 'directus_refresh_token', ROOT_REDIRECT: './admin', @@ -64,9 +65,12 @@ const defaults: Record = { TELEMETRY: true, - ASSETS_CACHE_TTL: '30m', + ASSETS_CACHE_TTL: '30d', ASSETS_TRANSFORM_MAX_CONCURRENT: 1, ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION: 6000, + ASSETS_TRANSFORM_MAX_OPERATIONS: 5, + + SERVE_APP: true, }; // Allows us to force certain environment variable into a type, instead of relying @@ -170,6 +174,8 @@ function getEnvironmentValueByType(envVariableString: string) { return new RegExp(envVariableValue); case 'string': return envVariableValue; + case 'json': + return tryJSON(envVariableValue); } } @@ -181,14 +187,14 @@ function processValues(env: Record) { // and store it in the variable with the same name but without '_FILE' at the end let newKey; if (key.length > 5 && key.endsWith('_FILE')) { + newKey = key.slice(0, -5); + if (newKey in 
env) { + throw new Error( + `Duplicate environment variable encountered: you can't use "${newKey}" and "${key}" simultaneously.` + ); + } try { value = fs.readFileSync(value, { encoding: 'utf8' }); - newKey = key.slice(0, -5); - if (newKey in env) { - throw new Error( - `Duplicate environment variable encountered: you can't use "${key}" and "${newKey}" simultaneously.` - ); - } key = newKey; } catch { throw new Error(`Failed to read value from file "${value}", defined in environment variable "${key}".`); @@ -214,6 +220,9 @@ function processValues(env: Record) { case 'array': env[key] = toArray(value); break; + case 'json': + env[key] = tryJSON(value); + break; } continue; } @@ -247,6 +256,14 @@ function processValues(env: Record) { continue; } + if (String(value).includes(',')) { + env[key] = toArray(value); + } + + // Try converting the value to a JS object. This allows JSON objects to be passed for nested + // config flags, or custom param names (that aren't camelCased) + env[key] = tryJSON(value); + // If '_FILE' variable hasn't been processed yet, store it as it is (string) if (newKey) { env[key] = value; @@ -255,3 +272,11 @@ function processValues(env: Record) { return env; } + +function tryJSON(value: any) { + try { + return JSON.parse(value); + } catch { + return value; + } +} diff --git a/api/src/exceptions/database/contains-null-values.ts b/api/src/exceptions/database/contains-null-values.ts index 67fa622e29..a1456e291e 100644 --- a/api/src/exceptions/database/contains-null-values.ts +++ b/api/src/exceptions/database/contains-null-values.ts @@ -1,4 +1,4 @@ -import { BaseException } from '../base'; +import { BaseException } from '@directus/shared/exceptions'; type Exceptions = { collection: string; diff --git a/api/src/exceptions/database/dialects/mssql.ts b/api/src/exceptions/database/dialects/mssql.ts index dca25c3f05..6c4dc009df 100644 --- a/api/src/exceptions/database/dialects/mssql.ts +++ b/api/src/exceptions/database/dialects/mssql.ts @@ -46,7 +46,7 @@ async function uniqueViolation(error: MSSQLError) { * information_schema when this happens */ - const betweenQuotes = /'([^']+)'/; + const betweenQuotes = /'([^']+)'/g; const betweenParens = /\(([^)]+)\)/g; const quoteMatches = error.message.match(betweenQuotes); @@ -54,21 +54,35 @@ async function uniqueViolation(error: MSSQLError) { if (!quoteMatches || !parenMatches) return error; - const keyName = quoteMatches[1]; + const keyName = quoteMatches[1]?.slice(1, -1); - const database = getDatabase(); + let collection = quoteMatches[0]?.slice(1, -1); + let field: string | null = null; - const constraintUsage = await database - .select('*') - .from('INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE') - .where({ - CONSTRAINT_NAME: keyName, - }) - .first(); + if (keyName) { + const database = getDatabase(); - const collection = constraintUsage.TABLE_NAME; - const field = constraintUsage.COLUMN_NAME; - const invalid = parenMatches[parenMatches.length - 1].slice(1, -1); + const constraintUsage = await database + .select('sys.columns.name as field', database.raw('OBJECT_NAME(??) 
as collection', ['sys.columns.object_id'])) + .from('sys.indexes') + .innerJoin('sys.index_columns', (join) => { + join + .on('sys.indexes.object_id', '=', 'sys.index_columns.object_id') + .andOn('sys.indexes.index_id', '=', 'sys.index_columns.index_id'); + }) + .innerJoin('sys.columns', (join) => { + join + .on('sys.index_columns.object_id', '=', 'sys.columns.object_id') + .andOn('sys.index_columns.column_id', '=', 'sys.columns.column_id'); + }) + .where('sys.indexes.name', '=', keyName) + .first(); + + collection = constraintUsage?.collection; + field = constraintUsage?.field; + } + + const invalid = parenMatches[parenMatches.length - 1]?.slice(1, -1); return new RecordNotUniqueException(field, { collection, diff --git a/api/src/exceptions/database/invalid-foreign-key.ts b/api/src/exceptions/database/invalid-foreign-key.ts index 7102538757..f95750b3d3 100644 --- a/api/src/exceptions/database/invalid-foreign-key.ts +++ b/api/src/exceptions/database/invalid-foreign-key.ts @@ -1,4 +1,4 @@ -import { BaseException } from '../base'; +import { BaseException } from '@directus/shared/exceptions'; type Extensions = { collection: string; diff --git a/api/src/exceptions/database/not-null-violation.ts b/api/src/exceptions/database/not-null-violation.ts index e857c76d3a..154eda3d57 100644 --- a/api/src/exceptions/database/not-null-violation.ts +++ b/api/src/exceptions/database/not-null-violation.ts @@ -1,4 +1,4 @@ -import { BaseException } from '../base'; +import { BaseException } from '@directus/shared/exceptions'; type Exceptions = { collection: string; diff --git a/api/src/exceptions/database/record-not-unique.ts b/api/src/exceptions/database/record-not-unique.ts index 2bbb68cfff..47464b9fa8 100644 --- a/api/src/exceptions/database/record-not-unique.ts +++ b/api/src/exceptions/database/record-not-unique.ts @@ -1,4 +1,4 @@ -import { BaseException } from '../base'; +import { BaseException } from '@directus/shared/exceptions'; type Extensions = { collection: string; diff --git a/api/src/exceptions/database/value-out-of-range.ts b/api/src/exceptions/database/value-out-of-range.ts index e238274377..48f42de9f5 100644 --- a/api/src/exceptions/database/value-out-of-range.ts +++ b/api/src/exceptions/database/value-out-of-range.ts @@ -1,4 +1,4 @@ -import { BaseException } from '../base'; +import { BaseException } from '@directus/shared/exceptions'; type Exceptions = { collection: string; diff --git a/api/src/exceptions/database/value-too-long.ts b/api/src/exceptions/database/value-too-long.ts index 4d27b67099..0c0bfabf49 100644 --- a/api/src/exceptions/database/value-too-long.ts +++ b/api/src/exceptions/database/value-too-long.ts @@ -1,4 +1,4 @@ -import { BaseException } from '../base'; +import { BaseException } from '@directus/shared/exceptions'; type Extensions = { collection: string; diff --git a/api/src/exceptions/forbidden.ts b/api/src/exceptions/forbidden.ts index 4b464d7e00..fd969abb05 100644 --- a/api/src/exceptions/forbidden.ts +++ b/api/src/exceptions/forbidden.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class ForbiddenException extends BaseException { constructor() { diff --git a/api/src/exceptions/graphql-validation.ts b/api/src/exceptions/graphql-validation.ts index 09ddb81a29..dc193b9db9 100644 --- a/api/src/exceptions/graphql-validation.ts +++ b/api/src/exceptions/graphql-validation.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export 
class GraphQLValidationException extends BaseException { constructor(extensions: Record) { diff --git a/api/src/exceptions/hit-rate-limit.ts b/api/src/exceptions/hit-rate-limit.ts index 25f2f61663..077f5c0193 100644 --- a/api/src/exceptions/hit-rate-limit.ts +++ b/api/src/exceptions/hit-rate-limit.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; type Extensions = { limit: number; diff --git a/api/src/exceptions/illegal-asset-transformation.ts b/api/src/exceptions/illegal-asset-transformation.ts index 8b964cc433..0dbc174354 100644 --- a/api/src/exceptions/illegal-asset-transformation.ts +++ b/api/src/exceptions/illegal-asset-transformation.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class IllegalAssetTransformation extends BaseException { constructor(message: string) { diff --git a/api/src/exceptions/index.ts b/api/src/exceptions/index.ts index c55b89d4eb..d00eaf78ba 100644 --- a/api/src/exceptions/index.ts +++ b/api/src/exceptions/index.ts @@ -1,5 +1,3 @@ -export * from './base'; -export * from './failed-validation'; export * from './forbidden'; export * from './graphql-validation'; export * from './hit-rate-limit'; diff --git a/api/src/exceptions/invalid-credentials.ts b/api/src/exceptions/invalid-credentials.ts index 015cd3b012..cfdfc258b0 100644 --- a/api/src/exceptions/invalid-credentials.ts +++ b/api/src/exceptions/invalid-credentials.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class InvalidCredentialsException extends BaseException { constructor(message = 'Invalid user credentials.') { diff --git a/api/src/exceptions/invalid-ip.ts b/api/src/exceptions/invalid-ip.ts index 4709418d5f..73ca7d068d 100644 --- a/api/src/exceptions/invalid-ip.ts +++ b/api/src/exceptions/invalid-ip.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class InvalidIPException extends BaseException { constructor(message = 'Invalid IP address.') { diff --git a/api/src/exceptions/invalid-otp.ts b/api/src/exceptions/invalid-otp.ts index da7d736cc0..13e2db07f8 100644 --- a/api/src/exceptions/invalid-otp.ts +++ b/api/src/exceptions/invalid-otp.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class InvalidOTPException extends BaseException { constructor(message = 'Invalid user OTP.') { diff --git a/api/src/exceptions/invalid-payload.ts b/api/src/exceptions/invalid-payload.ts index b041444e8b..d40382f87b 100644 --- a/api/src/exceptions/invalid-payload.ts +++ b/api/src/exceptions/invalid-payload.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class InvalidPayloadException extends BaseException { constructor(message: string, extensions?: Record) { diff --git a/api/src/exceptions/invalid-query.ts b/api/src/exceptions/invalid-query.ts index 0419ee0536..fd7ed5f134 100644 --- a/api/src/exceptions/invalid-query.ts +++ b/api/src/exceptions/invalid-query.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class InvalidQueryException extends BaseException { constructor(message: string) { diff --git a/api/src/exceptions/method-not-allowed.ts 
b/api/src/exceptions/method-not-allowed.ts index 6d43769bc2..656d92d43b 100644 --- a/api/src/exceptions/method-not-allowed.ts +++ b/api/src/exceptions/method-not-allowed.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; type Extensions = { allow: string[]; diff --git a/api/src/exceptions/range-not-satisfiable.ts b/api/src/exceptions/range-not-satisfiable.ts index 6b169dde0e..437e0fac7e 100644 --- a/api/src/exceptions/range-not-satisfiable.ts +++ b/api/src/exceptions/range-not-satisfiable.ts @@ -1,5 +1,5 @@ import { Range } from '@directus/drive'; -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class RangeNotSatisfiableException extends BaseException { constructor(range: Range) { diff --git a/api/src/exceptions/route-not-found.ts b/api/src/exceptions/route-not-found.ts index 6f054af4b1..d225dba10b 100644 --- a/api/src/exceptions/route-not-found.ts +++ b/api/src/exceptions/route-not-found.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class RouteNotFoundException extends BaseException { constructor(path: string) { diff --git a/api/src/exceptions/service-unavailable.ts b/api/src/exceptions/service-unavailable.ts index f425f100d2..cd8a39860e 100644 --- a/api/src/exceptions/service-unavailable.ts +++ b/api/src/exceptions/service-unavailable.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; type Extensions = { service: string; diff --git a/api/src/exceptions/unprocessable-entity.ts b/api/src/exceptions/unprocessable-entity.ts index 6fe580d5ea..5a6dc25f30 100644 --- a/api/src/exceptions/unprocessable-entity.ts +++ b/api/src/exceptions/unprocessable-entity.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class UnprocessableEntityException extends BaseException { constructor(message: string) { diff --git a/api/src/exceptions/user-suspended.ts b/api/src/exceptions/user-suspended.ts index 4267fc6ae9..9bd5ae6d89 100644 --- a/api/src/exceptions/user-suspended.ts +++ b/api/src/exceptions/user-suspended.ts @@ -1,4 +1,4 @@ -import { BaseException } from './base'; +import { BaseException } from '@directus/shared/exceptions'; export class UserSuspendedException extends BaseException { constructor(message = 'User suspended.') { diff --git a/api/src/extensions.ts b/api/src/extensions.ts index d1c94e60ed..3385f98fd1 100644 --- a/api/src/extensions.ts +++ b/api/src/extensions.ts @@ -7,8 +7,8 @@ import { getLocalExtensions, getPackageExtensions, resolvePackage, -} from '@directus/shared/utils'; -import { APP_EXTENSION_TYPES, SHARED_DEPS } from '@directus/shared/constants'; +} from '@directus/shared/utils/node'; +import { APP_EXTENSION_TYPES, APP_SHARED_DEPS } from '@directus/shared/constants'; import getDatabase from './database'; import emitter from './emitter'; import env from './env'; @@ -31,10 +31,15 @@ let extensions: Extension[] = []; let extensionBundles: Partial> = {}; export async function initializeExtensions(): Promise { - await ensureExtensionDirs(env.EXTENSIONS_PATH); - extensions = await getExtensions(); + try { + await ensureExtensionDirs(env.EXTENSIONS_PATH); + extensions = await getExtensions(); + } catch (err) { + logger.warn(`Couldn't load extensions`); + logger.warn(err); + } - if (!('DIRECTUS_DEV' in process.env)) { + if 
(env.SERVE_APP ?? env.NODE_ENV !== 'development') { extensionBundles = await generateExtensionBundles(); } @@ -74,7 +79,7 @@ async function getExtensions(): Promise { } async function generateExtensionBundles() { - const sharedDepsMapping = await getSharedDepsMapping(SHARED_DEPS); + const sharedDepsMapping = await getSharedDepsMapping(APP_SHARED_DEPS); const internalImports = Object.entries(sharedDepsMapping).map(([name, path]) => ({ find: name, replacement: path, diff --git a/api/src/grant.ts b/api/src/grant.ts index e8b702a961..7704ea3716 100644 --- a/api/src/grant.ts +++ b/api/src/grant.ts @@ -3,7 +3,7 @@ */ import env from './env'; -import { toArray } from './utils/to-array'; +import { toArray } from '@directus/shared/utils'; import { getConfigFromEnv } from './utils/get-config-from-env'; const enabledProviders = toArray(env.OAUTH_PROVIDERS).map((provider) => provider.toLowerCase()); diff --git a/api/src/logger.ts b/api/src/logger.ts index d03bd7aa2d..40b96aef44 100644 --- a/api/src/logger.ts +++ b/api/src/logger.ts @@ -7,7 +7,7 @@ import env from './env'; const pinoOptions: LoggerOptions = { level: env.LOG_LEVEL || 'info', redact: { - paths: ['req.headers.authorization', 'req.cookies.directus_refresh_token'], + paths: ['req.headers.authorization', `req.cookies.${env.REFRESH_TOKEN_COOKIE_NAME}`], censor: '--redact--', }, }; @@ -19,16 +19,20 @@ if (env.LOG_STYLE !== 'raw') { const logger = pino(pinoOptions); -export const expressLogger = pinoHTTP({ - logger, - serializers: { - req(request: Request) { - const output = stdSerializers.req(request); - output.url = redactQuery(output.url); - return output; - }, +export const expressLogger = pinoHTTP( + { + logger, }, -}) as RequestHandler; + { + serializers: { + req(request: Request) { + const output = stdSerializers.req(request); + output.url = redactQuery(output.url); + return output; + }, + }, + } +) as RequestHandler; export default logger; diff --git a/api/src/mailer.ts b/api/src/mailer.ts index 1b62abb55d..ab32b8cd7d 100644 --- a/api/src/mailer.ts +++ b/api/src/mailer.ts @@ -1,6 +1,7 @@ import nodemailer, { Transporter } from 'nodemailer'; import env from './env'; import logger from './logger'; +import { getConfigFromEnv } from './utils/get-config-from-env'; let transporter: Transporter; @@ -23,13 +24,16 @@ export default function getMailer(): Transporter { }; } + const tls: Record = getConfigFromEnv('EMAIL_SMTP_TLS_'); + transporter = nodemailer.createTransport({ pool: env.EMAIL_SMTP_POOL, host: env.EMAIL_SMTP_HOST, port: env.EMAIL_SMTP_PORT, secure: env.EMAIL_SMTP_SECURE, ignoreTLS: env.EMAIL_SMTP_IGNORE_TLS, - auth: auth, + auth, + tls, } as Record); } else if (env.EMAIL_TRANSPORT.toLowerCase() === 'mailgun') { const mg = require('nodemailer-mailgun-transport'); @@ -39,6 +43,7 @@ export default function getMailer(): Transporter { api_key: env.EMAIL_MAILGUN_API_KEY, domain: env.EMAIL_MAILGUN_DOMAIN, }, + host: env.EMAIL_MAILGUN_HOST || 'https://api.mailgun.net', }) as any ); } else { diff --git a/api/src/middleware/cache.ts b/api/src/middleware/cache.ts index 613cc81572..747db06885 100644 --- a/api/src/middleware/cache.ts +++ b/api/src/middleware/cache.ts @@ -4,6 +4,7 @@ import env from '../env'; import asyncHandler from '../utils/async-handler'; import { getCacheControlHeader } from '../utils/get-cache-headers'; import { getCacheKey } from '../utils/get-cache-key'; +import logger from '../logger'; const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next) => { const { cache } = getCache(); @@ -17,10 +18,26 @@ 
const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next) } const key = getCacheKey(req); - const cachedData = await cache.get(key); + + let cachedData; + + try { + cachedData = await cache.get(key); + } catch (err) { + logger.warn(err, `[cache] Couldn't read key ${key}. ${err.message}`); + return next(); + } if (cachedData) { - const cacheExpiryDate = (await cache.get(`${key}__expires_at`)) as number | null; + let cacheExpiryDate; + + try { + cacheExpiryDate = (await cache.get(`${key}__expires_at`)) as number | null; + } catch (err) { + logger.warn(err, `[cache] Couldn't read key ${`${key}__expires_at`}. ${err.message}`); + return next(); + } + const cacheTTL = cacheExpiryDate ? cacheExpiryDate - Date.now() : null; res.setHeader('Cache-Control', getCacheControlHeader(req, cacheTTL)); diff --git a/api/src/middleware/error-handler.ts b/api/src/middleware/error-handler.ts index 24a61390ad..88a6ecff17 100644 --- a/api/src/middleware/error-handler.ts +++ b/api/src/middleware/error-handler.ts @@ -1,9 +1,10 @@ import { ErrorRequestHandler } from 'express'; import { emitAsyncSafe } from '../emitter'; import env from '../env'; -import { BaseException, MethodNotAllowedException } from '../exceptions'; +import { MethodNotAllowedException } from '../exceptions'; +import { BaseException } from '@directus/shared/exceptions'; import logger from '../logger'; -import { toArray } from '../utils/to-array'; +import { toArray } from '@directus/shared/utils'; // Note: keep all 4 parameters here. That's how Express recognizes it's the error handler, even if // we don't use next diff --git a/api/src/middleware/respond.ts b/api/src/middleware/respond.ts index ed4a4032a2..072f3db411 100644 --- a/api/src/middleware/respond.ts +++ b/api/src/middleware/respond.ts @@ -8,6 +8,7 @@ import asyncHandler from '../utils/async-handler'; import { getCacheKey } from '../utils/get-cache-key'; import { parse as toXML } from 'js2xmlparser'; import { getCacheControlHeader } from '../utils/get-cache-headers'; +import logger from '../logger'; export const respond: RequestHandler = asyncHandler(async (req, res) => { const { cache } = getCache(); @@ -20,8 +21,14 @@ export const respond: RequestHandler = asyncHandler(async (req, res) => { res.locals.cache !== false ) { const key = getCacheKey(req); - await cache.set(key, res.locals.payload, ms(env.CACHE_TTL as string)); - await cache.set(`${key}__expires_at`, Date.now() + ms(env.CACHE_TTL as string)); + + try { + await cache.set(key, res.locals.payload, ms(env.CACHE_TTL as string)); + await cache.set(`${key}__expires_at`, Date.now() + ms(env.CACHE_TTL as string)); + } catch (err) { + logger.warn(err, `[cache] Couldn't set key ${key}. 
${err}`); + } + res.setHeader('Cache-Control', getCacheControlHeader(req, ms(env.CACHE_TTL as string))); res.setHeader('Vary', 'Origin, Cache-Control'); } else { diff --git a/api/src/middleware/validate-batch.ts b/api/src/middleware/validate-batch.ts index 90bcd2538a..594e2e8754 100644 --- a/api/src/middleware/validate-batch.ts +++ b/api/src/middleware/validate-batch.ts @@ -1,6 +1,7 @@ import { RequestHandler } from 'express'; import Joi from 'joi'; -import { FailedValidationException, InvalidPayloadException } from '../exceptions'; +import { InvalidPayloadException } from '../exceptions'; +import { FailedValidationException } from '@directus/shared/exceptions'; import asyncHandler from '../utils/async-handler'; import { sanitizeQuery } from '../utils/sanitize-query'; diff --git a/api/src/server.ts b/api/src/server.ts index cb483021e0..ca90b90cfb 100644 --- a/api/src/server.ts +++ b/api/src/server.ts @@ -8,6 +8,7 @@ import url from 'url'; import createApp from './app'; import getDatabase from './database'; import { emitAsyncSafe } from './emitter'; +import env from './env'; import logger from './logger'; export default async function createServer(): Promise { @@ -86,9 +87,7 @@ export default async function createServer(): Promise { async function beforeShutdown() { emitAsyncSafe('server.stop.before', { server }); - if ('DIRECTUS_DEV' in process.env) { - logger.info('Restarting...'); - } else { + if (env.NODE_ENV !== 'development') { logger.info('Shutting down...'); } } @@ -102,7 +101,7 @@ export default async function createServer(): Promise { async function onShutdown() { emitAsyncSafe('server.stop'); - if (!('DIRECTUS_DEV' in process.env)) { + if (env.NODE_ENV !== 'development') { logger.info('Directus shut down OK. Bye bye!'); } } diff --git a/api/src/services/assets.ts b/api/src/services/assets.ts index baae16fb31..ef38f019c0 100644 --- a/api/src/services/assets.ts +++ b/api/src/services/assets.ts @@ -1,15 +1,18 @@ import { Range, StatResponse } from '@directus/drive'; -import { Knex } from 'knex'; -import path from 'path'; -import sharp, { ResizeOptions } from 'sharp'; -import getDatabase from '../database'; -import { RangeNotSatisfiableException, IllegalAssetTransformation } from '../exceptions'; -import storage from '../storage'; -import { AbstractServiceOptions, Accountability, Transformation } from '../types'; -import { AuthorizationService } from './authorization'; import { Semaphore } from 'async-mutex'; +import { Knex } from 'knex'; +import { contentType } from 'mime-types'; +import ObjectHash from 'object-hash'; +import path from 'path'; +import sharp from 'sharp'; +import getDatabase from '../database'; import env from '../env'; -import { File } from '../types'; +import { IllegalAssetTransformation, RangeNotSatisfiableException } from '../exceptions'; +import storage from '../storage'; +import { AbstractServiceOptions, File, Transformation, TransformationParams, TransformationPreset } from '../types'; +import { Accountability } from '@directus/shared/types'; +import { AuthorizationService } from './authorization'; +import * as TransformationUtils from '../utils/transformations'; sharp.concurrency(1); @@ -30,7 +33,7 @@ export class AssetsService { async getAsset( id: string, - transformation: Transformation, + transformation: TransformationParams | TransformationPreset, range?: Range ): Promise<{ stream: NodeJS.ReadableStream; file: any; stat: StatResponse }> { const publicSettings = await this.knex @@ -53,18 +56,23 @@ export class AssetsService { } const type = file.type; + 
const transforms = TransformationUtils.resolvePreset(transformation, file); // We can only transform JPEG, PNG, and WebP - if (type && Object.keys(transformation).length > 0 && ['image/jpeg', 'image/png', 'image/webp'].includes(type)) { - const resizeOptions = this.parseTransformation(transformation); + if (type && transforms.length > 0 && ['image/jpeg', 'image/png', 'image/webp', 'image/tiff'].includes(type)) { + const maybeNewFormat = TransformationUtils.maybeExtractFormat(transforms); const assetFilename = path.basename(file.filename_disk, path.extname(file.filename_disk)) + - this.getAssetSuffix(transformation) + - path.extname(file.filename_disk); + getAssetSuffix(transforms) + + (maybeNewFormat ? `.${maybeNewFormat}` : path.extname(file.filename_disk)); const { exists } = await storage.disk(file.storage).exists(assetFilename); + if (maybeNewFormat) { + file.type = contentType(assetFilename) || null; + } + if (exists) { return { stream: storage.disk(file.storage).getStream(assetFilename, range), @@ -94,15 +102,9 @@ export class AssetsService { const transformer = sharp({ limitInputPixels: Math.pow(env.ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION, 2), sequentialRead: true, - }) - .rotate() - .resize(resizeOptions); + }).rotate(); - if (transformation.quality) { - transformer.toFormat(type.substring(6) as 'jpeg' | 'png' | 'webp', { - quality: Number(transformation.quality), - }); - } + transforms.forEach(([method, ...args]) => (transformer[method] as any).apply(transformer, args)); await storage.disk(file.storage).put(assetFilename, readStream.pipe(transformer), type); @@ -118,28 +120,9 @@ export class AssetsService { return { stream: readStream, file, stat }; } } - - private parseTransformation(transformation: Transformation): ResizeOptions { - const resizeOptions: ResizeOptions = {}; - - if (transformation.width) resizeOptions.width = Number(transformation.width); - if (transformation.height) resizeOptions.height = Number(transformation.height); - if (transformation.fit) resizeOptions.fit = transformation.fit; - if (transformation.withoutEnlargement) - resizeOptions.withoutEnlargement = Boolean(transformation.withoutEnlargement); - - return resizeOptions; - } - - private getAssetSuffix(transformation: Transformation) { - if (Object.keys(transformation).length === 0) return ''; - - return ( - '__' + - Object.entries(transformation) - .sort((a, b) => (a[0] > b[0] ? 
1 : -1)) - .map((e) => e.join('_')) - .join(',') - ); - } } + +const getAssetSuffix = (transforms: Transformation[]) => { + if (Object.keys(transforms).length === 0) return ''; + return `__${ObjectHash.sha1(transforms)}`; +}; diff --git a/api/src/services/authentication.ts b/api/src/services/authentication.ts index ae944df822..82bb2772e3 100644 --- a/api/src/services/authentication.ts +++ b/api/src/services/authentication.ts @@ -15,9 +15,12 @@ import { } from '../exceptions'; import { createRateLimiter } from '../rate-limiter'; import { ActivityService } from '../services/activity'; -import { AbstractServiceOptions, Accountability, Action, SchemaOverview, Session } from '../types'; +import { AbstractServiceOptions, Action, SchemaOverview, Session } from '../types'; +import { Accountability } from '@directus/shared/types'; import { SettingsService } from './settings'; import { merge } from 'lodash'; +import { performance } from 'perf_hooks'; +import { stall } from '../utils/stall'; type AuthenticateOptions = { email: string; @@ -52,6 +55,9 @@ export class AuthenticationService { async authenticate( options: AuthenticateOptions ): Promise<{ accessToken: any; refreshToken: any; expires: any; id?: any }> { + const STALL_TIME = 100; + const timeStart = performance.now(); + const settingsService = new SettingsService({ knex: this.knex, schema: this.schema, @@ -97,8 +103,10 @@ export class AuthenticationService { emitStatus('fail'); if (user?.status === 'suspended') { + await stall(STALL_TIME, timeStart); throw new UserSuspendedException(); } else { + await stall(STALL_TIME, timeStart); throw new InvalidCredentialsException(); } } @@ -125,17 +133,20 @@ export class AuthenticationService { if (password !== undefined) { if (!user.password) { emitStatus('fail'); + await stall(STALL_TIME, timeStart); throw new InvalidCredentialsException(); } if ((await argon2.verify(user.password, password)) === false) { emitStatus('fail'); + await stall(STALL_TIME, timeStart); throw new InvalidCredentialsException(); } } if (user.tfa_secret && !otp) { emitStatus('fail'); + await stall(STALL_TIME, timeStart); throw new InvalidOTPException(`"otp" is required`); } @@ -144,6 +155,7 @@ export class AuthenticationService { if (otpValid === false) { emitStatus('fail'); + await stall(STALL_TIME, timeStart); throw new InvalidOTPException(`"otp" is invalid`); } } @@ -193,6 +205,8 @@ export class AuthenticationService { await loginAttemptsLimiter.set(user.id, 0, 0); } + await stall(STALL_TIME, timeStart); + return { accessToken, refreshToken, diff --git a/api/src/services/authorization.ts b/api/src/services/authorization.ts index 2ddbd9f7a2..e36f49d0a8 100644 --- a/api/src/services/authorization.ts +++ b/api/src/services/authorization.ts @@ -1,13 +1,14 @@ import { Knex } from 'knex'; -import { cloneDeep, flatten, merge, uniq, uniqWith } from 'lodash'; +import { cloneDeep, merge, uniq, uniqWith, flatten, isNil } from 'lodash'; import getDatabase from '../database'; -import { FailedValidationException, ForbiddenException } from '../exceptions'; +import { ForbiddenException } from '../exceptions'; +import { FailedValidationException } from '@directus/shared/exceptions'; +import { validatePayload, parseFilter } from '@directus/shared/utils'; +import { Accountability } from '@directus/shared/types'; import { AbstractServiceOptions, - Accountability, AST, FieldNode, - Filter, Item, NestedCollectionNode, Permission, @@ -15,9 +16,8 @@ import { PrimaryKey, Query, SchemaOverview, + Aggregate, } from '../types'; -import generateJoi from 
'../utils/generate-joi'; -import { parseFilter } from '../utils/parse-filter'; import { ItemsService } from './items'; import { PayloadService } from './payload'; @@ -71,7 +71,13 @@ export class AuthorizationService { if (ast.type === 'm2a') { collections.push(...ast.names.map((name) => ({ collection: name, field: ast.fieldKey }))); - /** @TODO add nestedNode */ + for (const children of Object.values(ast.children)) { + for (const nestedNode of children) { + if (nestedNode.type !== 'field') { + collections.push(...getCollectionsFromAST(nestedNode)); + } + } + } } else { collections.push({ collection: ast.name, @@ -89,17 +95,23 @@ export class AuthorizationService { } function validateFields(ast: AST | NestedCollectionNode | FieldNode) { - if (ast.type !== 'field' && ast.type !== 'm2a') { - /** @TODO remove m2a check */ - const collection = ast.name; + if (ast.type !== 'field') { + if (ast.type === 'm2a') { + for (const [collection, children] of Object.entries(ast.children)) { + checkFields(collection, children, ast.query?.[collection]?.aggregate); + } + } else { + checkFields(ast.name, ast.children, ast.query?.aggregate); + } + } + function checkFields(collection: string, children: (NestedCollectionNode | FieldNode)[], aggregate?: Aggregate) { // We check the availability of the permissions in the step before this is run const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!; - const allowedFields = permissions.fields || []; - if (ast.query.aggregate && allowedFields.includes('*') === false) { - for (const [_operation, aliasMap] of Object.entries(ast.query.aggregate)) { + if (aggregate && allowedFields.includes('*') === false) { + for (const [_operation, aliasMap] of Object.entries(aggregate)) { if (!aliasMap) continue; for (const [column, _alias] of Object.entries(aliasMap)) { @@ -108,7 +120,7 @@ export class AuthorizationService { } } - for (const childNode of ast.children) { + for (const childNode of children) { if (childNode.type !== 'field') { validateFields(childNode); continue; @@ -129,43 +141,61 @@ export class AuthorizationService { ast: AST | NestedCollectionNode | FieldNode, accountability: Accountability | null ): AST | NestedCollectionNode | FieldNode { - if (ast.type !== 'field' && ast.type !== 'm2a') { - /** @TODO remove m2a check */ - const collection = ast.name; + if (ast.type !== 'field') { + if (ast.type === 'm2a') { + const collections = Object.keys(ast.children); + for (const collection of collections) { + updateFilterQuery(collection, ast.query[collection]); + } + + for (const [collection, children] of Object.entries(ast.children)) { + ast.children[collection] = children.map((child) => applyFilters(child, accountability)) as ( + | NestedCollectionNode + | FieldNode + )[]; + } + } else { + const collection = ast.name; + + updateFilterQuery(collection, ast.query); + + ast.children = ast.children.map((child) => applyFilters(child, accountability)) as ( + | NestedCollectionNode + | FieldNode + )[]; + } + } + + return ast; + + function updateFilterQuery(collection: string, query: Query) { // We check the availability of the permissions in the step before this is run const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!; const parsedPermissions = parseFilter(permissions.permissions, accountability); - if (!ast.query.filter || Object.keys(ast.query.filter).length === 0) { - ast.query.filter = { _and: [] }; + if (!query.filter || Object.keys(query.filter).length === 0) { + 
query.filter = { _and: [] }; } else { - ast.query.filter = { _and: [ast.query.filter] }; + query.filter = { _and: [query.filter] }; } if (parsedPermissions && Object.keys(parsedPermissions).length > 0) { - ast.query.filter._and.push(parsedPermissions); + query.filter._and.push(parsedPermissions); } - if (ast.query.filter._and.length === 0) delete ast.query.filter._and; + if (query.filter._and.length === 0) delete query.filter._and; - if (permissions.limit && ast.query.limit && ast.query.limit > permissions.limit) { + if (permissions.limit && query.limit && query.limit > permissions.limit) { throw new ForbiddenException(); } // Default to the permissions limit if limit hasn't been set - if (permissions.limit && !ast.query.limit) { - ast.query.limit = permissions.limit; + if (permissions.limit && !query.limit) { + query.limit = permissions.limit; } - - ast.children = ast.children.map((child) => applyFilters(child, accountability)) as ( - | NestedCollectionNode - | FieldNode - )[]; } - - return ast; } } @@ -173,8 +203,6 @@ export class AuthorizationService { * Checks if the provided payload matches the configured permissions, and adds the presets to the payload. */ validatePayload(action: PermissionsAction, collection: string, data: Partial): Promise> { - const validationErrors: FailedValidationException[] = []; - const payload = cloneDeep(data); let permission: Permission | undefined; @@ -216,44 +244,57 @@ export class AuthorizationService { const payloadWithPresets = merge({}, preset, payload); - const requiredColumns: string[] = []; + const hasValidationRules = + isNil(permission.validation) === false && Object.keys(permission.validation ?? {}).length > 0; - for (const [name, field] of Object.entries(this.schema.collections[collection].fields)) { + const requiredColumns: SchemaOverview['collections'][string]['fields'][string][] = []; + + for (const field of Object.values(this.schema.collections[collection].fields)) { const specials = field?.special ?? []; const hasGenerateSpecial = ['uuid', 'date-created', 'role-created', 'user-created'].some((name) => specials.includes(name) ); - const isRequired = field.nullable === false && field.defaultValue === null && hasGenerateSpecial === false; + const notNullable = field.nullable === false && hasGenerateSpecial === false; - if (isRequired) { - requiredColumns.push(name); + if (notNullable) { + requiredColumns.push(field); } } + if (hasValidationRules === false && requiredColumns.length === 0) { + return payloadWithPresets; + } + if (requiredColumns.length > 0) { - permission.validation = { - _and: [permission.validation, {}], - }; + permission.validation = hasValidationRules ? 
{ _and: [permission.validation] } : { _and: [] }; - if (action === 'create') { - for (const name of requiredColumns) { - permission.validation._and[1][name] = { - _submitted: true, - }; + for (const field of requiredColumns) { + if (action === 'create' && field.defaultValue === null) { + permission.validation._and.push({ + [field.field]: { + _submitted: true, + }, + }); } - } else { - for (const name of requiredColumns) { - permission.validation._and[1][name] = { + + permission.validation._and.push({ + [field.field]: { _nnull: true, - }; - } + }, + }); } } + const validationErrors: FailedValidationException[] = []; + validationErrors.push( - ...this.validateJoi(parseFilter(permission.validation || {}, this.accountability), payloadWithPresets) + ...flatten( + validatePayload(parseFilter(permission.validation!, this.accountability), payloadWithPresets).map((error) => + error.details.map((details) => new FailedValidationException(details)) + ) + ) ); if (validationErrors.length > 0) throw validationErrors; @@ -261,48 +302,6 @@ return payloadWithPresets; } - validateJoi(validation: Filter, payload: Partial<Item>): FailedValidationException[] { - if (!validation) return []; - - const errors: FailedValidationException[] = []; - - /** - * Note there can only be a single _and / _or per level - */ - - if (Object.keys(validation)[0] === '_and') { - const subValidation = Object.values(validation)[0]; - - const nestedErrors = flatten( - subValidation.map((subObj: Record<string, any>) => { - return this.validateJoi(subObj, payload); - }) - ).filter((err?: FailedValidationException) => err); - errors.push(...nestedErrors); - } else if (Object.keys(validation)[0] === '_or') { - const subValidation = Object.values(validation)[0]; - const nestedErrors = flatten( - subValidation.map((subObj: Record<string, any>) => this.validateJoi(subObj, payload)) - ); - - const allErrored = subValidation.length === nestedErrors.length; - - if (allErrored) { - errors.push(...nestedErrors); - } - } else { - const schema = generateJoi(validation); - - const { error } = schema.validate(payload, { abortEarly: false }); - - if (error) { - errors.push(...error.details.map((details) => new FailedValidationException(details))); - } - } - - return errors; - } - async checkAccess(action: PermissionsAction, collection: string, pk: PrimaryKey | PrimaryKey[]): Promise<void> { if (this.accountability?.admin === true) return; diff --git a/api/src/services/collections.ts b/api/src/services/collections.ts index 3b87db7fa2..7ce3c213b5 100644 --- a/api/src/services/collections.ts +++ b/api/src/services/collections.ts @@ -10,14 +10,8 @@ import logger from '../logger'; import { FieldsService, RawField } from '../services/fields'; import { ItemsService, MutationOptions } from '../services/items'; import Keyv from 'keyv'; -import { - AbstractServiceOptions, - Accountability, - Collection, - CollectionMeta, - FieldMeta, - SchemaOverview, -} from '../types'; +import { AbstractServiceOptions, Collection, CollectionMeta, SchemaOverview } from '../types'; +import { Accountability, FieldMeta } from '@directus/shared/types'; export type RawCollection = { collection: string; @@ -213,6 +207,11 @@ export class CollectionsService { const collections: Collection[] = []; + /** + * The collections as known in the schema cache.
+ */ + const knownCollections = Object.keys(this.schema.collections); + for (const table of tablesInDatabase) { const collection: Collection = { collection: table.name, @@ -220,7 +219,12 @@ schema: table, }; - collections.push(collection); + // By only returning collections that are known in the schema cache, we prevent weird + // situations where the collections endpoint returns different info from every other + // collection + if (knownCollections.includes(table.name)) { + collections.push(collection); + } } return collections; @@ -272,6 +276,8 @@ const collections: Collection[] = []; + const knownCollections = Object.keys(this.schema.collections); + for (const table of tables) { const collection: Collection = { collection: table.name, @@ -279,7 +285,12 @@ schema: table, }; - collections.push(collection); + // By only returning collections that are known in the schema cache, we prevent weird + // situations where the collections endpoint returns different info from every other + // collection + if (knownCollections.includes(table.name)) { + collections.push(collection); + } } return collections; diff --git a/api/src/services/fields.ts b/api/src/services/fields.ts index b821508a6e..f4d5256a6f 100644 --- a/api/src/services/fields.ts +++ b/api/src/services/fields.ts @@ -11,16 +11,18 @@ import { ForbiddenException, InvalidPayloadException } from '../exceptions'; import { translateDatabaseError } from '../exceptions/database/translate'; import { ItemsService } from '../services/items'; import { PayloadService } from '../services/payload'; -import { AbstractServiceOptions, Accountability, FieldMeta, SchemaOverview, types } from '../types'; -import { Field } from '../types/field'; +import { AbstractServiceOptions, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; +import { Field, FieldMeta, Type } from '@directus/shared/types'; import getDefaultValue from '../utils/get-default-value'; import getLocalType from '../utils/get-local-type'; -import { toArray } from '../utils/to-array'; -import { isEqual } from 'lodash'; +import { toArray } from '@directus/shared/utils'; +import { isEqual, isNil } from 'lodash'; import { RelationsService } from './relations'; import Keyv from 'keyv'; +import { DeepPartial } from '@directus/shared/types'; -export type RawField = DeepPartial<Field> & { field: string; type: typeof types[number] }; +export type RawField = DeepPartial<Field> & { field: string; type: Type }; export class FieldsService { knex: Knex; @@ -213,15 +215,20 @@ export class FieldsService { async createField( collection: string, - field: Partial<Field> & { field: string; type: typeof types[number] | null }, + field: Partial<Field> & { field: string; type: Type | null }, table?: Knex.CreateTableBuilder // allows collection creation to ): Promise<void> { if (this.accountability && this.accountability.admin !== true) { throw new ForbiddenException(); } + const exists = + field.field in this.schema.collections[collection].fields || + isNil(await this.knex.select('id').from('directus_fields').where({ collection, field: field.field }).first()) === + false; + // Check if field already exists, either as a column, or as a row in directus_fields - if (field.field in this.schema.collections[collection].fields) { + if (exists) { throw new InvalidPayloadException(`Field "${field.field}" already exists in collection "${collection}"`); } @@ -313,7 +320,6 @@ export class FieldsService { return field.field;
} - /** @todo save accountability */ async deleteField(collection: string, field: string): Promise { if (this.accountability && this.accountability.admin !== true) { throw new ForbiddenException(); @@ -434,6 +440,9 @@ export class FieldsService { public addColumnToTable(table: Knex.CreateTableBuilder, field: RawField | Field, alter: Column | null = null): void { let column: Knex.ColumnBuilder; + // Don't attempt to add a DB column for alias / corrupt fields + if (field.type === 'alias' || field.type === 'unknown') return; + if (field.schema?.has_auto_increment) { column = table.increments(field.field); } else if (field.type === 'string') { @@ -445,6 +454,10 @@ export class FieldsService { column = table.string(field.field); } else if (field.type === 'hash') { column = table.string(field.field, 255); + } else if (field.type === 'dateTime') { + column = table.dateTime(field.field, { useTz: false }); + } else if (field.type === 'timestamp') { + column = table.timestamp(field.field, { useTz: true }); } else { column = table[field.type](field.field); } diff --git a/api/src/services/files.ts b/api/src/services/files.ts index 9b9bdc6780..80b03f0b0d 100644 --- a/api/src/services/files.ts +++ b/api/src/services/files.ts @@ -1,7 +1,6 @@ import formatTitle from '@directus/format-title'; import axios, { AxiosResponse } from 'axios'; -import parseEXIF from 'exif-reader'; -import { parse as parseICC } from 'icc'; +import exifr from 'exifr'; import { clone } from 'lodash'; import { extension } from 'mime-types'; import path from 'path'; @@ -13,8 +12,7 @@ import { ForbiddenException, ServiceUnavailableException } from '../exceptions'; import logger from '../logger'; import storage from '../storage'; import { AbstractServiceOptions, File, PrimaryKey } from '../types'; -import parseIPTC from '../utils/parse-iptc'; -import { toArray } from '../utils/to-array'; +import { toArray } from '@directus/shared/utils'; import { ItemsService, MutationOptions } from './items'; export class FilesService extends ItemsService { @@ -32,6 +30,14 @@ export class FilesService extends ItemsService { ): Promise { const payload = clone(data); + if ('folder' in payload === false) { + const settings = await this.knex.select('storage_default_folder').from('directus_settings').first(); + + if (settings?.storage_default_folder) { + payload.folder = settings.storage_default_folder; + } + } + if (primaryKey !== undefined) { await this.updateOne(primaryKey, payload, { emitEvents: false }); @@ -46,9 +52,10 @@ export class FilesService extends ItemsService { primaryKey = await this.createOne(payload, { emitEvents: false }); } - const fileExtension = path.extname(payload.filename_download) || (payload.type && extension(payload.type)); + const fileExtension = + path.extname(payload.filename_download) || (payload.type && '.' + extension(payload.type)) || ''; - payload.filename_disk = primaryKey + '.' 
+ fileExtension; + payload.filename_disk = primaryKey + (fileExtension || ''); if (!payload.type) { payload.type = 'application/octet-stream'; @@ -77,37 +84,30 @@ export class FilesService extends ItemsService { payload.height = meta.height; } - payload.filesize = meta.size; payload.metadata = {}; - if (meta.icc) { - try { - payload.metadata.icc = parseICC(meta.icc); - } catch (err) { - logger.warn(`Couldn't extract ICC information from file`); - logger.warn(err); + try { + payload.metadata = await exifr.parse(buffer.content, { + icc: true, + iptc: true, + ifd1: true, + interop: true, + translateValues: true, + reviveValues: true, + mergeOutput: false, + }); + if (payload.metadata?.iptc?.Headline) { + payload.title = payload.metadata.iptc.Headline; } - } - - if (meta.exif) { - try { - payload.metadata.exif = parseEXIF(meta.exif); - } catch (err) { - logger.warn(`Couldn't extract EXIF information from file`); - logger.warn(err); + if (!payload.description && payload.metadata?.iptc?.Caption) { + payload.description = payload.metadata.iptc.Caption; } - } - - if (meta.iptc) { - try { - payload.metadata.iptc = parseIPTC(meta.iptc); - payload.title = payload.metadata.iptc.headline || payload.title; - payload.description = payload.description || payload.metadata.iptc.caption; - payload.tags = payload.metadata.iptc.keywords; - } catch (err) { - logger.warn(`Couldn't extract IPTC information from file`); - logger.warn(err); + if (payload.metadata?.iptc?.Keywords) { + payload.tags = payload.metadata.iptc.Keywords; } + } catch (err) { + logger.warn(`Couldn't extract metadata from file`); + logger.warn(err); } } diff --git a/api/src/services/graphql.ts b/api/src/services/graphql.ts index ca297efffd..e0a7a721f9 100644 --- a/api/src/services/graphql.ts +++ b/api/src/services/graphql.ts @@ -44,11 +44,14 @@ import { import { Knex } from 'knex'; import { flatten, get, mapKeys, merge, set, uniq } from 'lodash'; import ms from 'ms'; +import { getCache } from '../cache'; import getDatabase from '../database'; import env from '../env'; -import { BaseException, GraphQLValidationException, InvalidPayloadException } from '../exceptions'; +import { ForbiddenException, GraphQLValidationException, InvalidPayloadException } from '../exceptions'; +import { BaseException } from '@directus/shared/exceptions'; import { listExtensions } from '../extensions'; -import { AbstractServiceOptions, Accountability, Action, GraphQLParams, Item, Query, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; +import { AbstractServiceOptions, Action, GraphQLParams, Item, Query, SchemaOverview } from '../types'; import { getGraphQLType } from '../utils/get-graphql-type'; import { reduceSchema } from '../utils/reduce-schema'; import { sanitizeQuery } from '../utils/sanitize-query'; @@ -1103,7 +1106,7 @@ export class GraphQLService { * Select the correct service for the given collection. This allows the individual services to run * their custom checks (f.e. 
it allows UsersService to prevent updating TFA secret from outside) */ - getService(collection: string): RolesService { + getService(collection: string): ItemsService { const opts = { knex: this.knex, accountability: this.accountability, @@ -1376,7 +1379,7 @@ export class GraphQLService { userAgent: req?.get('user-agent'), }); if (args.mode === 'cookie') { - res?.cookie('directus_refresh_token', result.refreshToken, { + res?.cookie(env.REFRESH_TOKEN_COOKIE_NAME, result.refreshToken, { httpOnly: true, domain: env.REFRESH_TOKEN_COOKIE_DOMAIN, maxAge: ms(env.REFRESH_TOKEN_TTL as string), @@ -1407,13 +1410,13 @@ export class GraphQLService { accountability: accountability, schema: this.schema, }); - const currentRefreshToken = args.refresh_token || req?.cookies.directus_refresh_token; + const currentRefreshToken = args.refresh_token || req?.cookies[env.REFRESH_TOKEN_COOKIE_NAME]; if (!currentRefreshToken) { throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`); } const result = await authenticationService.refresh(currentRefreshToken); if (args.mode === 'cookie') { - res?.cookie('directus_refresh_token', result.refreshToken, { + res?.cookie(env.REFRESH_TOKEN_COOKIE_NAME, result.refreshToken, { httpOnly: true, domain: env.REFRESH_TOKEN_COOKIE_DOMAIN, maxAge: ms(env.REFRESH_TOKEN_TTL as string), @@ -1443,7 +1446,7 @@ export class GraphQLService { accountability: accountability, schema: this.schema, }); - const currentRefreshToken = args.refresh_token || req?.cookies.directus_refresh_token; + const currentRefreshToken = args.refresh_token || req?.cookies[env.REFRESH_TOKEN_COOKIE_NAME]; if (!currentRefreshToken) { throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`); } @@ -1609,6 +1612,21 @@ export class GraphQLService { return true; }, }, + utils_cache_clear: { + type: GraphQLVoid, + resolve: async () => { + if (this.accountability?.admin !== true) { + throw new ForbiddenException(); + } + + const { cache, schemaCache } = getCache(); + + await cache?.clear(); + await schemaCache?.clear(); + + return; + }, + }, users_invite_accept: { type: GraphQLBoolean, args: { diff --git a/api/src/services/import.ts b/api/src/services/import.ts index f72203090a..b82ca3b6a2 100644 --- a/api/src/services/import.ts +++ b/api/src/services/import.ts @@ -1,6 +1,7 @@ import { Knex } from 'knex'; import getDatabase from '../database'; -import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types'; +import { AbstractServiceOptions, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; import { ForbiddenException, InvalidPayloadException } from '../exceptions'; import StreamArray from 'stream-json/streamers/StreamArray'; import { ItemsService } from './items'; diff --git a/api/src/services/items.ts b/api/src/services/items.ts index 7971593a34..8f4aa38718 100644 --- a/api/src/services/items.ts +++ b/api/src/services/items.ts @@ -9,10 +9,10 @@ import env from '../env'; import { ForbiddenException, InvalidPayloadException } from '../exceptions'; import { translateDatabaseError } from '../exceptions/database/translate'; import logger from '../logger'; +import { Accountability } from '@directus/shared/types'; import { AbstractService, AbstractServiceOptions, - Accountability, Action, Item as AnyItem, PermissionsAction, @@ -21,7 +21,7 @@ import { SchemaOverview, } from '../types'; import getASTFromQuery from '../utils/get-ast-from-query'; -import { toArray } from 
'../utils/to-array'; +import { toArray } from '@directus/shared/utils'; import { AuthorizationService } from './authorization'; import { PayloadService } from './payload'; @@ -279,6 +279,17 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractService<Item> { throw new ForbiddenException(); } + emitAsyncSafe(`${this.eventScope}.read`, { + event: `${this.eventScope}.read`, + accountability: this.accountability, + collection: this.collection, + query, + action: 'read', + payload: records, + schema: this.schema, + database: getDatabase(), + }); + return records as Item[]; } @@ -306,17 +317,6 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractService<Item> { throw new ForbiddenException(); } - emitAsyncSafe(`${this.eventScope}.read`, { - event: `${this.eventScope}.read`, - accountability: this.accountability, - collection: this.collection, - item: key, - action: 'read', - payload: results, - schema: this.schema, - database: getDatabase(), - }); - return results[0]; } @@ -344,17 +344,6 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractService<Item> { const results = await this.readByQuery(queryWithKeys, opts); - emitAsyncSafe(`${this.eventScope}.read`, { - event: `${this.eventScope}.read`, - accountability: this.accountability, - collection: this.collection, - item: keys, - action: 'read', - payload: results, - schema: this.schema, - database: getDatabase(), - }); - return results; } diff --git a/api/src/services/mail/index.ts b/api/src/services/mail/index.ts index 19efaf95bd..b02a4c90ce 100644 --- a/api/src/services/mail/index.ts +++ b/api/src/services/mail/index.ts @@ -6,7 +6,8 @@ import getDatabase from '../../database'; import env from '../../env'; import { InvalidPayloadException } from '../../exceptions'; import logger from '../../logger'; -import { AbstractServiceOptions, Accountability, SchemaOverview } from '../../types'; +import { AbstractServiceOptions, SchemaOverview } from '../../types'; +import { Accountability } from '@directus/shared/types'; import getMailer from '../../mailer'; import { Transporter, SendMailOptions } from 'nodemailer'; import prettier from 'prettier'; diff --git a/api/src/services/meta.ts b/api/src/services/meta.ts index c7b2ff6df7..d88a099246 100644 --- a/api/src/services/meta.ts +++ b/api/src/services/meta.ts @@ -1,10 +1,11 @@ import { Knex } from 'knex'; import getDatabase from '../database'; import { ForbiddenException } from '../exceptions'; -import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types'; +import { AbstractServiceOptions, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; import { Query } from '../types/query'; import { applyFilter, applySearch } from '../utils/apply-query'; -import { parseFilter } from '../utils/parse-filter'; +import { parseFilter } from '@directus/shared/utils'; export class MetaService { knex: Knex; diff --git a/api/src/services/payload.ts b/api/src/services/payload.ts index 63bd7c6bed..d428f760d7 100644 --- a/api/src/services/payload.ts +++ b/api/src/services/payload.ts @@ -1,13 +1,14 @@ import argon2 from 'argon2'; -import { format, formatISO, parse, parseISO } from 'date-fns'; +import { format, parseISO } from 'date-fns'; import Joi from 'joi'; import { Knex } from 'knex'; import { clone, cloneDeep, isObject, isPlainObject, omit } from 'lodash'; import { v4 as uuidv4 } from 'uuid'; import getDatabase from '../database'; import { ForbiddenException, InvalidPayloadException } from '../exceptions'; -import { AbstractServiceOptions, Accountability, Item, PrimaryKey, Query, SchemaOverview } from '../types';
-import { toArray } from '../utils/to-array'; +import { AbstractServiceOptions, Item, PrimaryKey, Query, SchemaOverview, Alterations } from '../types'; +import { Accountability } from '@directus/shared/types'; +import { toArray } from '@directus/shared/utils'; import { ItemsService } from './items'; type Action = 'create' | 'read' | 'update'; @@ -21,16 +22,6 @@ type Transformers = { }) => Promise; }; -type Alterations = { - create: { - [key: string]: any; - }[]; - update: { - [key: string]: any; - }[]; - delete: (number | string)[]; -}; - /** * Process a given payload for a collection to ensure the special fields (hash, uuid, date etc) are * handled correctly. @@ -50,13 +41,6 @@ export class PayloadService { return this; } - /** - * @todo allow this to be extended - * - * @todo allow these extended special types to have "field dependencies"? - * f.e. the file-links transformer needs the id and filename_download to be fetched from the DB - * in order to work - */ public transformers: Transformers = { async hash({ action, value }) { if (!value) return; @@ -222,11 +206,15 @@ export class PayloadService { ['dateTime', 'date', 'timestamp'].includes(field.type) ); - if (dateColumns.length === 0) return payloads; + const timeColumns = fieldsInCollection.filter(([_name, field]) => { + return field.type === 'time'; + }); + + if (dateColumns.length === 0 && timeColumns.length === 0) return payloads; for (const [name, dateColumn] of dateColumns) { for (const payload of payloads) { - let value = payload[name]; + let value: number | string | Date = payload[name]; if (value === null || value === '0000-00-00') { payload[name] = null; @@ -236,32 +224,54 @@ export class PayloadService { if (!value) continue; if (action === 'read') { - if (typeof value === 'string') value = new Date(value); + if (typeof value === 'number' || typeof value === 'string') { + value = new Date(value); + } if (dateColumn.type === 'timestamp') { - const newValue = formatISO(value); + const newValue = value.toISOString(); payload[name] = newValue; } if (dateColumn.type === 'dateTime') { - // Strip off the Z at the end of a non-timezone datetime value - const newValue = format(value, "yyyy-MM-dd'T'HH:mm:ss"); + const year = String(value.getUTCFullYear()); + const month = String(value.getUTCMonth() + 1).padStart(2, '0'); + const date = String(value.getUTCDate()).padStart(2, '0'); + const hours = String(value.getUTCHours()).padStart(2, '0'); + const minutes = String(value.getUTCMinutes()).padStart(2, '0'); + const seconds = String(value.getUTCSeconds()).padStart(2, '0'); + + const newValue = `${year}-${month}-${date}T${hours}:${minutes}:${seconds}`; payload[name] = newValue; } if (dateColumn.type === 'date') { + const [year, month, day] = value.toISOString().substr(0, 10).split('-'); + // Strip off the time / timezone information from a date-only value - const newValue = format(value, 'yyyy-MM-dd'); + const newValue = `${year}-${month}-${day}`; payload[name] = newValue; } } else { - if (value instanceof Date === false) { + if (value instanceof Date === false && typeof value === 'string') { if (dateColumn.type === 'date') { - const newValue = parse(value, 'yyyy-MM-dd', new Date()); - payload[name] = newValue; + const [date] = value.split('T'); + const [year, month, day] = date.split('-'); + + payload[name] = new Date(Date.UTC(Number(year), Number(month) - 1, Number(day))); } - if (dateColumn.type === 'timestamp' || dateColumn.type === 'dateTime') { + if (dateColumn.type === 'dateTime') { + const [date, time] = value.split('T'); + const 
[year, month, day] = date.split('-'); + const [hours, minutes, seconds] = time.substring(0, 8).split(':'); + + payload[name] = new Date( + Date.UTC(Number(year), Number(month) - 1, Number(day), Number(hours), Number(minutes), Number(seconds)) + ); + } + + if (dateColumn.type === 'timestamp') { const newValue = parseISO(value); payload[name] = newValue; } @@ -270,6 +280,22 @@ } } + /** + * Some DB drivers (MS SQL f.e.) return time values as Date objects. For consistency's sake, + * we'll abstract those back to hh:mm:ss + */ + for (const [name] of timeColumns) { + for (const payload of payloads) { + const value = payload[name]; + + if (!value) continue; + + if (action === 'read') { + if (value instanceof Date) payload[name] = format(value, 'HH:mm:ss'); + } + } + } + return payloads; } @@ -318,6 +344,9 @@ const relatedPrimary = this.schema.collections[relatedCollection].primary; const relatedRecord: Partial<Item> = payload[relation.field]; + + if (['string', 'number'].includes(typeof relatedRecord)) continue; + const hasPrimaryKey = relatedPrimary in relatedRecord; let relatedPrimaryKey: PrimaryKey = relatedRecord[relatedPrimary]; @@ -455,7 +484,8 @@ schema: this.schema, }); - const relatedRecords: Partial<Item>[] = []; + const recordsToUpsert: Partial<Item>[] = []; + const savedPrimaryKeys: PrimaryKey[] = []; // Nested array of individual items if (Array.isArray(payload[relation.meta!.one_field!])) { @@ -465,30 +495,46 @@ let record = cloneDeep(relatedRecord); if (typeof relatedRecord === 'string' || typeof relatedRecord === 'number') { - const exists = !!(await this.knex - .select(relatedPrimaryKeyField) + const existingRecord = await this.knex + .select(relatedPrimaryKeyField, relation.field) .from(relation.collection) .where({ [relatedPrimaryKeyField]: record }) - .first()); + .first(); - if (exists === false) { + if (!!existingRecord === false) { throw new ForbiddenException(); } + // If the related item is already associated to the current item, and there are no + // other updates (which is indicated by the fact that this is just the PK), we can + // ignore updating this item. This makes sure we don't trigger any update logic + // for items that aren't actually being updated. NOTE: We use == here, as the + // primary key might be reported as a string instead of a number, coming from the + // http route, and/or a bigInteger in the DB + if ( + existingRecord[relation.field] == parent || + existingRecord[relation.field] == payload[currentPrimaryKeyField] + ) { + savedPrimaryKeys.push(existingRecord[relatedPrimaryKeyField]); + continue; + } + record = { [relatedPrimaryKeyField]: relatedRecord, }; } - relatedRecords.push({ + recordsToUpsert.push({ ...record, [relation.field]: parent || payload[currentPrimaryKeyField], }); } - const savedPrimaryKeys = await itemsService.upsertMany(relatedRecords, { - onRevisionCreate: (id) => revisions.push(id), - }); + savedPrimaryKeys.push( + ...(await itemsService.upsertMany(recordsToUpsert, { + onRevisionCreate: (id) => revisions.push(id), + })) + ); const query: Query = { filter: { @@ -540,7 +586,7 @@ } if (alterations.update) { - const primaryKeyField = this.schema.collections[this.collection].primary; + const primaryKeyField = this.schema.collections[relation.collection].primary; for (const item of alterations.update) { await itemsService.updateOne( diff --git a/api/src/services/relations.ts b/api/src/services/relations.ts index 1aa0523e33..6df956a4ab 100644 --- a/api/src/services/relations.ts +++ b/api/src/services/relations.ts @@ -1,8 +1,9 @@ import { Knex } from 'knex'; import { systemRelationRows } from '../database/system-data/relations'; import { ForbiddenException, InvalidPayloadException } from '../exceptions'; -import { AbstractServiceOptions, SchemaOverview, Query, Relation, RelationMeta, Accountability } from '../types'; -import { toArray } from '../utils/to-array'; +import { AbstractServiceOptions, SchemaOverview, Query, Relation, RelationMeta } from '../types'; +import { Accountability } from '@directus/shared/types'; +import { toArray } from '@directus/shared/utils'; import { ItemsService, QueryOptions } from './items'; import { PermissionsService } from './permissions'; import SchemaInspector from '@directus/schema'; diff --git a/api/src/services/roles.ts b/api/src/services/roles.ts index 57f7203f7c..eff48f77b4 100644 --- a/api/src/services/roles.ts +++ b/api/src/services/roles.ts @@ -1,6 +1,6 @@ -import { UnprocessableEntityException } from '../exceptions'; -import { AbstractServiceOptions, PrimaryKey } from '../types'; -import { ItemsService } from './items'; +import { ForbiddenException, UnprocessableEntityException } from '../exceptions'; +import { AbstractServiceOptions, PrimaryKey, Query, Alterations, Item } from '../types'; +import { ItemsService, MutationOptions } from './items'; import { PermissionsService } from './permissions'; import { PresetsService } from './presets'; import { UsersService } from './users'; @@ -10,21 +10,89 @@ export class RolesService extends ItemsService { super('directus_roles', options); } + private async checkForOtherAdminRoles(excludeKeys: PrimaryKey[]): Promise<void> { + // Make sure there's at least one admin role left after this deletion is done + const otherAdminRoles = await this.knex + .count('*', { as: 'count' }) + .from('directus_roles') + .whereNotIn('id', excludeKeys) + .andWhere({ admin_access: true }) + .first(); + + const otherAdminRolesCount = +(otherAdminRoles?.count || 0); + if (otherAdminRolesCount === 0) throw new UnprocessableEntityException(`You can't delete the last admin role.`); + } + + private async checkForOtherAdminUsers(key: PrimaryKey, users: Alterations | Item[]): Promise<void> { + const role = await
this.knex.select('admin_access').from('directus_roles').where('id', '=', key).first(); + + if (!role) throw new ForbiddenException(); + + // The users that will now be in this new non-admin role + let userKeys: PrimaryKey[] = []; + + if (Array.isArray(users)) { + userKeys = users.map((user) => (typeof user === 'string' ? user : user.id)).filter((id) => id); + } else { + userKeys = users.update.map((user) => user.id).filter((id) => id); + } + + const usersThatWereInRoleBefore = (await this.knex.select('id').from('directus_users').where('role', '=', key)).map( + (user) => user.id + ); + const usersThatAreRemoved = usersThatWereInRoleBefore.filter((id) => userKeys.includes(id) === false); + + const usersThatAreAdded = Array.isArray(users) ? users : users.create; + + // If the role the users are moved to is an admin role, and there's at least 1 (new) admin + // user, we don't have to check for other admin + // users + if ((role.admin_access === true || role.admin_access === 1) && usersThatAreAdded.length > 0) return; + + const otherAdminUsers = await this.knex + .count('*', { as: 'count' }) + .from('directus_users') + .whereNotIn('directus_users.id', [...userKeys, ...usersThatAreRemoved]) + .andWhere({ 'directus_roles.admin_access': true }) + .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') + .first(); + + const otherAdminUsersCount = +(otherAdminUsers?.count || 0); + + if (otherAdminUsersCount === 0) { + throw new UnprocessableEntityException(`You can't remove the last admin user from the admin role.`); + } + + return; + } + + async updateOne(key: PrimaryKey, data: Record<string, any>, opts?: MutationOptions): Promise<PrimaryKey> { + if ('admin_access' in data && data.admin_access === false) { + await this.checkForOtherAdminRoles([key]); + } + + if ('users' in data) { + await this.checkForOtherAdminUsers(key, data.users); + } + + return super.updateOne(key, data, opts); + } + + async updateMany(keys: PrimaryKey[], data: Record<string, any>, opts?: MutationOptions): Promise<PrimaryKey[]> { + if ('admin_access' in data && data.admin_access === false) { + await this.checkForOtherAdminRoles(keys); + } + + return super.updateMany(keys, data, opts); + } + async deleteOne(key: PrimaryKey): Promise<PrimaryKey> { await this.deleteMany([key]); return key; } async deleteMany(keys: PrimaryKey[]): Promise<PrimaryKey[]> { - // Make sure there's at least one admin role left after this deletion is done - const otherAdminRoles = await this.knex - .count('*', { as: 'count' }) - .from('directus_roles') - .whereNotIn('id', keys) - .andWhere({ admin_access: true }) - .first(); - const otherAdminRolesCount = +(otherAdminRoles?.count || 0); - if (otherAdminRolesCount === 0) throw new UnprocessableEntityException(`You can't delete the last admin role.`); + await this.checkForOtherAdminRoles(keys); await this.knex.transaction(async (trx) => { const itemsService = new ItemsService('directus_roles', { @@ -77,6 +145,10 @@ return keys; } + deleteByQuery(query: Query, opts?: MutationOptions): Promise<PrimaryKey[]> { + return super.deleteByQuery(query, opts); + } + /** * @deprecated Use `deleteOne` or `deleteMany` instead */ diff --git a/api/src/services/server.ts b/api/src/services/server.ts index ef81579ce5..827e6c7db4 100644 --- a/api/src/services/server.ts +++ b/api/src/services/server.ts @@ -12,8 +12,9 @@ import env from '../env'; import logger from '../logger'; import { rateLimiter } from '../middleware/rate-limiter'; import storage from '../storage'; -import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types';
-import { toArray } from '../utils/to-array'; +import { AbstractServiceOptions, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; +import { toArray } from '@directus/shared/utils'; import getMailer from '../mailer'; import { SettingsService } from './settings'; diff --git a/api/src/services/specifications.ts b/api/src/services/specifications.ts index c4d21e60ed..eb4957b5d1 100644 --- a/api/src/services/specifications.ts +++ b/api/src/services/specifications.ts @@ -7,16 +7,8 @@ import { OpenAPIObject, OperationObject, PathItemObject, SchemaObject, TagObject import { version } from '../../package.json'; import getDatabase from '../database'; import env from '../env'; -import { - AbstractServiceOptions, - Accountability, - Collection, - Field, - Permission, - Relation, - SchemaOverview, - types, -} from '../types'; +import { AbstractServiceOptions, Collection, Permission, Relation, SchemaOverview } from '../types'; +import { Accountability, Field, Type } from '@directus/shared/types'; import { getRelationType } from '../utils/get-relation-type'; import { CollectionsService } from './collections'; import { FieldsService } from './fields'; @@ -459,20 +451,33 @@ class OASSpecsService implements SpecificationSubService { } private fieldTypes: Record< - typeof types[number], + Type, { type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'integer' | 'null' | undefined; format?: string; items?: any; } > = { + alias: { + type: 'string', + }, bigInteger: { type: 'integer', format: 'int64', }, + binary: { + type: 'string', + format: 'binary', + }, boolean: { type: 'boolean', }, + csv: { + type: 'array', + items: { + type: 'string', + }, + }, date: { type: 'string', format: 'date', @@ -488,6 +493,9 @@ class OASSpecsService implements SpecificationSubService { type: 'number', format: 'float', }, + hash: { + type: 'string', + }, integer: { type: 'integer', }, @@ -511,23 +519,13 @@ class OASSpecsService implements SpecificationSubService { type: 'string', format: 'timestamp', }, - binary: { - type: 'string', - format: 'binary', + unknown: { + type: undefined, }, uuid: { type: 'string', format: 'uuid', }, - csv: { - type: 'array', - items: { - type: 'string', - }, - }, - hash: { - type: 'string', - }, }; } diff --git a/api/src/services/users.ts b/api/src/services/users.ts index cc80d95992..0a90249ca9 100644 --- a/api/src/services/users.ts +++ b/api/src/services/users.ts @@ -1,11 +1,11 @@ import argon2 from 'argon2'; import jwt from 'jsonwebtoken'; import { Knex } from 'knex'; -import { clone } from 'lodash'; +import { clone, cloneDeep } from 'lodash'; import getDatabase from '../database'; import env from '../env'; +import { FailedValidationException } from '@directus/shared/exceptions'; import { - FailedValidationException, ForbiddenException, InvalidPayloadException, UnprocessableEntityException, @@ -13,13 +13,16 @@ import { } from '../exceptions'; import { RecordNotUniqueException } from '../exceptions/database/record-not-unique'; import logger from '../logger'; -import { AbstractServiceOptions, Accountability, Item, PrimaryKey, Query, SchemaOverview } from '../types'; +import { AbstractServiceOptions, Item, PrimaryKey, Query, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; import isUrlAllowed from '../utils/is-url-allowed'; -import { toArray } from '../utils/to-array'; +import { toArray } from '@directus/shared/utils'; import { AuthenticationService } from './authentication'; import { ItemsService, 
MutationOptions } from './items'; import { MailService } from './mail'; import { SettingsService } from './settings'; +import { stall } from '../utils/stall'; +import { performance } from 'perf_hooks'; export class UsersService extends ItemsService { knex: Knex; @@ -99,12 +102,33 @@ return true; } + private async checkRemainingAdminExistence(excludeKeys: PrimaryKey[]) { + // Make sure there's at least one admin user left after this deletion is done + const otherAdminUsers = await this.knex + .count('*', { as: 'count' }) + .from('directus_users') + .whereNotIn('directus_users.id', excludeKeys) + .andWhere({ 'directus_roles.admin_access': true }) + .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') + .first(); + + const otherAdminUsersCount = +(otherAdminUsers?.count || 0); + + if (otherAdminUsersCount === 0) { + throw new UnprocessableEntityException(`You can't remove the last admin user from the role.`); + } + } + /** * Create a new user */ async createOne(data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey> { - const email = data.email.toLowerCase(); - await this.checkUniqueEmails([email]); + const email = data.email?.toLowerCase(); + + if (email) { + await this.checkUniqueEmails([email]); + } + return await this.service.createOne(data, opts); } @@ -129,6 +153,14 @@ } async updateOne(key: PrimaryKey, data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey> { + if (data.role) { + const newRole = await this.knex.select('admin_access').from('directus_roles').where('id', data.role).first(); + + if (newRole && !newRole.admin_access) { + await this.checkRemainingAdminExistence([key]); + } + } + const email = data.email?.toLowerCase(); if (email) { @@ -147,6 +179,14 @@ } async updateMany(keys: PrimaryKey[], data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey[]> { + if (data.role) { + const newRole = await this.knex.select('admin_access').from('directus_roles').where('id', data.role).first(); + + if (newRole && !newRole.admin_access) { + await this.checkRemainingAdminExistence(keys); + } + } + const email = data.email?.toLowerCase(); if (email) { @@ -165,6 +205,29 @@ } async updateByQuery(query: Query, data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey[]> { + if (data.role) { + const newRole = await this.knex.select('admin_access').from('directus_roles').where('id', data.role).first(); + + if (newRole && !newRole.admin_access) { + // This is duplicated a touch, but we need to know the keys first + // Not authenticated: + const itemsService = new ItemsService('directus_users', { + knex: this.knex, + schema: this.schema, + }); + + const readQuery = cloneDeep(query); + readQuery.fields = ['id']; + + // We read the IDs of the items based on the query, and then run `updateMany`.
`updateMany` does its own + // permissions check for the keys, so we don't have to make this an authenticated read + const itemsToUpdate = await itemsService.readByQuery(readQuery); + const keys = itemsToUpdate.map((item) => item.id); + + await this.checkRemainingAdminExistence(keys); + } + } + const email = data.email?.toLowerCase(); if (email) { @@ -183,20 +246,7 @@ } async deleteOne(key: PrimaryKey, opts?: MutationOptions): Promise<PrimaryKey> { - // Make sure there's at least one admin user left after this deletion is done - const otherAdminUsers = await this.knex - .count('*', { as: 'count' }) - .from('directus_users') - .whereNot('directus_users.id', key) - .andWhere({ 'directus_roles.admin_access': true }) - .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') - .first(); - - const otherAdminUsersCount = +(otherAdminUsers?.count || 0); - - if (otherAdminUsersCount === 0) { - throw new UnprocessableEntityException(`You can't delete the last admin user.`); - } + await this.checkRemainingAdminExistence([key]); await this.service.deleteOne(key, opts); @@ -204,26 +254,32 @@ } async deleteMany(keys: PrimaryKey[], opts?: MutationOptions): Promise<PrimaryKey[]> { - // Make sure there's at least one admin user left after this deletion is done - const otherAdminUsers = await this.knex - .count('*', { as: 'count' }) - .from('directus_users') - .whereNotIn('directus_users.id', keys) - .andWhere({ 'directus_roles.admin_access': true }) - .leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id') - .first(); - - const otherAdminUsersCount = +(otherAdminUsers?.count || 0); - - if (otherAdminUsersCount === 0) { - throw new UnprocessableEntityException(`You can't delete the last admin user.`); - } + await this.checkRemainingAdminExistence(keys); await this.service.deleteMany(keys, opts); return keys; } + async deleteByQuery(query: Query, opts?: MutationOptions): Promise<PrimaryKey[]> { + const primaryKeyField = this.schema.collections[this.collection].primary; + const readQuery = cloneDeep(query); + readQuery.fields = [primaryKeyField]; + + // Not authenticated: + const itemsService = new ItemsService(this.collection, { + knex: this.knex, + schema: this.schema, + }); + + const itemsToDelete = await itemsService.readByQuery(readQuery); + const keys: PrimaryKey[] = itemsToDelete.map((item: Item) => item[primaryKeyField]); + + if (keys.length === 0) return []; + + return await this.deleteMany(keys, opts); + } + async inviteUser(email: string | string[], role: string, url: string | null, subject?: string | null): Promise<void> { const emails = toArray(email); @@ -292,8 +348,14 @@ } async requestPasswordReset(email: string, url: string | null, subject?: string | null): Promise<void> { + const STALL_TIME = 500; + const timeStart = performance.now(); + const user = await this.knex.select('id').from('directus_users').where({ email }).first(); - if (!user) throw new ForbiddenException(); + if (!user) { + await stall(STALL_TIME, timeStart); + throw new ForbiddenException(); + } const mailService = new MailService({ schema: this.schema, @@ -322,6 +384,8 @@ }, }, }); + + await stall(STALL_TIME, timeStart); } async resetPassword(token: string, password: string): Promise<void> { diff --git a/api/src/services/utils.ts b/api/src/services/utils.ts index d7a8cf6806..4fbb6b4fbe 100644 --- a/api/src/services/utils.ts +++ b/api/src/services/utils.ts @@
-2,7 +2,8 @@ import { Knex } from 'knex'; import getDatabase from '../database'; import { systemCollectionRows } from '../database/system-data/collections'; import { ForbiddenException, InvalidPayloadException } from '../exceptions'; -import { AbstractServiceOptions, Accountability, PrimaryKey, SchemaOverview } from '../types'; +import { AbstractServiceOptions, PrimaryKey, SchemaOverview } from '../types'; +import { Accountability } from '@directus/shared/types'; export class UtilsService { knex: Knex; diff --git a/api/src/start.ts b/api/src/start.ts index 392567b8ca..8228c8e4ee 100644 --- a/api/src/start.ts +++ b/api/src/start.ts @@ -1,6 +1,8 @@ import emitter, { emitAsyncSafe } from './emitter'; import env from './env'; import logger from './logger'; +import checkForUpdate from 'update-check'; +import pkg from '../package.json'; // If this file is called directly using node, start the server if (require.main === module) { @@ -18,6 +20,16 @@ export default async function start(): Promise { server .listen(port, () => { + checkForUpdate(pkg) + .then((update) => { + if (update) { + logger.warn(`Update available: ${pkg.version} -> ${update.latest}`); + } + }) + .catch(() => { + // No need to log/warn here. The update message is only an informative nice-to-have + }); + logger.info(`Server started at port ${port}`); emitAsyncSafe('server.start'); }) diff --git a/api/src/storage.ts b/api/src/storage.ts index 6864e690c3..f398714abe 100644 --- a/api/src/storage.ts +++ b/api/src/storage.ts @@ -4,7 +4,7 @@ import { GoogleCloudStorage } from '@directus/drive-gcs'; import { AmazonWebServicesS3Storage } from '@directus/drive-s3'; import env from './env'; import { getConfigFromEnv } from './utils/get-config-from-env'; -import { toArray } from './utils/to-array'; +import { toArray } from '@directus/shared/utils'; import { validateEnv } from './utils/validate-env'; validateEnv(['STORAGE_LOCATIONS']); diff --git a/api/src/types/assets.ts b/api/src/types/assets.ts index 8fa6718c59..b97c495527 100644 --- a/api/src/types/assets.ts +++ b/api/src/types/assets.ts @@ -1,10 +1,84 @@ -export type Transformation = { +import { ResizeOptions, Sharp } from 'sharp'; + +// List of allowed sharp methods to expose. +// +// This is a literal, so we can use it to validate request parameters. 
+export const TransformationMethods /*: readonly (keyof Sharp)[]*/ = [ + // Output options + // https://sharp.pixelplumbing.com/api-output + 'toFormat', + 'jpeg', + 'png', + 'tiff', + 'webp', + + // Resizing + // https://sharp.pixelplumbing.com/api-resize + 'resize', + 'extend', + 'extract', + 'trim', + + // Image operations + // https://sharp.pixelplumbing.com/api-operation + 'rotate', + 'flip', + 'flop', + 'sharpen', + 'median', + 'blur', + 'flatten', + 'gamma', + 'negate', + 'normalise', + 'normalize', + 'clahe', + 'convolve', + 'threshold', + 'linear', + 'recomb', + 'modulate', + + // Color manipulation + // https://sharp.pixelplumbing.com/api-colour + 'tint', + 'greyscale', + 'grayscale', + 'toColorspace', + 'toColourspace', + + // Channel manipulation + // https://sharp.pixelplumbing.com/api-channel + 'removeAlpha', + 'ensureAlpha', + 'extractChannel', + 'bandbool', +] as const; + +// Helper types +type AllowedSharpMethods = Pick<Sharp, typeof TransformationMethods[number]>; + +export type TransformationMap = { + [M in keyof AllowedSharpMethods]: readonly [M, ...Parameters<AllowedSharpMethods[M]>]; +}; + +export type Transformation = TransformationMap[keyof TransformationMap]; + +export type TransformationParams = { key?: string; - width?: number; // width - height?: number; // height - fit?: 'cover' | 'contain' | 'inside' | 'outside'; // fit - withoutEnlargement?: boolean; // Without Enlargement + transforms?: Transformation[]; +}; + +// Transformation preset is defined in the admin UI. +export type TransformationPreset = TransformationPresetFormat & + TransformationPresetResize & + TransformationParams & { key: string }; + +export type TransformationPresetFormat = { + format?: 'jpg' | 'jpeg' | 'png' | 'webp' | 'tiff'; quality?: number; }; -// @NOTE Keys used in Transformation should match ASSET_GENERATION_QUERY_KEYS in constants.ts +export type TransformationPresetResize = Pick<ResizeOptions, 'width' | 'height' | 'fit' | 'withoutEnlargement'>; + +// @NOTE Keys used in TransformationParams should match ASSET_GENERATION_QUERY_KEYS in constants.ts diff --git a/api/src/types/collection.ts b/api/src/types/collection.ts index e589efb85e..37362d1789 100644 --- a/api/src/types/collection.ts +++ b/api/src/types/collection.ts @@ -1,5 +1,5 @@ import { Table } from 'knex-schema-inspector/dist/types/table'; -import { Field } from './field'; +import { Field } from '@directus/shared/types'; export type CollectionMeta = { collection: string; diff --git a/api/src/types/deep-partial.d.ts b/api/src/types/deep-partial.d.ts deleted file mode 100644 index bc66618da4..0000000000 --- a/api/src/types/deep-partial.d.ts +++ /dev/null @@ -1,40 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ -/* eslint-disable @typescript-eslint/ban-types */ - -type Primitive = string | number | boolean | bigint | symbol | undefined | null; -type Builtin = Primitive | Function | Date | Error | RegExp; -type IsTuple<T> = T extends [infer A] - ? T - : T extends [infer A, infer B] - ? T - : T extends [infer A, infer B, infer C] - ? T - : T extends [infer A, infer B, infer C, infer D] - ? T - : T extends [infer A, infer B, infer C, infer D, infer E] - ? T - : never; - -type DeepPartial<T> = T extends Primitive | Builtin - ? T - : T extends Map<infer K, infer V> - ? Map<DeepPartial<K>, DeepPartial<V>> - : T extends ReadonlyMap<infer K, infer V> - ? ReadonlyMap<DeepPartial<K>, DeepPartial<V>> - : T extends WeakMap<infer K, infer V> - ? WeakMap<DeepPartial<K>, DeepPartial<V>> - : T extends Set<infer U> - ? Set<DeepPartial<U>> - : T extends ReadonlySet<infer U> - ? ReadonlySet<DeepPartial<U>> - : T extends WeakSet<infer U> - ? WeakSet<DeepPartial<U>> - : T extends Array<infer U> - ? T extends IsTuple<T> - ? { [K in keyof T]?: DeepPartial<T[K]> } - : Array<DeepPartial<U>> - : T extends Promise<infer U> - ? Promise<DeepPartial<U>> - : T extends {} - ?
{ [K in keyof T]?: DeepPartial<T[K]> } - : Partial<T>; diff --git a/api/src/types/express.d.ts b/api/src/types/express.d.ts index 528f44f51a..1c65aa2e4e 100644 --- a/api/src/types/express.d.ts +++ b/api/src/types/express.d.ts @@ -2,7 +2,7 @@ * Custom properties on the req object in express */ -import { Accountability } from './accountability'; +import { Accountability } from '@directus/shared/types'; import { Query } from './query'; import { SchemaOverview } from './schema'; diff --git a/api/src/types/field.ts b/api/src/types/field.ts deleted file mode 100644 index d0b518a89e..0000000000 --- a/api/src/types/field.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { Column } from 'knex-schema-inspector/dist/types/column'; - -export const types = [ - 'bigInteger', - 'boolean', - 'date', - 'dateTime', - 'decimal', - 'float', - 'integer', - 'json', - 'string', - 'text', - 'time', - 'timestamp', - 'binary', - 'uuid', - 'hash', - 'csv', -] as const; - -export type FieldMeta = { - id: number; - collection: string; - field: string; - special: string[] | null; - interface: string | null; - options: Record<string, any> | null; - readonly: boolean; - hidden: boolean; - sort: number | null; - width: string | null; - group: number | null; - note: string | null; - translations: null; -}; - -export type Field = { - collection: string; - field: string; - type: typeof types[number]; - schema: Column | null; - meta: FieldMeta | null; -}; diff --git a/api/src/types/files.ts b/api/src/types/files.ts index ca5ebef0c5..cc985555d9 100644 --- a/api/src/types/files.ts +++ b/api/src/types/files.ts @@ -1,4 +1,3 @@ -/** @todo finalize */ export type File = { id: string; // uuid storage: string; diff --git a/api/src/types/index.ts b/api/src/types/index.ts index 4fbf2d9510..d789825013 100644 --- a/api/src/types/index.ts +++ b/api/src/types/index.ts @@ -1,14 +1,13 @@ -export * from './accountability'; export * from './activity'; export * from './assets'; export * from './ast'; export * from './collection'; export * from './extensions'; -export * from './field'; export * from './files'; export * from './graphql'; export * from './items'; export * from './meta'; +export * from './migration'; export * from './permissions'; export * from './query'; export * from './relation'; diff --git a/api/src/types/items.ts b/api/src/types/items.ts index 6c79ddf4e1..17c9abfcf5 100644 --- a/api/src/types/items.ts +++ b/api/src/types/items.ts @@ -6,3 +6,13 @@ export type Item = Record<string, any>; export type PrimaryKey = string | number; + +export type Alterations = { + create: { + [key: string]: any; + }[]; + update: { + [key: string]: any; + }[]; + delete: (number | string)[]; +}; diff --git a/api/src/types/migration.ts b/api/src/types/migration.ts new file mode 100644 index 0000000000..0819484a83 --- /dev/null +++ b/api/src/types/migration.ts @@ -0,0 +1,5 @@ +export type Migration = { + version: string; + name: string; + timestamp: Date; +}; diff --git a/api/src/types/query.ts b/api/src/types/query.ts index 319bdee40c..ad4925b275 100644 --- a/api/src/types/query.ts +++ b/api/src/types/query.ts @@ -53,5 +53,3 @@ export type FilterOperator = | 'nnull' | 'empty' | 'nempty'; - -export type ValidationOperator = 'required' | 'regex'; diff --git a/api/src/types/schema.ts b/api/src/types/schema.ts index b5f4b33a60..0bc23e94a2 100644 --- a/api/src/types/schema.ts +++ b/api/src/types/schema.ts @@ -1,4 +1,4 @@ -import { types } from './field'; +import { Type } from '@directus/shared/types'; import { Permission } from './permissions'; import { Relation } from './relation'; @@ -15,7 +15,7
@@ type CollectionsOverview = { field: string; defaultValue: any; nullable: boolean; - type: typeof types[number] | 'unknown' | 'alias'; + type: Type | 'unknown' | 'alias'; dbType: string | null; precision: number | null; scale: number | null; diff --git a/api/src/types/services.ts b/api/src/types/services.ts index a38673693a..5794f75e8c 100644 --- a/api/src/types/services.ts +++ b/api/src/types/services.ts @@ -1,6 +1,6 @@ import { Knex } from 'knex'; import { SchemaOverview } from '../types'; -import { Accountability } from './accountability'; +import { Accountability } from '@directus/shared/types'; import { Item, PrimaryKey } from './items'; import { PermissionsAction } from './permissions'; import { Query } from './query'; diff --git a/api/src/types/shims.d.ts b/api/src/types/shims.d.ts index 98fda04c9c..4ed0eeb3e9 100644 --- a/api/src/types/shims.d.ts +++ b/api/src/types/shims.d.ts @@ -3,16 +3,6 @@ declare module 'grant' { export default grant; } -declare module 'icc' { - const parse: (buf: Buffer) => Record; - export { parse }; -} - -declare module 'exif-reader' { - const exifReader: (buf: Buffer) => Record; - export default exifReader; -} - declare module 'pino-http' { import PinoHttp from '@types/pino-http'; const pinoHttp: PinoHttp; diff --git a/api/src/utils/get-ast-from-query.ts b/api/src/utils/get-ast-from-query.ts index 06e13e35d6..27e689bdaf 100644 --- a/api/src/utils/get-ast-from-query.ts +++ b/api/src/utils/get-ast-from-query.ts @@ -4,15 +4,8 @@ import { Knex } from 'knex'; import { cloneDeep, mapKeys, omitBy } from 'lodash'; -import { - Accountability, - AST, - FieldNode, - NestedCollectionNode, - PermissionsAction, - Query, - SchemaOverview, -} from '../types'; +import { Accountability } from '@directus/shared/types'; +import { AST, FieldNode, NestedCollectionNode, PermissionsAction, Query, SchemaOverview } from '../types'; import { getRelationType } from '../utils/get-relation-type'; type GetASTOptions = { diff --git a/api/src/utils/get-cache-key.ts b/api/src/utils/get-cache-key.ts index 7f0990ed43..30236ee829 100644 --- a/api/src/utils/get-cache-key.ts +++ b/api/src/utils/get-cache-key.ts @@ -1,16 +1,16 @@ import { Request } from 'express'; import url from 'url'; +import hash from 'object-hash'; export function getCacheKey(req: Request): string { const path = url.parse(req.originalUrl).pathname; - let key: string; - - if (path?.includes('/graphql')) { - key = `${req.accountability?.user || 'null'}-${path}-${JSON.stringify(req.params.query)}`; - } else { - key = `${req.accountability?.user || 'null'}-${path}-${JSON.stringify(req.sanitizedQuery)}`; - } + const info = { + user: req.accountability?.user || null, + path, + query: path?.includes('/graphql') ? 
req.params.query : req.sanitizedQuery, + }; + const key = hash(info); return key; } diff --git a/api/src/utils/get-email-from-profile.ts b/api/src/utils/get-email-from-profile.ts index 42da9e1fd0..2ecd4e1470 100644 --- a/api/src/utils/get-email-from-profile.ts +++ b/api/src/utils/get-email-from-profile.ts @@ -21,10 +21,15 @@ export default function getEmailFromProfile(provider: string, profile: Record { switch (localType) { case 'boolean': return GraphQLBoolean; @@ -14,6 +22,7 @@ export function getGraphQLType(localType: typeof types[number] | 'alias' | 'unkn case 'float': return GraphQLFloat; case 'csv': + return new GraphQLList(GraphQLString); case 'json': return GraphQLJSON; case 'timestamp': diff --git a/api/src/utils/get-local-type.ts b/api/src/utils/get-local-type.ts index f5759267ff..8b7fbe8eec 100644 --- a/api/src/utils/get-local-type.ts +++ b/api/src/utils/get-local-type.ts @@ -1,11 +1,9 @@ import { SchemaOverview } from '@directus/schema/dist/types/overview'; import { Column } from 'knex-schema-inspector/dist/types/column'; -import { FieldMeta, types } from '../types'; +import { FieldMeta, Type } from '@directus/shared/types'; +import getDatabase from '../database'; -/** - * Typemap graciously provided by @gpetrov - */ -const localTypeMap: Record = { +const localTypeMap: Record = { // Shared boolean: { type: 'boolean' }, tinyint: { type: 'boolean' }, @@ -51,7 +49,7 @@ const localTypeMap: Record collectionMeta.collection === collection); result.collections[collection] = { diff --git a/api/src/utils/is-url-allowed.ts b/api/src/utils/is-url-allowed.ts index 3604ebdbb7..b0d81a851f 100644 --- a/api/src/utils/is-url-allowed.ts +++ b/api/src/utils/is-url-allowed.ts @@ -1,4 +1,4 @@ -import { toArray } from './to-array'; +import { toArray } from '@directus/shared/utils'; import logger from '../logger'; /** diff --git a/api/src/utils/parse-iptc.ts b/api/src/utils/parse-iptc.ts deleted file mode 100644 index 054ef007e3..0000000000 --- a/api/src/utils/parse-iptc.ts +++ /dev/null @@ -1,51 +0,0 @@ -const IPTC_ENTRY_TYPES = new Map([ - [0x78, 'caption'], - [0x6e, 'credit'], - [0x19, 'keywords'], - [0x37, 'dateCreated'], - [0x50, 'byline'], - [0x55, 'bylineTitle'], - [0x7a, 'captionWriter'], - [0x69, 'headline'], - [0x74, 'copyright'], - [0x0f, 'category'], -]); - -const IPTC_ENTRY_MARKER = Buffer.from([0x1c, 0x02]); - -export default function parseIPTC(buffer: Buffer): Record { - if (!Buffer.isBuffer(buffer)) return {}; - - const iptc: Record = {}; - let lastIptcEntryPos = buffer.indexOf(IPTC_ENTRY_MARKER); - - while (lastIptcEntryPos !== -1) { - lastIptcEntryPos = buffer.indexOf(IPTC_ENTRY_MARKER, lastIptcEntryPos + IPTC_ENTRY_MARKER.byteLength); - - const iptcBlockTypePos = lastIptcEntryPos + IPTC_ENTRY_MARKER.byteLength; - const iptcBlockSizePos = iptcBlockTypePos + 1; - const iptcBlockDataPos = iptcBlockSizePos + 2; - - const iptcBlockType = buffer.readUInt8(iptcBlockTypePos); - const iptcBlockSize = buffer.readUInt16BE(iptcBlockSizePos); - - if (!IPTC_ENTRY_TYPES.has(iptcBlockType)) { - continue; - } - - const iptcBlockTypeId = IPTC_ENTRY_TYPES.get(iptcBlockType); - const iptcData = buffer.slice(iptcBlockDataPos, iptcBlockDataPos + iptcBlockSize).toString(); - - if (iptcBlockTypeId) { - if (iptc[iptcBlockTypeId] == null) { - iptc[iptcBlockTypeId] = iptcData; - } else if (Array.isArray(iptc[iptcBlockTypeId])) { - iptc[iptcBlockTypeId].push(iptcData); - } else { - iptc[iptcBlockTypeId] = [iptc[iptcBlockTypeId], iptcData]; - } - } - } - - return iptc; -} diff --git 
diff --git a/api/src/utils/sanitize-query.ts b/api/src/utils/sanitize-query.ts
index 41f2942b46..de35923a92 100644
--- a/api/src/utils/sanitize-query.ts
+++ b/api/src/utils/sanitize-query.ts
@@ -1,7 +1,8 @@
 import { flatten, get, merge, set } from 'lodash';
 import logger from '../logger';
-import { Accountability, Aggregate, Filter, Meta, Query, Sort } from '../types';
-import { parseFilter } from '../utils/parse-filter';
+import { Aggregate, Filter, Meta, Query, Sort } from '../types';
+import { Accountability } from '@directus/shared/types';
+import { parseFilter } from '@directus/shared/utils';
 
 export function sanitizeQuery(rawQuery: Record, accountability?: Accountability | null): Query {
 	const query: Query = {};
diff --git a/api/src/utils/stall.ts b/api/src/utils/stall.ts
new file mode 100644
index 0000000000..7dd55a4765
--- /dev/null
+++ b/api/src/utils/stall.ts
@@ -0,0 +1,36 @@
+import { performance } from 'perf_hooks';
+
+/**
+ * Wait a specific time to meet the stall ms. Useful in cases where you need to make sure that every
+ * path in a function takes at least X ms (for example authenticate).
+ *
+ * @param {number} ms - Stall time to wait until
+ * @param {number} start - Current start time of the function
+ *
+ * @example
+ *
+ * ```js
+ * const STALL_TIME = 100;
+ *
+ * // Function will always take (at least) 100ms
+ * async function doSomething() {
+ *   const timeStart = performance.now();
+ *
+ *   if (something === true) {
+ *     await heavy();
+ *   }
+ *
+ *   await stall(STALL_TIME, timeStart);
+ *   return 'result';
+ * }
+ * ```
+ */
+export async function stall(ms: number, start: number): Promise<void> {
+	const now = performance.now();
+	const timeElapsed = now - start;
+	const timeRemaining = ms - timeElapsed;
+
+	if (timeRemaining <= 0) return;
+
+	return new Promise((resolve) => setTimeout(resolve, timeRemaining));
+}
diff --git a/api/src/utils/track.ts b/api/src/utils/track.ts
index 078e15c74e..2838db2a63 100644
--- a/api/src/utils/track.ts
+++ b/api/src/utils/track.ts
@@ -14,7 +14,7 @@ export async function track(event: string): Promise<void> {
 	try {
 		await axios.post('https://telemetry.directus.io/', info);
 	} catch (err) {
-		if ('DIRECTUS_DEV' in process.env) {
+		if (env.NODE_ENV === 'development') {
 			logger.error(err);
 		}
 	}
@@ -27,7 +27,7 @@ async function getEnvInfo(event: string) {
 		event: event,
 		project_id: env.KEY,
 		machine_id: await machineId(),
-		environment: process.env.NODE_ENV,
+		environment: env.NODE_ENV,
 		stack: 'node',
 		os: {
 			arch: os.arch(),
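Note: stall() exists to flatten timing differences between code paths. A sketch of the intended usage in an authentication flow; verifyCredentials is a hypothetical stand-in, not a Directus function:

```ts
import { performance } from 'perf_hooks';
import { stall } from './stall';

const STALL_TIME = 100;

// Hypothetical credential check: fast for unknown emails, slower for real
// ones, which is exactly the signal stall() is meant to hide.
declare function verifyCredentials(email: string, password: string): Promise<boolean>;

export async function login(email: string, password: string): Promise<boolean> {
	const timeStart = performance.now();

	const valid = await verifyCredentials(email, password);

	// Pad every path to at least STALL_TIME ms, so response timing can't be
	// used to enumerate which accounts exist.
	await stall(STALL_TIME, timeStart);
	return valid;
}
```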
diff --git a/api/src/utils/transformations.ts b/api/src/utils/transformations.ts
new file mode 100644
index 0000000000..7d8eac17a1
--- /dev/null
+++ b/api/src/utils/transformations.ts
@@ -0,0 +1,68 @@
+import { isNil } from 'lodash';
+import {
+	File,
+	Transformation,
+	TransformationParams,
+	TransformationPreset,
+	TransformationPresetFormat,
+	TransformationPresetResize,
+} from '../types';
+
+// Extract transforms from a preset
+export function resolvePreset(input: TransformationParams | TransformationPreset, file: File): Transformation[] {
+	// Do the format conversion last
+	return [extractResize(input), ...(input.transforms ?? []), extractToFormat(input, file)].filter(
+		(transform): transform is Transformation => transform !== undefined
+	);
+}
+
+function extractOptions<T extends Record<string, any>>(keys: (keyof T)[], numberKeys: (keyof T)[] = []) {
+	return function (input: TransformationParams | TransformationPreset): T {
+		return Object.entries(input).reduce(
+			(config, [key, value]) =>
+				keys.includes(key as any) && isNil(value) === false
+					? {
+							...config,
+							[key]: numberKeys.includes(key as any) ? +value : value,
+					  }
+					: config,
+			{} as T
+		);
+	};
+}
+
+// Extract format transform from a preset
+function extractToFormat(input: TransformationParams | TransformationPreset, file: File): Transformation | undefined {
+	const options = extractOptions<TransformationPresetFormat>(['format', 'quality'], ['quality'])(input);
+
+	return Object.keys(options).length > 0
+		? [
+				'toFormat',
+				options.format || (file.type!.split('/')[1] as any),
+				{
+					quality: options.quality,
+				},
+		  ]
+		: undefined;
+}
+
+function extractResize(input: TransformationParams | TransformationPreset): Transformation | undefined {
+	const resizable = ['width', 'height'].some((key) => key in input);
+
+	if (!resizable) return undefined;
+
+	return [
+		'resize',
+		extractOptions<TransformationPresetResize>(['width', 'height', 'fit', 'withoutEnlargement'], ['width', 'height'])(input),
+	];
+}
+
+/**
+ * Try to extract a file format from an array of `Transformation`'s.
+ */
+export function maybeExtractFormat(transforms: Transformation[]): string | undefined {
+	const toFormats = transforms.filter((t) => t[0] === 'toFormat');
+	const lastToFormat = toFormats[toFormats.length - 1];
+
+	return lastToFormat ? lastToFormat[1]?.toString() : undefined;
+}
diff --git a/api/tsconfig.json b/api/tsconfig.json
index 0d81c4580c..cf93da6c53 100644
--- a/api/tsconfig.json
+++ b/api/tsconfig.json
@@ -10,7 +10,8 @@
 		"strict": true,
 		"lib": ["es2019"],
 		"skipLibCheck": true,
-		"declaration": true
+		"declaration": true,
+		"resolveJsonModule": true
 	},
 	"exclude": ["node_modules", "dist"]
 }
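Note: to make the preset resolution above concrete, a sketch of feeding its output to sharp. The reduce assumes each Transformation tuple is a sharp method name plus its arguments, which is what the shapes in resolvePreset suggest; the params and file objects are fabricated:

```ts
import sharp from 'sharp';
import { maybeExtractFormat, resolvePreset } from './transformations';

// Query-style params arrive as strings; resolvePreset casts the numeric
// keys, drops the nil ones, and orders the format conversion last.
const params = { width: '640', height: '480', fit: 'cover', format: 'webp', quality: '80' } as any;
const file = { type: 'image/jpeg' } as any;

const transforms = resolvePreset(params, file);
// => [['resize', { width: 640, height: 480, fit: 'cover' }], ['toFormat', 'webp', { quality: 80 }]]

// Each tuple applies as a method call on the sharp pipeline:
const pipeline = transforms.reduce((acc, [method, ...args]) => (acc as any)[method](...args), sharp('source.jpg') as any);

console.log(maybeExtractFormat(transforms)); // 'webp'
```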
"@types/codemirror": "5.60.2", + "@types/color": "3.0.2", + "@types/diff": "5.0.1", + "@types/dompurify": "2.2.3", + "@types/lodash": "4.14.172", + "@types/markdown-it": "12.0.3", + "@types/marked": "2.0.4", "@types/mime-types": "2.1.0", "@types/ms": "0.7.31", - "@types/qrcode": "1.4.0", - "@vitejs/plugin-vue": "1.2.4", + "@types/qrcode": "1.4.1", + "@vitejs/plugin-vue": "1.3.0", "@vue/cli-plugin-babel": "4.5.13", "@vue/cli-plugin-router": "4.5.13", "@vue/cli-plugin-typescript": "4.5.13", "@vue/cli-plugin-vuex": "4.5.13", "@vue/cli-service": "4.5.13", - "@vue/compiler-sfc": "3.1.2", + "@vue/compiler-sfc": "3.1.5", "axios": "0.21.1", "base-64": "1.0.0", - "codemirror": "5.62.0", + "codemirror": "5.62.2", "copyfiles": "2.4.1", "cropperjs": "1.5.12", - "date-fns": "2.22.1", - "dompurify": "2.2.9", + "date-fns": "2.23.0", + "dompurify": "2.3.0", "escape-string-regexp": "5.0.0", "front-matter": "4.0.2", "html-entities": "2.3.2", @@ -76,18 +75,21 @@ "mime": "2.5.2", "mitt": "3.0.0", "nanoid": "3.1.23", - "pinia": "2.0.0-beta.3", + "pinia": "2.0.0-beta.5", "prettier": "2.3.2", "pretty-ms": "7.0.1", "qrcode": "1.4.4", "rimraf": "3.0.2", - "sass": "1.35.1", + "sass": "1.37.5", "tinymce": "5.8.2", - "typescript": "4.3.4", - "vite": "2.3.8", - "vue": "3.1.2", - "vue-i18n": "9.1.6", + "typescript": "4.3.5", + "vite": "2.4.4", + "vue": "3.1.5", + "vue-i18n": "9.1.7", "vue-router": "4.0.10", "vuedraggable": "4.0.3" + }, + "dependencies": { + "p-queue": "^6.6.2" } } diff --git a/app/src/api.ts b/app/src/api.ts index 017b715330..3a7e2f957e 100644 --- a/app/src/api.ts +++ b/app/src/api.ts @@ -3,6 +3,7 @@ import { useRequestsStore } from '@/stores/'; import { getRootPath } from '@/utils/get-root-path'; import axios, { AxiosError, AxiosRequestConfig, AxiosResponse } from 'axios'; import { addQueryToPath } from './utils/add-query-to-path'; +import PQueue from 'p-queue'; const api = axios.create({ baseURL: getRootPath(), @@ -12,6 +13,8 @@ const api = axios.create({ }, }); +const queue = new PQueue({ concurrency: 5, intervalCap: 5, interval: 500, carryoverConcurrencyCount: true }); + interface RequestConfig extends AxiosRequestConfig { id: string; } @@ -24,7 +27,7 @@ export interface RequestError extends AxiosError { response: Response; } -export const onRequest = (config: AxiosRequestConfig): RequestConfig => { +export const onRequest = (config: AxiosRequestConfig): Promise => { const requestsStore = useRequestsStore(); const id = requestsStore.startRequest(); @@ -33,7 +36,9 @@ export const onRequest = (config: AxiosRequestConfig): RequestConfig => { ...config, }; - return requestConfig; + return new Promise((resolve) => { + queue.add(() => resolve(requestConfig)); + }); }; export const onResponse = (response: AxiosResponse | Response): AxiosResponse | Response => { diff --git a/app/src/app.vue b/app/src/app.vue index 41daa83313..18e2b723d3 100644 --- a/app/src/app.vue +++ b/app/src/app.vue @@ -1,7 +1,7 @@ @@ -24,7 +24,6 @@ import { defineComponent, computed } from 'vue'; import useSync from '@/composables/use-sync'; export default defineComponent({ - emits: ['update:indeterminate', 'update:modelValue', 'update:value'], props: { value: { type: String, @@ -71,6 +70,7 @@ export default defineComponent({ default: null, }, }, + emits: ['update:indeterminate', 'update:modelValue', 'update:value'], setup(props, { emit }) { const internalValue = useSync(props, 'value', emit); diff --git a/app/src/components/v-chip/v-chip.vue b/app/src/components/v-chip/v-chip.vue index e5be0ac9fa..011f2870e3 100644 --- 
diff --git a/app/src/app.vue b/app/src/app.vue
index 41daa83313..18e2b723d3 100644
--- a/app/src/app.vue
+++ b/app/src/app.vue
@@ -1,7 +1,7 @@
@@ -24,7 +24,6 @@
 import { defineComponent, computed } from 'vue';
 import useSync from '@/composables/use-sync';
 
 export default defineComponent({
-	emits: ['update:indeterminate', 'update:modelValue', 'update:value'],
 	props: {
 		value: {
 			type: String,
@@ -71,6 +70,7 @@
 			default: null,
 		},
 	},
+	emits: ['update:indeterminate', 'update:modelValue', 'update:value'],
 	setup(props, { emit }) {
 		const internalValue = useSync(props, 'value', emit);
diff --git a/app/src/components/v-chip/v-chip.vue b/app/src/components/v-chip/v-chip.vue
index e5be0ac9fa..011f2870e3 100644
--- a/app/src/components/v-chip/v-chip.vue
+++ b/app/src/components/v-chip/v-chip.vue
@@ -19,7 +19,6 @@
 import { defineComponent, ref, computed } from 'vue';
 import useSizeClass, { sizeProps } from '@/composables/size-class';
 
 export default defineComponent({
-	emits: ['update:active', 'click', 'close'],
 	props: {
 		active: {
 			type: Boolean,
@@ -47,6 +46,7 @@
 		},
 		...sizeProps,
 	},
+	emits: ['update:active', 'click', 'close'],
 	setup(props, { emit }) {
 		const internalLocalActive = ref(true);
diff --git a/app/src/components/v-detail/v-detail.vue b/app/src/components/v-detail/v-detail.vue
index d3eb612a6c..fca5a29c9f 100644
--- a/app/src/components/v-detail/v-detail.vue
+++ b/app/src/components/v-detail/v-detail.vue
@@ -19,7 +19,6 @@
 import { defineComponent, computed, ref } from 'vue';
 import { i18n } from '@/lang';
 
 export default defineComponent({
-	emits: ['update:modelValue'],
 	props: {
 		modelValue: {
 			type: Boolean,
@@ -38,7 +37,7 @@
 			default: false,
 		},
 	},
-
+	emits: ['update:modelValue'],
 	setup(props, { emit }) {
 		const localActive = ref(props.startOpen);
diff --git a/app/src/components/v-dialog/v-dialog.vue b/app/src/components/v-dialog/v-dialog.vue
index 820d3a33f9..953dae9d55 100644
--- a/app/src/components/v-dialog/v-dialog.vue
+++ b/app/src/components/v-dialog/v-dialog.vue
@@ -20,7 +20,6 @@
 import useShortcut from '@/composables/use-shortcut';
 import { useDialogRouteLeave } from '@/composables/use-dialog-route';
 
 export default defineComponent({
-	emits: ['esc', 'update:modelValue'],
 	props: {
 		modelValue: {
 			type: Boolean,
@@ -36,6 +35,7 @@
 			validator: (val: string) => ['center', 'right'].includes(val),
 		},
 	},
+	emits: ['esc', 'update:modelValue'],
 	setup(props, { emit }) {
 		useShortcut('escape', (event, cancelNext) => {
 			if (internalActive.value) {
diff --git a/app/src/components/v-drawer/v-drawer.vue b/app/src/components/v-drawer/v-drawer.vue
index f2068a767f..65bb907227 100644
--- a/app/src/components/v-drawer/v-drawer.vue
+++ b/app/src/components/v-drawer/v-drawer.vue
@@ -1,5 +1,5 @@
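Note: these component hunks all make the same mechanical move, relocating the emits option from before props to directly after it. A minimal invented component showing the option order the diffs converge on:

```ts
import { defineComponent } from 'vue';

// Hypothetical component: props first, then emits, then setup.
export default defineComponent({
	props: {
		modelValue: {
			type: Boolean,
			default: false,
		},
	},
	emits: ['update:modelValue'],
	setup(props, { emit }) {
		const toggle = () => emit('update:modelValue', !props.modelValue);
		return { toggle };
	},
});
```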