Merge branch 'aggregation' into insights
@@ -1,2 +1,3 @@
node_modules
dist
templates
@@ -38,10 +38,9 @@ module.exports = {
		parser: '@typescript-eslint/parser',
	},
	extends: [
		'plugin:vue/vue3-essential',
		'plugin:vue/vue3-recommended',
		'eslint:recommended',
		'plugin:@typescript-eslint/recommended',
		'plugin:prettier-vue/recommended',
		'prettier',
	],
	rules: {

.github/CODEOWNERS (7 changed lines)
@@ -2,10 +2,7 @@
/docs/*.md @benhaynes

/packages/cli @WoLfulus
/packages/sdk @WoLfulus
/packages/gatsby-source-directus @WoLfulus

/packages/shared @nickrum
/packages/extension-sdk @nickrum
/packages/extensions-sdk @nickrum
/packages/create-directus-extension @nickrum
/app/vite.config.js @nickrum

.github/ISSUE_TEMPLATE/bug_report.yml (15 changed lines)
@@ -5,12 +5,17 @@ body:
  - type: markdown
    attributes:
      value: Hi, thank you for taking the time to create an issue!
  - type: markdown
  - type: checkboxes
    id: troubleshooting
    attributes:
      value: 'Before continuing, you must first have completed all [Troubleshooting Steps](https://docs.directus.io/getting-started/support/#troubleshooting-steps)'
  - type: markdown
    attributes:
      value: Please confirm that an issue describing this problem doesn't exist already.
      label: Preflight Checklist
      options:
        - label: I have completed all [Troubleshooting Steps](https://docs.directus.io/getting-started/support/#troubleshooting-steps).
          required: true
        - label: I'm on [the latest version of Directus](https://github.com/directus/directus/releases).
          required: true
        - label: There's [no other issue](https://github.com/directus/directus/issues) that already describes my problem.
          required: true
  - type: textarea
    attributes:
      label: Describe the Bug

.github/actions/build-images/Dockerfile (15 changed lines)
@@ -1,15 +0,0 @@
FROM docker:stable

RUN \
	apk update && \
	apk upgrade && \
	apk add bash

COPY ./rootfs/ /

RUN \
	chmod +x /usr/bin/lib/argsf && \
	chmod +x /usr/bin/entrypoint && \
	chmod +x /usr/bin/semver

ENTRYPOINT ["entrypoint"]

.github/actions/build-images/action.yml (47 changed lines)
@@ -1,47 +0,0 @@
name: "Build and publish Directus images"
description: "GitHub Action to publish Directus container images."
branding:
  icon: archive
  color: gray-dark
inputs:
  repository:
    description: "Repository name"
    required: true
  registry:
    description: "Registry"
    required: true
  username:
    description: "Registry user"
    required: true
  password:
    description: "Registry password"
    required: true
  version:
    description: "Version"
    required: true
  push:
    description: "Push"
    required: false
    default: "false"
  latest:
    description: "Latest"
    required: false
    default: "false"
runs:
  using: "docker"
  image: "Dockerfile"
  args:
    - --registry
    - ${{ inputs.registry }}
    - --repository
    - ${{ inputs.repository }}
    - --username
    - ${{ inputs.username }}
    - --password
    - ${{ inputs.password }}
    - --version
    - ${{ inputs.version }}
    - --push
    - ${{ inputs.push }}
    - --latest
    - ${{ inputs.latest }}
@@ -1,106 +0,0 @@
# Builder image
FROM alpine:latest AS builder

ARG VERSION
ARG REPOSITORY=directus/directus

# Get runtime dependencies from optional dependencies
# defined in package.json of Directus API package
WORKDIR /directus
RUN apk add --no-cache jq \
	&& wget -O directus-api-package.json "https://raw.githubusercontent.com/${REPOSITORY}/${VERSION}/api/package.json" \
	&& jq '{ \
		name: "directus-project", \
		version: "1.0.0", \
		description: "Directus Project", \
		dependencies: .optionalDependencies \
	}' \
	directus-api-package.json > package.json

# Directus image
FROM node:16-alpine

ARG VERSION
ARG REPOSITORY=directus/directus

LABEL directus.version="${VERSION}"
LABEL org.opencontainers.image.source https://github.com/${REPOSITORY}

# Default environment variables
# (see https://docs.directus.io/reference/environment-variables/)
ENV \
	PORT="8055" \
	PUBLIC_URL="/" \
	DB_CLIENT="sqlite3" \
	DB_FILENAME="/directus/database/database.sqlite" \
	RATE_LIMITER_ENABLED="false" \
	RATE_LIMITER_STORE="memory" \
	RATE_LIMITER_POINTS="25" \
	RATE_LIMITER_DURATION="1" \
	CACHE_ENABLED="false" \
	STORAGE_LOCATIONS="local" \
	STORAGE_LOCAL_PUBLIC_URL="/uploads" \
	STORAGE_LOCAL_DRIVER="local" \
	STORAGE_LOCAL_ROOT="/directus/uploads" \
	ACCESS_TOKEN_TTL="15m" \
	REFRESH_TOKEN_TTL="7d" \
	REFRESH_TOKEN_COOKIE_SECURE="false" \
	REFRESH_TOKEN_COOKIE_SAME_SITE="lax" \
	OAUTH_PROVIDERS="" \
	EXTENSIONS_PATH="/directus/extensions" \
	EMAIL_FROM="no-reply@directus.io" \
	EMAIL_TRANSPORT="sendmail" \
	EMAIL_SENDMAIL_NEW_LINE="unix" \
	EMAIL_SENDMAIL_PATH="/usr/sbin/sendmail"

RUN \
	# Install system dependencies
	# - 'bash' for entrypoint script
	# - 'ssmtp' to be able to send mails
	# - 'util-linux' not sure if this is required
	apk upgrade --no-cache && apk add --no-cache \
		bash \
		ssmtp \
		util-linux \
	# Install global node dependencies
	&& npm install -g \
		yargs \
		pino \
		pino-colada \
	# Create directory for Directus with corresponding ownership
	# (can be omitted on newer Docker versions since WORKDIR below will do the same)
	&& mkdir /directus && chown node:node /directus

# Switch to user 'node' and directory '/directus'
USER node
WORKDIR /directus

# Get package.json from builder image
COPY --from=builder --chown=node:node /directus/package.json .

RUN \
	# Install Directus and runtime dependencies
	# (retry if it fails for some reason, e.g. release not published yet)
	for i in $(seq 10); do npm install "directus@${VERSION}" && break || sleep 30; done && \
	npm install \
	# Create data directories
	&& mkdir -p \
		database \
		extensions/displays \
		extensions/interfaces \
		extensions/layouts \
		extensions/modules \
		uploads

# Expose data directories as volumes
VOLUME \
	/directus/database \
	/directus/extensions \
	/directus/uploads

# Copy rootfs files
COPY ./rootfs /

EXPOSE 8055
SHELL ["/bin/bash", "-c"]
ENTRYPOINT ["entrypoint"]
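
The deleted image above was self-configuring through its ENV defaults. As a minimal sketch of how such an image was meant to be run, with the port, volume paths, and KEY/SECRET requirement all taken from the Dockerfile and entrypoint in this diff (the image tag and secret values are placeholders, not part of the diff):

# Illustrative run of the (now removed) image.
docker run -d \
  -p 8055:8055 \
  -e KEY="replace-with-a-uuid" \
  -e SECRET="replace-with-32-random-chars" \
  -v "$(pwd)/database:/directus/database" \
  -v "$(pwd)/uploads:/directus/uploads" \
  -v "$(pwd)/extensions:/directus/extensions" \
  directus/directus:latest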
@@ -1,64 +0,0 @@
#!/usr/bin/env bash

set -e

function bootstrap() {
	local warn=false

	if [ "${KEY}" == "" ] ; then
		export KEY=$(uuidgen)
		warn=true
	fi

	if [ "${SECRET}" == "" ] ; then
		export SECRET=$(node -e 'console.log(require("nanoid").nanoid(32))')
		warn=true
	fi

	if [ "${warn}" == "true" ] ; then
		print --level=warn --stdin <<WARN
>
> WARNING!
>
> The KEY and SECRET environment variables are not set. Some
> temporary variables were generated to fill the gap, but in
> production this is going to cause problems.
>
> Reference:
> https://docs.directus.io/reference/environment-variables.html
>
>
WARN
	fi

	# Create folder if using sqlite and file doesn't exist
	if [ "${DB_CLIENT}" == "sqlite3" ] ; then
		if [ "${DB_FILENAME}" == "" ] ; then
			print --level=error "Missing DB_FILENAME environment variable"
			exit 1
		fi

		if [ ! -f "${DB_FILENAME}" ] ; then
			mkdir -p $(dirname ${DB_FILENAME})
		fi
	fi

	npx directus bootstrap
}

command=""
if [ $# -eq 0 ] ; then
	command="start"
elif [ "${1}" == "bash" ] || [ "${1}" == "shell" ] ; then
	shift
	exec bash $@
elif [ "${1}" == "command" ] ; then
	shift
	exec $@
else
	command="${1}"
	shift
fi

bootstrap
exec npx directus "${command}" $@
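
For context, the dispatch block at the bottom of this script maps container arguments onto directus CLI invocations. A sketch of the resulting behavior, derived from that logic (the image name is illustrative):

docker run directus/directus                          # no args: bootstrap, then `npx directus start`
docker run directus/directus bash                     # or `shell`: replaces the entrypoint with a shell
docker run directus/directus command env              # runs an arbitrary program directly, skipping bootstrap
docker run directus/directus database migrate:latest  # anything else: bootstrap, then that directus subcommand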
@@ -1,48 +0,0 @@
#!/usr/bin/env node

// Workarounds?
process.env.NODE_PATH = "/usr/local/lib/node_modules";
require("module").Module._initPaths();

/**
 * Read lines from stdin
 */
async function readlines() {
	const chunks = [];
	for await (const chunk of process.stdin) {
		chunks.push(chunk);
	}

	const lines = chunks.join("").split("\n");
	lines.pop();
	return lines;
}

(async function () {
	// Logger
	const yargs = require("yargs");
	const logger = require("pino")({
		prettyPrint: process.env.LOG_STYLE !== "raw",
		prettifier: require("pino-colada"),
		level: process.env.LOG_LEVEL || "info",
	});

	function write(...message) {
		if (level in logger) {
			logger[level](...message);
		} else {
			logger.info(...message);
		}
	}

	const args = yargs.argv;
	const level = args.level || "info";
	const stdin = args.stdin || false;

	if (stdin) {
		const lines = await readlines();
		lines.forEach((line) => write(line));
	} else {
		write(...args._);
	}
})();
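
This print helper is what the entrypoint's WARN heredoc pipes into. A sketch of its two modes, following the yargs flags it reads above (assumes the script is on PATH inside the container as /usr/bin/print):

print --level=warn "disk is getting full"        # log a single message at the given level
echo -e "one\ntwo" | print --level=info --stdin  # log each stdin line as its own entry
LOG_STYLE=raw print "not prettified"             # honors the same LOG_STYLE/LOG_LEVEL vars as the API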
@@ -1,138 +0,0 @@
#!/usr/bin/env bash

set -e

root=$(dirname ${0})
source ${root}/lib/argsf

#
# Makes a set of tags
#
function make_tags() {
	local prefix=""
	local version=${1}

	semver get major ${version} > /dev/null 2>&1
	if [ "$?" != "0" ]; then
		echo "${version}"
	else
		if [ "${version:0:1}" == "v" ]; then
			prefix="v"
		fi

		major="$(semver get major ${version})"
		minor="${major}.$(semver get minor ${version})"
		patch="${minor}.$(semver get patch ${version})"

		prerel="$(semver get prerel ${version})"
		if [ "${prerel}" == "" ]; then
			is_prerel=false
		else
			is_prerel=true
		fi

		build="$(semver get build ${version})"
		if [ "${build}" == "" ]; then
			is_build=false
		else
			is_build=true
		fi

		if [ "${is_prerel}" == "true" ]; then
			echo "${prefix}${major}-${prerel}"
			echo "${prefix}${minor}-${prerel}"
			echo "${prefix}${patch}-${prerel}"
			if [ "${is_build}" == "true" ]; then
				echo "${prefix}${major}-${prerel}-${build}"
			fi
		else
			echo "${prefix}${major}"
			echo "${prefix}${minor}"
			echo "${prefix}${patch}"
			if [ "${is_build}" == "true" ]; then
				echo "${prefix}${patch}-${build}"
			fi
		fi
	fi
}

#
# Build script
#
function main() {
	username=$(argument username)
	password=$(argument password)

	push=$(argument push "false")
	latest=$(argument latest "false")

	registry=$(argument registry "")
	registry=$(echo "${registry}" | tr '[:upper:]' '[:lower:]')

	repository=$(argument repository "directus/directus")
	repository=$(echo "${repository}" | tr '[:upper:]' '[:lower:]')

	version=$(argument version "")
	context=$(argument context ".")

	image="${repository}"
	if [ "${registry}" != "" ]; then
		image="${registry}/${image}"
	fi

	# Normalize tag
	if [ "${version}" == "" ]; then
		version=${GITHUB_REF##*/}
	else
		version=${version##*/}
	fi

	if [ "${version}" == "" ]; then
		version=$(echo ${GITHUB_SHA:-"000000000000"} | cut -c1-12)
	fi

	tags=$(make_tags ${version})
	echo "Tags = ${tags}"

	# build image
	docker build \
		-t directus:main \
		--build-arg VERSION=${version} \
		--build-arg REPOSITORY=${repository} \
		/directus/images/main

	# login into registry
	docker login -u "${username}" -p "${password}" "${registry}"

	# Push latest
	# TODO: check if it's really the latest
	if [ "${latest}" == "true" ]; then
		fqin="${image}:latest"
		echo "Tagging ${fqin}"
		docker tag directus:main ${fqin}
		if [ "${push}" == "true" ]; then
			echo "Pushing tag ${fqin}"
			docker push "${fqin}"
		fi
	fi

	# Push tags
	for tag in $tags
	do
		tag=$(echo "${tag}" | tr '[:upper:]' '[:lower:]')
		fqin="${image}:${tag}"
		echo "Tagging ${fqin}"
		docker tag directus:main "${fqin}"
		if [ "${push}" == "true" ]; then
			echo "Pushing tag ${fqin}"
			docker push "${fqin}"
		fi
	done

	echo "Finished."

	exit $?
}

main
exit $?
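
Tracing make_tags by hand shows the tag fan-out this script pushed per release. The expected output below is derived from the function's logic, not captured from a real run:

make_tags "v9.0.0-rc.92"   # -> v9-rc.92, v9.0-rc.92, v9.0.0-rc.92
make_tags "v9.0.0"         # -> v9, v9.0, v9.0.0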
@@ -1,98 +0,0 @@
#
# Arguments and Flags (argsf)
# This is meant to work with bash shell
# To use, source this file into your bash scripts
#
# Implemented by João Biondo <wolfulus@gmail.com>
# https://github.com/WoLfulus/argsf
#

declare _ARGCOUNT=$#
declare _ARGDATA=("$@")
declare -A _ARGMAP
declare -A _FLAGMAP

for ((_arg_index_key=1;_arg_index_key<=$#;_arg_index_key++))
do
	_arg_index_value=$(expr $_arg_index_key + 1)
	_arg_key=${!_arg_index_key}
	_arg_value=${!_arg_index_value}
	if [[ $_arg_key == *"--"* ]]; then
		if [[ $_arg_key == *" "* ]]; then
			continue
		fi
		_arg_name="${_arg_key:2}"
		_FLAGMAP[${_arg_name}]=1
		if [[ $_arg_value != *"--"* ]] || [[ $_arg_value == *" "* ]] ; then
			_ARGMAP[${_arg_name}]="$_arg_value"
		else
			_ARGMAP[${_arg_name}]=""
		fi
	fi
done

function _argument() {
	if test "${_ARGMAP[${ARG_NAME}]+isset}" ; then
		echo ${_ARGMAP[${ARG_NAME}]}
	else
		if [ ${ARG_DEFAULT} -eq 0 ]; then
			echo "Error: required argument '--${ARG_NAME}' not specified" 1>&2
			exit 1
		else
			echo ${ARG_DEFAULT_VALUE}
		fi
	fi
}

function argument() {
	if [ $# -eq 1 ]; then
		ARG_NAME="$1" ARG_DEFAULT=0 ARG_DEFAULT_VALUE= _argument "${_ARGUMENT_DATA}"
	elif [ $# -eq 2 ]; then
		ARG_NAME="$1" ARG_DEFAULT=1 ARG_DEFAULT_VALUE="$2" _argument "${_ARGUMENT_DATA}"
	else
		echo "argument: invalid number of arguments" 1>&2
		return 1
	fi
	return 0
}

function flage() {
	if [ $# -eq 1 ]; then
		if [[ ${_FLAGMAP[$1]} ]] ; then
			echo "true"
			return 0
		elif [[ ${_FLAGMAP[no-$1]} ]] ; then
			echo "false"
			return 0
		else
			echo "true"
			return 0
		fi
	else
		echo "flag: invalid number of arguments" 1>&2
		return 1
	fi
}

function flagd() {
	if [ $# -eq 1 ]; then
		if [[ ${_FLAGMAP[$1]} ]] ; then
			echo "true"
			return 0
		elif [[ ${_FLAGMAP[no-$1]} ]] ; then
			echo "false"
			return 0
		else
			echo "false"
			return 0
		fi
	else
		echo "flag: invalid number of arguments" 1>&2
		return 1
	fi
}

function flag() {
	flagd $1
	return $?
}
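
A hypothetical script sourcing argsf as above, showing the two accessors it exports (note that `flag` defers to `flagd`, so an absent flag reads as "false", while `flage` defaults to "true"):

source ./lib/argsf

name=$(argument name)          # required; errors if --name is missing
mode=$(argument mode "debug")  # optional, with a default value
verbose=$(flag verbose)        # "true" when --verbose was passed, "false" otherwise

echo "name=${name} mode=${mode} verbose=${verbose}"
# ./example.sh --name demo --verbose   ->   name=demo mode=debug verbose=true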

.github/actions/build-images/rootfs/usr/bin/semver (284 changed lines)
@@ -1,284 +0,0 @@
#!/usr/bin/env bash

#
# Copyright (c) 2014-2015 François Saint-Jacques <fsaintjacques@gmail.com>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
#

set -o errexit -o nounset -o pipefail

NAT='0|[1-9][0-9]*'
ALPHANUM='[0-9]*[A-Za-z-][0-9A-Za-z-]*'
IDENT="$NAT|$ALPHANUM"
FIELD='[0-9A-Za-z-]+'

SEMVER_REGEX="\
^[vV]?\
($NAT)\\.($NAT)\\.($NAT)\
(\\-(${IDENT})(\\.(${IDENT}))*)?\
(\\+${FIELD}(\\.${FIELD})*)?$"

PROG=semver
PROG_VERSION="3.0.0"

USAGE="\
Usage:
  $PROG bump (major|minor|patch|release|prerel <prerel>|build <build>) <version>
  $PROG compare <version> <other_version>
  $PROG get (major|minor|patch|release|prerel|build) <version>
  $PROG --help
  $PROG --version
Arguments:
  <version>  A version must match the following regular expression:
             \"${SEMVER_REGEX}\"
             In English:
             -- The version must match X.Y.Z[-PRERELEASE][+BUILD]
                where X, Y and Z are non-negative integers.
             -- PRERELEASE is a dot separated sequence of non-negative integers and/or
                identifiers composed of alphanumeric characters and hyphens (with
                at least one non-digit). Numeric identifiers must not have leading
                zeros. A hyphen (\"-\") introduces this optional part.
             -- BUILD is a dot separated sequence of identifiers composed of alphanumeric
                characters and hyphens. A plus (\"+\") introduces this optional part.
  <other_version>  See <version> definition.
  <prerel>  A string as defined by PRERELEASE above.
  <build>   A string as defined by BUILD above.
Options:
  -v, --version  Print the version of this tool.
  -h, --help     Print this help message.
Commands:
  bump     Bump by one of major, minor, patch; zeroing or removing
           subsequent parts. \"bump prerel\" sets the PRERELEASE part and
           removes any BUILD part. \"bump build\" sets the BUILD part.
           \"bump release\" removes any PRERELEASE or BUILD parts.
           The bumped version is written to stdout.
  compare  Compare <version> with <other_version>, output to stdout the
           following values: -1 if <other_version> is newer, 0 if equal, 1 if
           older. The BUILD part is not used in comparisons.
  get      Extract given part of <version>, where part is one of major, minor,
           patch, prerel, build, or release.
See also:
  https://semver.org -- Semantic Versioning 2.0.0"

function error {
	echo -e "$1" >&2
	exit 1
}

function usage-help {
	error "$USAGE"
}

function usage-version {
	echo -e "${PROG}: $PROG_VERSION"
	exit 0
}

function validate-version {
	local version=$1
	if [[ "$version" =~ $SEMVER_REGEX ]]; then
		# if a second argument is passed, store the result in var named by $2
		if [ "$#" -eq "2" ]; then
			local major=${BASH_REMATCH[1]}
			local minor=${BASH_REMATCH[2]}
			local patch=${BASH_REMATCH[3]}
			local prere=${BASH_REMATCH[4]}
			local build=${BASH_REMATCH[8]}
			eval "$2=(\"$major\" \"$minor\" \"$patch\" \"$prere\" \"$build\")"
		else
			echo "$version"
		fi
	else
		error "version $version does not match the semver scheme 'X.Y.Z(-PRERELEASE)(+BUILD)'. See help for more information."
	fi
}

function is-nat {
	[[ "$1" =~ ^($NAT)$ ]]
}

function is-null {
	[ -z "$1" ]
}

function order-nat {
	[ "$1" -lt "$2" ] && { echo -1 ; return ; }
	[ "$1" -gt "$2" ] && { echo 1 ; return ; }
	echo 0
}

function order-string {
	[[ $1 < $2 ]] && { echo -1 ; return ; }
	[[ $1 > $2 ]] && { echo 1 ; return ; }
	echo 0
}

# given two (named) arrays containing NAT and/or ALPHANUM fields, compare them
# one by one according to semver 2.0.0 spec. Return -1, 0, 1 if left array ($1)
# is less-than, equal, or greater-than the right array ($2). The longer array
# is considered greater-than the shorter if the shorter is a prefix of the longer.
#
function compare-fields {
	local l="$1[@]"
	local r="$2[@]"
	local leftfield=( "${!l}" )
	local rightfield=( "${!r}" )
	local left
	local right

	local i=$(( -1 ))
	local order=$(( 0 ))

	while true
	do
		[ $order -ne 0 ] && { echo $order ; return ; }

		: $(( i++ ))
		left="${leftfield[$i]}"
		right="${rightfield[$i]}"

		is-null "$left" && is-null "$right" && { echo 0 ; return ; }
		is-null "$left" && { echo -1 ; return ; }
		is-null "$right" && { echo 1 ; return ; }

		is-nat "$left" && is-nat "$right" && { order=$(order-nat "$left" "$right") ; continue ; }
		is-nat "$left" && { echo -1 ; return ; }
		is-nat "$right" && { echo 1 ; return ; }
		{ order=$(order-string "$left" "$right") ; continue ; }
	done
}

# shellcheck disable=SC2206 # checked by "validate"; ok to expand prerel id's into array
function compare-version {
	local order
	validate-version "$1" V
	validate-version "$2" V_

	# compare major, minor, patch

	local left=( "${V[0]}" "${V[1]}" "${V[2]}" )
	local right=( "${V_[0]}" "${V_[1]}" "${V_[2]}" )

	order=$(compare-fields left right)
	[ "$order" -ne 0 ] && { echo "$order" ; return ; }

	# compare pre-release ids when M.m.p are equal

	local prerel="${V[3]:1}"
	local prerel_="${V_[3]:1}"
	local left=( ${prerel//./ } )
	local right=( ${prerel_//./ } )

	# if left and right have no pre-release part, then left equals right
	# if only one of left/right has pre-release part, that one is less than simple M.m.p

	[ -z "$prerel" ] && [ -z "$prerel_" ] && { echo 0 ; return ; }
	[ -z "$prerel" ] && { echo 1 ; return ; }
	[ -z "$prerel_" ] && { echo -1 ; return ; }

	# otherwise, compare the pre-release id's

	compare-fields left right
}

function command-bump {
	local new; local version; local sub_version; local command;

	case $# in
		2) case $1 in
			major|minor|patch|release) command=$1; version=$2;;
			*) usage-help;;
		esac ;;
		3) case $1 in
			prerel|build) command=$1; sub_version=$2 version=$3 ;;
			*) usage-help;;
		esac ;;
		*) usage-help;;
	esac

	validate-version "$version" parts
	# shellcheck disable=SC2154
	local major="${parts[0]}"
	local minor="${parts[1]}"
	local patch="${parts[2]}"
	local prere="${parts[3]}"
	local build="${parts[4]}"

	case "$command" in
		major) new="$((major + 1)).0.0";;
		minor) new="${major}.$((minor + 1)).0";;
		patch) new="${major}.${minor}.$((patch + 1))";;
		release) new="${major}.${minor}.${patch}";;
		prerel) new=$(validate-version "${major}.${minor}.${patch}-${sub_version}");;
		build) new=$(validate-version "${major}.${minor}.${patch}${prere}+${sub_version}");;
		*) usage-help ;;
	esac

	echo "$new"
	exit 0
}

function command-compare {
	local v; local v_;

	case $# in
		2) v=$(validate-version "$1"); v_=$(validate-version "$2") ;;
		*) usage-help ;;
	esac

	set +u # need unset array element to evaluate to null
	compare-version "$v" "$v_"
	exit 0
}


# shellcheck disable=SC2034
function command-get {
	local part version

	if [[ "$#" -ne "2" ]] || [[ -z "$1" ]] || [[ -z "$2" ]]; then
		usage-help
		exit 0
	fi

	part="$1"
	version="$2"

	validate-version "$version" parts
	local major="${parts[0]}"
	local minor="${parts[1]}"
	local patch="${parts[2]}"
	local prerel="${parts[3]:1}"
	local build="${parts[4]:1}"
	local release="${major}.${minor}.${patch}"

	case "$part" in
		major|minor|patch|release|prerel|build) echo "${!part}" ;;
		*) usage-help ;;
	esac

	exit 0
}

case $# in
	0) echo "Unknown command: $*"; usage-help;;
esac

case $1 in
	--help|-h) echo -e "$USAGE"; exit 0;;
	--version|-v) usage-version ;;
	bump) shift; command-bump "$@";;
	get) shift; command-get "$@";;
	compare) shift; command-compare "$@";;
	*) echo "Unknown arguments: $*"; usage-help;;
esac
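
Example invocations of this vendored semver tool, with outputs following from the USAGE text and the command implementations above:

semver get major 9.0.0-rc.92   # 9
semver get prerel 9.0.0-rc.92  # rc.92
semver bump minor 9.0.0        # 9.1.0
semver compare 9.0.0 9.0.1     # -1 (the second version is newer)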

.github/workflows/build-images.yml (39 changed lines)
@@ -1,39 +0,0 @@
name: build-images
on:
  release:
    types:
      - published

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Sleep for 30 seconds
        uses: jakejarvis/wait-action@master
        with:
          time: '30s'

      - name: Checkout
        uses: actions/checkout@v2

      - name: Build GitHub Container Registry
        uses: ./.github/actions/build-images
        with:
          registry: "ghcr.io"
          repository: "${{ github.repository }}"
          username: "${{ secrets.REGISTRY_USERNAME }}"
          password: "${{ secrets.REGISTRY_PASSWORD }}"
          version: "${{ github.ref }}"
          latest: "true"
          push: "true"

      - name: Build Docker Hub
        uses: ./.github/actions/build-images
        with:
          registry: "docker.io"
          repository: "${{ github.repository }}"
          username: "${{ secrets.DOCKERHUB_USERNAME }}"
          password: "${{ secrets.DOCKERHUB_PASSWORD }}"
          version: "${{ github.ref }}"
          latest: "true"
          push: "true"

.github/workflows/create-release.yml (24 changed lines)
@@ -1,24 +0,0 @@
name: create-release
on:
  push:
    tags:
      - 'v*'
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Create Release
        id: create_release
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.REPOSITORY_TOKEN }}
        with:
          tag_name: ${{ github.ref }}
          release_name: ${{ github.ref }}
          body: |
            Directus ${{ github.ref }}
          draft: false
          prerelease: false
@@ -1,4 +1,4 @@
name: Run e2e tests
name: E2E
on:
  push:
    branches:
@@ -10,12 +10,13 @@ jobs:
      fail-fast: false
      matrix:
        db: ['mssql', 'mysql', 'postgres', 'maria', 'sqlite3']
        node-version: ['12-alpine', '14-alpine', '16-alpine']
        # node-version: ['12-alpine', '14-alpine', '16-alpine']
        node-version: ['16-alpine']
    env:
      CACHED_IMAGE: ghcr.io/directus/directus-e2e-test-cache:${{ matrix.node-version }}
    steps:
      - name: Cancel Previous Runs
        uses: styfle/cancel-workflow-action@0.9.0
        uses: styfle/cancel-workflow-action@0.9.1
        with:
          access_token: ${{ secrets.GITHUB_TOKEN }}
      - name: Login to GitHub Container Registry

.github/workflows/e2e.yml (42 changed lines, new file)
@@ -0,0 +1,42 @@
name: E2E
on:
  pull_request:
    branches:
      - main
jobs:
  tests:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        db: ['postgres']
        node-version: ['16-alpine']
    env:
      CACHED_IMAGE: ghcr.io/directus/directus-e2e-test-cache:${{ matrix.node-version }}
    steps:
      - name: Cancel Previous Runs
        uses: styfle/cancel-workflow-action@0.9.1
        with:
          access_token: ${{ secrets.GITHUB_TOKEN }}
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v2
        with:
          node-version: '16'
      - name: restore node_modules cache
        uses: actions/cache@v2
        with:
          path: |
            node_modules
            **/node_modules
          key: ${{ runner.os }}-${{ hashFiles('**/package-lock.json') }}
      - name: Install dependencies
        run: |
          npm install
      - name: Build
        run: |
          npm run build
      - name: Run tests
        env:
          TEST_NODE_VERSION: ${{ matrix.node-version }}
          TEST_DB: ${{ matrix.db }}
        run: npm run test:e2e
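
The matrix above pins a single db/node combination; a rough local equivalent of the workflow's steps, using the repo's own npm scripts and the same env vars the test step sets, would be:

npm install
npm run build
TEST_NODE_VERSION=16-alpine TEST_DB=postgres npm run test:e2e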

.github/workflows/lint.yml (2 changed lines)
@@ -14,7 +14,7 @@ jobs:

    steps:
      - name: Cancel Previous Runs
        uses: styfle/cancel-workflow-action@0.9.0
        uses: styfle/cancel-workflow-action@0.9.1
        with:
          access_token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/release.yml (154 changed lines, new file)
@@ -0,0 +1,154 @@
name: Release

on:
  push:
    tags:
      - 'v*'

env:
  GHCR_IMAGE: ghcr.io/${{ github.repository }}
  DOCKERHUB_IMAGE: ${{ github.repository }}

jobs:
  create-release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Create Release
        id: create_release
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ github.ref }}
          release_name: ${{ github.ref }}
          body: |
            Directus ${{ github.ref }}
          draft: false
          prerelease: false

  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js
        uses: actions/setup-node@v2
        with:
          node-version: '16.x'

      # See https://github.com/npm/cli/issues/3637
      - run: npm i -g npm@7.20.2

      - uses: c-hive/gha-npm-cache@v1
      - run: npm ci
      - run: npm run build
      - run: node docker/pack

      - name: Cache build artifacts
        uses: actions/cache@v2
        with:
          path: '**/dist'
          key: build-artifacts-${{ github.sha }}

  publish-npm:
    runs-on: ubuntu-latest
    needs: build
    steps:
      - uses: actions/checkout@v2
      - name: Restore build artifacts
        uses: actions/cache@v2
        with:
          path: '**/dist'
          key: build-artifacts-${{ github.sha }}
      - name: Use Node.js
        uses: actions/setup-node@v2
        with:
          node-version: '16.x'
          registry-url: 'https://registry.npmjs.org'

      # See https://github.com/npm/cli/issues/3637
      - run: npm i -g npm@7.20.2

      - run: npm ci

      - run: npx lerna publish from-git --no-verify-access --yes
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

  build-images:
    runs-on: ubuntu-latest
    needs: build
    steps:
      - uses: actions/checkout@v2

      - name: Restore build artifacts
        uses: actions/cache@v2
        with:
          path: '**/dist'
          key: build-artifacts-${{ github.sha }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Cache Docker layers
        uses: actions/cache@v2
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v3
        with:
          images: |
            ${{ env.DOCKERHUB_IMAGE }}
            ${{ env.GHCR_IMAGE }}
          # Remove this once v9 is released
          flavor: |
            latest=true
          tags: |
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}

      - name: Login to DockerHub
        uses: docker/login-action@v1
        if: ${{ env.DOCKERHUB_IMAGE }}
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}

      - name: Login to GHCR
        uses: docker/login-action@v1
        if: ${{ env.GHCR_IMAGE }}
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: .
          file: './docker/Dockerfile'
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: linux/amd64,linux/arm64
          push: true
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache-new

      # Temp fix
      # https://github.com/docker/build-push-action/issues/252
      # https://github.com/moby/buildkit/issues/1896
      - name: Move cache
        run: |
          rm -rf /tmp/.buildx-cache
          mv /tmp/.buildx-cache-new /tmp/.buildx-cache

.github/workflows/sync-dockerhub-readme.yml (24 changed lines, new file)
@@ -0,0 +1,24 @@
name: Sync Readme to Docker Hub

on:
  push:
    branches:
      - main
    paths: # ensures this workflow only runs when the readme.md or this file changes.
      - 'readme.md'
      - '.github/workflows/sync-dockerhub-readme.yml'
  workflow_dispatch:

jobs:
  sync-dockerhub-readme:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Sync Readme to Docker Hub
        uses: peter-evans/dockerhub-description@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}
          readme-filepath: ./readme.md

.gitignore (1 changed line)
@@ -17,3 +17,4 @@ dist
app/public/img/docs/*
*.tsbuildinfo
.e2e-containers.json
coverage

Dockerfile (26 changed lines)
@@ -26,33 +26,13 @@ FROM node:${NODE_VERSION}
#ENV TNS_ADMIN /usr/lib/instantclient
#ENV ORACLE_HOME /usr/lib/instantclient

RUN npm i -g lerna

WORKDIR /directus

COPY package*.json ./
COPY lerna.json ./
COPY api/package.json api/
COPY api/cli.js api/
COPY app/package.json app/
COPY docs/package.json docs/
COPY packages/create-directus-project/package.json packages/create-directus-project/
COPY packages/create-directus-project/lib/index.js packages/create-directus-project/lib/
COPY packages/drive/package.json packages/drive/
COPY packages/drive-azure/package.json packages/drive-azure/
COPY packages/drive-gcs/package.json packages/drive-gcs/
COPY packages/drive-s3/package.json packages/drive-s3/
COPY packages/format-title/package.json packages/format-title/
COPY packages/gatsby-source-directus/package.json packages/gatsby-source-directus/
COPY packages/schema/package.json packages/schema/
COPY packages/sdk/package.json packages/sdk/
COPY packages/specs/package.json packages/specs/

RUN npx lerna bootstrap

COPY . .

RUN npm install

WORKDIR /directus/api

CMD ["sh", "-c", "node ./dist/cli/index.js bootstrap; node ./dist/start.js;"]
CMD ["sh", "-c", "node ./cli.js bootstrap; node ./dist/start.js;"]
EXPOSE 8055/tcp

api/.gitignore (2 changed lines)
@@ -10,4 +10,4 @@ test
dist
tmp
keys.json
coverage
@@ -1,2 +1,2 @@
#!/usr/bin/env node
require('./dist/cli/index.js');
require('./dist/cli/run.js');
@@ -9,18 +9,58 @@ LOG_STYLE="pretty"
####################################################################################################
# Database

## PostgreSQL Example
## These match the databases defined in the docker-compose file in the root of this repo

## Postgres
DB_CLIENT="pg"
DB_HOST="localhost"
DB_PORT=5432
DB_PORT=5100
DB_DATABASE="directus"
DB_USER="postgres"
DB_PASSWORD="psql1234"
DB_PASSWORD="secret"

## MySQL 8
# DB_CLIENT="mysql"
# DB_HOST="localhost"
# DB_PORT=5101
# DB_DATABASE="directus"
# DB_USER="root"
# DB_PASSWORD="secret"

## MariaDB
# DB_CLIENT="mysql"
# DB_HOST="localhost"
# DB_PORT=5102
# DB_DATABASE="directus"
# DB_USER="root"
# DB_PASSWORD="secret"

## MS SQL
# DB_CLIENT="mssql"
# DB_HOST="localhost"
# DB_PORT=5103
# DB_DATABASE="directus"
# DB_USER="sa"
# DB_PASSWORD="Test@123"

## OracleDB
# DB_CLIENT="oracle"
# DB_CONNECT_STRING="localhost:5104/XE"
# DB_USER="secretsysuser"
# DB_PASSWORD="secretpassword"

## SQLite Example
# DB_CLIENT="sqlite3"
# DB_FILENAME="./data.db"

## MySQL 5.7
# DB_CLIENT="mysql"
# DB_HOST="localhost"
# DB_PORT=5102
# DB_DATABASE="directus"
# DB_USER="root"
# DB_PASSWORD="secret"

####################################################################################################
# Rate Limiting

@@ -32,45 +72,25 @@ RATE_LIMITER_DURATION=1
RATE_LIMITER_STORE=memory
# memory | redis | memcache

## Redis (see https://github.com/animir/node-rate-limiter-flexible/wiki/Redis and
## https://www.npmjs.com/package/ioredis#connect-to-redis)
# RATE_LIMITER_EXEC_EVENLY=false
# RATE_LIMITER_BLOCK_DURATION=0
# RATE_LIMITER_KEY_PREFIX=rlflx

# RATE_LIMITER_REDIS="redis://:authpassword@127.0.0.1:6380/4"
# --OR--
# RATE_LIMITER_REDIS_HOST="127.0.0.1"
# RATE_LIMITER_REDIS_PORT="127.0.0.1"
# RATE_LIMITER_REDIS_PASSWORD="127.0.0.1"
# RATE_LIMITER_REDIS_DB="127.0.0.1"

## Memcache (see https://github.com/animir/node-rate-limiter-flexible/wiki/Memcache and
## https://www.npmjs.com/package/memcached)
# RATE_LIMITER_MEMCACHE='localhost:11211'
# RATE_LIMITER_REDIS="redis://@127.0.0.1:5105"

# RATE_LIMITER_MEMCACHE="localhost:5109"
####################################################################################################
# Caching

CACHE_ENABLED=true
CACHE_TTL="30m"
CACHE_NAMESPACE="directus-cache"
CACHE_STORE=memory
# memory | redis | memcache
CACHE_AUTO_PURGE=true

# memory | redis | memcache
CACHE_STORE=memory

ASSETS_CACHE_TTL="30m"

# CACHE_REDIS="redis://:authpassword@127.0.0.1:6380/4"
# --OR--
# CACHE_REDIS_HOST="127.0.0.1"
# CACHE_REDIS_PORT="127.0.0.1"
# CACHE_REDIS_PASSWORD="127.0.0.1"
# CACHE_REDIS_DB="127.0.0.1"
# CACHE_REDIS="redis://@127.0.0.1:5105"

## Memcache (see https://github.com/animir/node-rate-limiter-flexible/wiki/Memcache and
## https://www.npmjs.com/package/memcached)
# CACHE_MEMCACHE='localhost:11211'
# CACHE_MEMCACHE="localhost:5109"

####################################################################################################
# File Storage
@@ -103,6 +123,7 @@ ACCESS_TOKEN_TTL="15m"
REFRESH_TOKEN_TTL="7d"
REFRESH_TOKEN_COOKIE_SECURE="false"
REFRESH_TOKEN_COOKIE_SAME_SITE="lax"
REFRESH_TOKEN_COOKIE_NAME="directus_refresh_token"

CORS_ENABLED="true"
CORS_ORIGIN="true"
@@ -112,6 +133,16 @@ CORS_EXPOSED_HEADERS=Content-Range
CORS_CREDENTIALS="true"
CORS_MAX_AGE=18000

####################################################################################################
# Argon2

# HASH_MEMORY_COST=81920
# HASH_HASH_LENGTH=32
# HASH_TIME_COST=10
# HASH_PARALLELISM=2
# HASH_TYPE=2
# HASH_ASSOCIATED_DATA=foo

####################################################################################################
# SSO (OAuth) Providers

api/jest.config.js (12 changed lines, new file)
@@ -0,0 +1,12 @@
const base = require('../jest.config.js');

require('dotenv').config();

module.exports = {
	...base,
	roots: ['<rootDir>/src'],
	verbose: true,
	setupFiles: ['dotenv/config'],
	testURL: process.env.TEST_URL || 'http://localhost',
	collectCoverageFrom: ['src/**/*.ts'],
};
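
With this config, TEST_URL flows into Jest's testURL; combined with the test scripts added to api/package.json below, test runs look like the following (the URL value is illustrative):

npm test                                  # jest --coverage
npm run test:watch                        # jest --watchAll
TEST_URL=http://localhost:8055 npm test   # overrides the default http://localhost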

api/package.json (104 changed lines)
@@ -1,6 +1,6 @@
{
	"name": "directus",
	"version": "9.0.0-rc.83",
	"version": "9.0.0-rc.92",
	"license": "GPL-3.0-only",
	"homepage": "https://github.com/directus/directus#readme",
	"description": "Directus is a real-time API and App dashboard for managing SQL database content.",
@@ -55,9 +55,10 @@
		"prebuild": "npm run cleanup",
		"build": "tsc --build && copyfiles \"src/**/*.*\" -e \"src/**/*.ts\" -u 1 dist",
		"cleanup": "rimraf dist",
		"dev": "cross-env DIRECTUS_DEV=true NODE_ENV=development ts-node-dev --files --transpile-only --respawn --watch \".env\" --inspect --exit-child -- src/start.ts",
		"cli": "cross-env DIRECTUS_DEV=true NODE_ENV=development ts-node --script-mode --transpile-only src/cli/index.ts",
		"prepublishOnly": "npm run build"
		"dev": "cross-env NODE_ENV=development SERVE_APP=false ts-node-dev --files --transpile-only --respawn --watch \".env\" --inspect --exit-child -- src/start.ts",
		"cli": "cross-env NODE_ENV=development SERVE_APP=false ts-node --script-mode --transpile-only src/cli/run.ts",
		"test": "jest --coverage",
		"test:watch": "jest --watchAll"
	},
	"engines": {
		"node": ">=12.20.0"
@@ -69,19 +70,20 @@
		"example.env"
	],
	"dependencies": {
		"@directus/app": "9.0.0-rc.83",
		"@directus/drive": "9.0.0-rc.83",
		"@directus/drive-azure": "9.0.0-rc.83",
		"@directus/drive-gcs": "9.0.0-rc.83",
		"@directus/drive-s3": "9.0.0-rc.83",
		"@directus/format-title": "9.0.0-rc.83",
		"@directus/schema": "9.0.0-rc.83",
		"@directus/shared": "9.0.0-rc.83",
		"@directus/specs": "9.0.0-rc.83",
		"@directus/app": "9.0.0-rc.92",
		"@directus/drive": "9.0.0-rc.92",
		"@directus/drive-azure": "9.0.0-rc.92",
		"@directus/drive-gcs": "9.0.0-rc.92",
		"@directus/drive-s3": "9.0.0-rc.92",
		"@directus/extensions-sdk": "9.0.0-rc.92",
		"@directus/format-title": "9.0.0-rc.92",
		"@directus/schema": "9.0.0-rc.92",
		"@directus/shared": "9.0.0-rc.92",
		"@directus/specs": "9.0.0-rc.92",
		"@godaddy/terminus": "^4.9.0",
		"@rollup/plugin-alias": "^3.1.2",
		"@rollup/plugin-virtual": "^2.0.3",
		"argon2": "^0.28.1",
		"argon2": "^0.28.2",
		"async": "^3.2.0",
		"async-mutex": "^0.3.1",
		"atob": "^2.1.2",
@@ -93,20 +95,20 @@
		"cookie-parser": "^1.4.5",
		"cors": "^2.8.5",
		"csv-parser": "^3.0.0",
		"date-fns": "^2.21.1",
		"date-fns": "^2.22.1",
		"deep-map": "^2.0.0",
		"destroy": "^1.0.4",
		"dotenv": "^10.0.0",
		"eventemitter2": "^6.4.3",
		"execa": "^5.1.1",
		"exif-reader": "^1.0.3",
		"exifr": "^7.1.2",
		"express": "^4.17.1",
		"express-session": "^1.17.2",
		"flat": "^5.0.2",
		"fs-extra": "^10.0.0",
		"grant": "^5.4.14",
		"graphql": "^15.5.0",
		"graphql-compose": "^9.0.1",
		"icc": "^2.0.0",
		"inquirer": "^8.1.1",
		"joi": "^17.3.0",
		"js-yaml": "^4.1.0",
@@ -115,7 +117,7 @@
		"jsonwebtoken": "^8.5.1",
		"keyv": "^4.0.3",
		"knex": "^0.95.6",
		"knex-schema-inspector": "^1.5.7",
		"knex-schema-inspector": "1.6.0",
		"liquidjs": "^9.25.0",
		"lodash": "^4.17.21",
		"macos-release": "^2.4.1",
@@ -125,21 +127,25 @@
		"node-cron": "^3.0.0",
		"node-machine-id": "^1.1.12",
		"nodemailer": "^6.6.1",
		"object-hash": "^2.2.0",
		"openapi3-ts": "^2.0.0",
		"ora": "^5.4.0",
		"otplib": "^12.0.1",
		"pino": "^6.11.3",
		"pino": "6.13.2",
		"pino-colada": "^2.1.0",
		"pino-http": "^5.5.0",
		"pino-http": "5.7.0",
		"prettier": "^2.3.1",
		"qs": "^6.9.4",
		"rate-limiter-flexible": "^2.2.2",
		"resolve-cwd": "^3.0.0",
		"rollup": "^2.52.1",
		"sharp": "^0.28.3",
		"sharp": "^0.29.0",
		"stream-json": "^1.7.1",
		"supertest": "^6.1.6",
		"update-check": "^1.5.4",
		"uuid": "^8.3.2",
		"uuid-validate": "0.0.3"
		"uuid-validate": "0.0.3",
		"wellknown": "^0.5.0"
	},
	"optionalDependencies": {
		"@keyv/redis": "^2.1.2",
@@ -151,43 +157,49 @@
		"memcached": "^2.2.2",
		"mysql": "^2.18.1",
		"nodemailer-mailgun-transport": "^2.1.3",
		"oracledb": "^5.0.0",
		"pg": "^8.6.0",
		"sqlite3": "^5.0.2",
		"tedious": "^11.0.8"
		"tedious": "^12.0.0"
	},
	"gitHead": "24621f3934dc77eb23441331040ed13c676ceffd",
	"devDependencies": {
		"@types/async": "3.2.6",
		"@types/async": "3.2.7",
		"@types/atob": "2.1.2",
		"@types/body-parser": "1.19.0",
		"@types/busboy": "0.2.3",
		"@types/body-parser": "1.19.1",
		"@types/busboy": "0.2.4",
		"@types/cookie-parser": "1.4.2",
		"@types/cors": "2.8.10",
		"@types/cors": "2.8.12",
		"@types/destroy": "1.0.0",
		"@types/express": "4.17.12",
		"@types/express": "4.17.13",
		"@types/express-pino-logger": "4.0.2",
		"@types/express-session": "1.17.3",
		"@types/fs-extra": "9.0.11",
		"@types/inquirer": "7.3.2",
		"@types/js-yaml": "4.0.1",
		"@types/json2csv": "5.0.2",
		"@types/jsonwebtoken": "8.5.2",
		"@types/keyv": "3.1.1",
		"@types/lodash": "4.14.170",
		"@types/mime-types": "2.1.0",
		"@types/express-session": "1.17.4",
		"@types/flat": "^5.0.2",
		"@types/fs-extra": "9.0.12",
		"@types/inquirer": "8.1.1",
		"@types/jest": "27.0.1",
		"@types/js-yaml": "4.0.3",
		"@types/json2csv": "5.0.3",
		"@types/jsonwebtoken": "8.5.5",
		"@types/keyv": "3.1.3",
		"@types/lodash": "4.14.172",
		"@types/mime-types": "2.1.1",
		"@types/ms": "0.7.31",
		"@types/node": "15.12.2",
		"@types/node-cron": "2.0.3",
		"@types/nodemailer": "6.4.2",
		"@types/qs": "6.9.6",
		"@types/sharp": "0.28.3",
		"@types/stream-json": "1.7.0",
		"@types/uuid": "8.3.0",
		"@types/node-cron": "2.0.4",
		"@types/nodemailer": "6.4.4",
		"@types/object-hash": "2.2.0",
		"@types/qs": "6.9.7",
		"@types/sharp": "0.29.1",
		"@types/stream-json": "1.7.1",
		"@types/supertest": "2.0.11",
		"@types/uuid": "8.3.1",
		"@types/uuid-validate": "0.0.1",
		"@types/wellknown": "0.5.1",
		"copyfiles": "2.4.1",
		"cross-env": "7.0.3",
		"ts-node-dev": "1.1.7",
		"typescript": "4.3.4"
		"jest": "27.2.0",
		"ts-jest": "27.0.5",
		"ts-node-dev": "1.1.8",
		"typescript": "4.4.3"
	}
}

api/src/__mocks__/cache.ts (6 changed lines, new file)
@@ -0,0 +1,6 @@
export const cache = {
	get: jest.fn().mockResolvedValue(undefined),
	set: jest.fn().mockResolvedValue(true),
};

export const getCache = jest.fn().mockReturnValue({ cache });
@@ -4,14 +4,6 @@ import fse from 'fs-extra';
import path from 'path';
import qs from 'qs';

import { emitAsyncSafe } from './emitter';
import { initializeExtensions, registerExtensionEndpoints, registerExtensionHooks } from './extensions';
import { InvalidPayloadException } from './exceptions';
import { isInstalled, validateDBConnection } from './database';
import { register as registerWebhooks } from './webhooks';
import env from './env';
import logger, { expressLogger } from './logger';

import activityRouter from './controllers/activity';
import assetsRouter from './controllers/assets';
import authRouter from './controllers/auth';
@@ -35,11 +27,15 @@ import settingsRouter from './controllers/settings';
import usersRouter from './controllers/users';
import utilsRouter from './controllers/utils';
import webhooksRouter from './controllers/webhooks';

import { checkIP } from './middleware/check-ip';
import { session } from './middleware/session';
import { isInstalled, validateDatabaseConnection, validateDatabaseExtensions, validateMigrations } from './database';
import { emitAsyncSafe } from './emitter';
import env from './env';
import { InvalidPayloadException } from './exceptions';
import { initializeExtensions, registerExtensionEndpoints, registerExtensionHooks } from './extensions';
import logger, { expressLogger } from './logger';
import authenticate from './middleware/authenticate';
import cache from './middleware/cache';
import { checkIP } from './middleware/check-ip';
import cors from './middleware/cors';
import errorHandler from './middleware/error-handler';
import extractToken from './middleware/extract-token';
@@ -49,17 +45,35 @@ import schema from './middleware/schema';

import { track } from './utils/track';
import { validateEnv } from './utils/validate-env';
import { validateStorage } from './utils/validate-storage';
import { register as registerWebhooks } from './webhooks';
import { session } from './middleware/session';
import { flushCaches } from './cache';
import { Url } from './utils/url';

export default async function createApp(): Promise<express.Application> {
	validateEnv(['KEY', 'SECRET']);

	await validateDBConnection();
	if (!new Url(env.PUBLIC_URL).isAbsolute()) {
		logger.warn('PUBLIC_URL should be a full URL');
	}

	await validateStorage();

	await validateDatabaseConnection();
	await validateDatabaseExtensions();

	if ((await isInstalled()) === false) {
		logger.error(`Database doesn't have Directus tables installed.`);
		process.exit(1);
	}

	if ((await validateMigrations()) === false) {
		logger.warn(`Database migrations have not all been run`);
	}

	await flushCaches();

	await initializeExtensions();

	registerExtensionHooks();
@@ -105,21 +119,24 @@ export default async function createApp(): Promise<express.Application> {
		app.use(cors);
	}

	if (!('DIRECTUS_DEV' in process.env)) {
	app.get('/', (req, res, next) => {
		if (env.ROOT_REDIRECT) {
			res.redirect(env.ROOT_REDIRECT);
		} else {
			next();
		}
	});

	if (env.SERVE_APP) {
		const adminPath = require.resolve('@directus/app/dist/index.html');
		const publicUrl = env.PUBLIC_URL.endsWith('/') ? env.PUBLIC_URL : env.PUBLIC_URL + '/';
		const adminUrl = new Url(env.PUBLIC_URL).addPath('admin');

		// Set the App's base path according to the APIs public URL
		let html = fse.readFileSync(adminPath, 'utf-8');
		html = html.replace(/<meta charset="utf-8" \/>/, `<meta charset="utf-8" />\n\t\t<base href="${publicUrl}admin/">`);

		app.get('/', (req, res, next) => {
			if (env.ROOT_REDIRECT) {
				res.redirect(env.ROOT_REDIRECT);
			} else {
				next();
			}
		});
		html = html.replace(
			/<meta charset="utf-8" \/>/,
			`<meta charset="utf-8" />\n\t\t<base href="${adminUrl.toString({ rootRelative: true })}/">`
		);

		app.get('/admin', (req, res) => res.send(html));
		app.use('/admin', express.static(path.join(adminPath, '..')));
@@ -169,12 +186,13 @@ export default async function createApp(): Promise<express.Application> {
	app.use('/relations', relationsRouter);
	app.use('/revisions', revisionsRouter);
	app.use('/roles', rolesRouter);
	app.use('/server/', serverRouter);
	app.use('/server', serverRouter);
	app.use('/settings', settingsRouter);
	app.use('/users', usersRouter);
	app.use('/utils', utilsRouter);
	app.use('/webhooks', webhooksRouter);
	app.use('/custom', customRouter);

	app.use(customRouter);

	// Register custom hooks / endpoints
	await emitAsyncSafe('routes.custom.init.before', { app });
@@ -12,17 +12,23 @@ export function getCache(): { cache: Keyv | null; schemaCache: Keyv | null } {
	if (env.CACHE_ENABLED === true && cache === null) {
		validateEnv(['CACHE_NAMESPACE', 'CACHE_TTL', 'CACHE_STORE']);
		cache = getKeyvInstance(ms(env.CACHE_TTL as string));
		cache.on('error', (err) => logger.error(err));
		cache.on('error', (err) => logger.warn(err, `[cache] ${err}`));
	}

	if (env.CACHE_SCHEMA !== false && schemaCache === null) {
		schemaCache = getKeyvInstance(typeof env.CACHE_SCHEMA === 'string' ? ms(env.CACHE_SCHEMA) : undefined);
		schemaCache.on('error', (err) => logger.error(err));
		schemaCache.on('error', (err) => logger.warn(err, `[cache] ${err}`));
	}

	return { cache, schemaCache };
}

export async function flushCaches(): Promise<void> {
	const { schemaCache, cache } = getCache();
	await schemaCache?.clear();
	await cache?.clear();
}

function getKeyvInstance(ttl: number | undefined): Keyv {
	switch (env.CACHE_STORE) {
		case 'redis':
@@ -43,7 +49,10 @@ function getConfig(store: 'memory' | 'redis' | 'memcache' = 'memory', ttl: numbe

	if (store === 'redis') {
		const KeyvRedis = require('@keyv/redis');
		config.store = new KeyvRedis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'));

		config.store = new KeyvRedis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'), {
			commandTimeout: 500,
		});
	}

	if (store === 'memcache') {
@@ -1,3 +1,4 @@
import { Knex } from 'knex';
import { nanoid } from 'nanoid';
import runMigrations from '../../../database/migrations/run';
import installDatabase from '../../../database/seeds/run';
@@ -5,19 +6,16 @@ import env from '../../../env';
import logger from '../../../logger';
import { getSchema } from '../../../utils/get-schema';
import { RolesService, UsersService, SettingsService } from '../../../services';
import getDatabase, { isInstalled, hasDatabaseConnection } from '../../../database';
import getDatabase, { isInstalled, validateDatabaseConnection, hasDatabaseConnection } from '../../../database';
import { SchemaOverview } from '../../../types';

export default async function bootstrap({ skipAdminInit }: { skipAdminInit?: boolean }): Promise<void> {
logger.info('Initializing bootstrap...');

if ((await isDatabaseAvailable()) === false) {
logger.error(`Can't connect to the database`);
process.exit(1);
}

const database = getDatabase();

await waitForDatabase(database);

if ((await isInstalled()) === false) {
logger.info('Installing Directus system tables...');

@@ -48,19 +46,20 @@ export default async function bootstrap({ skipAdminInit }: { skipAdminInit?: boo
process.exit(0);
}

async function isDatabaseAvailable() {
async function waitForDatabase(database: Knex) {
const tries = 5;
const secondsBetweenTries = 5;

for (let i = 0; i < tries; i++) {
if (await hasDatabaseConnection()) {
if (await hasDatabaseConnection(database)) {
return true;
}

await new Promise((resolve) => setTimeout(resolve, secondsBetweenTries * 1000));
}

return false;
// This will throw and exit the process if the database is not available
await validateDatabaseConnection(database);
}

async function createDefaultAdmin(schema: SchemaOverview) {

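The rewritten startup logic retries a cheap connectivity probe a few times and then hands the final, fatal check to validateDatabaseConnection. A generic sketch of that retry-then-validate shape (names here are illustrative, not the Directus API):

// Retry a probe, then run a validator that throws on hard failure.
async function waitThenValidate(
	probe: () => Promise<boolean>, // cheap "is it up yet?" check
	validate: () => Promise<void>, // throws (and may exit) if still unreachable
	tries = 5,
	delayMs = 5000
): Promise<void> {
	for (let i = 0; i < tries; i++) {
		if (await probe()) return;
		await new Promise((resolve) => setTimeout(resolve, delayMs));
	}
	await validate();
}
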
@@ -1,12 +1,11 @@
/* eslint-disable no-console */

import getDatabase from '../../../database';
import logger from '../../../logger';

export default async function count(collection: string): Promise<void> {
const database = getDatabase();

if (!collection) {
console.error('Collection is required');
logger.error('Collection is required');
process.exit(1);
}

@@ -14,11 +13,11 @@ export default async function count(collection: string): Promise<void> {
const records = await database(collection).count('*', { as: 'count' });
const count = Number(records[0].count);

console.log(count);
process.stdout.write(`${count}\n`);
database.destroy();
process.exit(0);
} catch (err) {
console.error(err);
} catch (err: any) {
logger.error(err);
database.destroy();
process.exit(1);
}

@@ -1,8 +1,7 @@
/* eslint-disable no-console */

import runMigrations from '../../../database/migrations/run';
import installSeeds from '../../../database/seeds/run';
import getDatabase from '../../../database';
import logger from '../../../logger';

export default async function start(): Promise<void> {
const database = getDatabase();
@@ -12,8 +11,8 @@ export default async function start(): Promise<void> {
await runMigrations(database, 'latest');
database.destroy();
process.exit(0);
} catch (err) {
console.log(err);
} catch (err: any) {
logger.error(err);
database.destroy();
process.exit(1);
}

@@ -1,25 +1,24 @@
/* eslint-disable no-console */

import run from '../../../database/migrations/run';
import getDatabase from '../../../database';
import logger from '../../../logger';

export default async function migrate(direction: 'latest' | 'up' | 'down'): Promise<void> {
const database = getDatabase();

try {
console.log('✨ Running migrations...');
logger.info('Running migrations...');

await run(database, direction);

if (direction === 'down') {
console.log('✨ Downgrade successful');
logger.info('Downgrade successful');
} else {
console.log('✨ Database up to date');
logger.info('Database up to date');
}
database.destroy();
process.exit();
} catch (err) {
console.log(err);
} catch (err: any) {
logger.error(err);
database.destroy();
process.exit(1);
}

@@ -1,6 +1,3 @@
/* eslint-disable no-console */

import argon2 from 'argon2';
import chalk from 'chalk';
import execa from 'execa';
import inquirer from 'inquirer';
@@ -13,6 +10,7 @@ import createDBConnection, { Credentials } from '../../utils/create-db-connectio
import createEnv from '../../utils/create-env';
import { drivers, getDriverForClient } from '../../utils/drivers';
import { databaseQuestions } from './questions';
import { generateHash } from '../../../utils/generate-hash';

export default async function init(): Promise<void> {
const rootPath = process.cwd();
@@ -48,20 +46,17 @@ export default async function init(): Promise<void> {
try {
await runSeed(db);
await runMigrations(db, 'latest');
} catch (err) {
console.log();
console.log('Something went wrong while seeding the database:');
console.log();
console.log(`${chalk.red(`[${err.code || 'Error'}]`)} ${err.message}`);
console.log();
console.log('Please try again');
console.log();
} catch (err: any) {
process.stdout.write('\nSomething went wrong while seeding the database:\n');
process.stdout.write(`\n${chalk.red(`[${err.code || 'Error'}]`)} ${err.message}\n`);
process.stdout.write('\nPlease try again\n\n');

attemptsRemaining--;

if (attemptsRemaining > 0) {
return await trySeed();
} else {
console.log(`Couldn't seed the database. Exiting.`);
process.stdout.write("Couldn't seed the database. Exiting.\n");
process.exit(1);
}
}
@@ -71,10 +66,7 @@ export default async function init(): Promise<void> {

await createEnv(dbClient, credentials!, rootPath);

console.log();
console.log();

console.log(`Create your first admin user:`);
process.stdout.write('\nCreate your first admin user:\n\n');

const firstUser = await inquirer.prompt([
{
@@ -95,7 +87,7 @@ export default async function init(): Promise<void> {
},
]);

firstUser.password = await argon2.hash(firstUser.password);
firstUser.password = await generateHash(firstUser.password);

const userID = uuidV4();
const roleID = uuidV4();
@@ -120,15 +112,11 @@ export default async function init(): Promise<void> {

await db.destroy();

console.log(`
Your project has been created at ${chalk.green(rootPath)}.

The configuration can be found in ${chalk.green(rootPath + '/.env')}

Start Directus by running:
${chalk.blue('cd')} ${rootPath}
${chalk.blue('npx directus')} start
`);
process.stdout.write(`\nYour project has been created at ${chalk.green(rootPath)}.\n`);
process.stdout.write(`\nThe configuration can be found in ${chalk.green(rootPath + '/.env')}\n`);
process.stdout.write(`\nStart Directus by running:\n`);
process.stdout.write(` ${chalk.blue('cd')} ${rootPath}\n`);
process.stdout.write(` ${chalk.blue('npx directus')} start\n`);

process.exit(0);
}

@@ -50,6 +50,13 @@ const password = (): Record<string, string> => ({
mask: '*',
});

const encrypt = (): Record<string, string | boolean> => ({
type: 'confirm',
name: 'options__encrypt',
message: 'Encrypt Connection:',
default: false,
});

const ssl = (): Record<string, string | boolean> => ({
type: 'confirm',
name: 'ssl',
@@ -62,5 +69,5 @@ export const databaseQuestions = {
mysql: [host, port, database, user, password],
pg: [host, port, database, user, password, ssl],
oracledb: [host, port, database, user, password],
mssql: [host, port, database, user, password],
mssql: [host, port, database, user, password, encrypt],
};

@@ -1,14 +1,13 @@
/* eslint-disable no-console */

import { getSchema } from '../../../utils/get-schema';
import { RolesService } from '../../../services';
import getDatabase from '../../../database';
import logger from '../../../logger';

export default async function rolesCreate({ role: name, admin }: { role: string; admin: boolean }): Promise<void> {
const database = getDatabase();

if (!name) {
console.error('Name is required');
logger.error('Name is required');
process.exit(1);
}

@@ -17,11 +16,11 @@ export default async function rolesCreate({ role: name, admin }: { role: string;
const service = new RolesService({ schema: schema, knex: database });

const id = await service.createOne({ name, admin_access: admin });
console.log(id);
process.stdout.write(`${String(id)}\n`);
database.destroy();
process.exit(0);
} catch (err) {
console.error(err);
} catch (err: any) {
logger.error(err);
process.exit(1);
}
}

@@ -1,8 +1,7 @@
/* eslint-disable no-console */

import { getSchema } from '../../../utils/get-schema';
import { UsersService } from '../../../services';
import getDatabase from '../../../database';
import logger from '../../../logger';

export default async function usersCreate({
email,
@@ -16,7 +15,7 @@ export default async function usersCreate({
const database = getDatabase();

if (!email || !password || !role) {
console.error('Email, password, role are required');
logger.error('Email, password, role are required');
process.exit(1);
}

@@ -25,11 +24,11 @@ export default async function usersCreate({
const service = new UsersService({ schema, knex: database });

const id = await service.createOne({ email, password, role, status: 'active' });
console.log(id);
process.stdout.write(`${String(id)}\n`);
database.destroy();
process.exit(0);
} catch (err) {
console.error(err);
} catch (err: any) {
logger.error(err);
process.exit(1);
}
}

@@ -1,35 +1,34 @@
/* eslint-disable no-console */

import argon2 from 'argon2';
import { getSchema } from '../../../utils/get-schema';
import { generateHash } from '../../../utils/generate-hash';
import { UsersService } from '../../../services';
import getDatabase from '../../../database';
import logger from '../../../logger';

export default async function usersPasswd({ email, password }: { email?: string; password?: string }): Promise<void> {
const database = getDatabase();

if (!email || !password) {
console.error('Email and password are required');
logger.error('Email and password are required');
process.exit(1);
}

try {
const passwordHashed = await argon2.hash(password);
const passwordHashed = await generateHash(password);
const schema = await getSchema();
const service = new UsersService({ schema, knex: database });

const user = await service.knex.select('id').from('directus_users').where({ email }).first();
if (user) {
await service.knex('directus_users').update({ password: passwordHashed }).where({ id: user.id });
console.log(`Password is updated for user ${user.id}`);
logger.info(`Password is updated for user ${user.id}`);
} else {
console.log('No such user by this email');
logger.error('No such user by this email');
}

await database.destroy();
process.exit(user ? 0 : 1);
} catch (err) {
console.error(err);
} catch (err: any) {
logger.error(err);
process.exit(1);
}
}

62 api/src/cli/index.test.ts Normal file
@@ -0,0 +1,62 @@
import { Command } from 'commander';
import { Extension } from '@directus/shared/types';
import { createCli } from '.';

jest.mock('../env', () => ({
...jest.requireActual('../env').default,
LOG_LEVEL: 'silent',
EXTENSIONS_PATH: '',
SERVE_APP: false,
}));

jest.mock('@directus/shared/utils/node/get-extensions', () => ({
getPackageExtensions: jest.fn(() => Promise.resolve([])),
getLocalExtensions: jest.fn(() => Promise.resolve([customCliExtension])),
}));

jest.mock(`/hooks/custom-cli/index.js`, () => () => customCliHook, { virtual: true });

const customCliExtension: Extension = {
path: `/hooks/custom-cli`,
name: 'custom-cli',
type: 'hook',
entrypoint: 'index.js',
local: true,
root: true,
};

const beforeHook = jest.fn();
const afterAction = jest.fn();
const afterHook = jest.fn(({ program }: { program: Command }) => program.command('custom').action(afterAction));
const customCliHook = { 'cli.init.before': beforeHook, 'cli.init.after': afterHook };

const writeOut = jest.fn();
const writeErr = jest.fn();

const setup = async () => {
const program = await createCli();
program.exitOverride();
program.configureOutput({ writeOut, writeErr });
return program;
};

beforeEach(jest.clearAllMocks);

describe('cli hooks', () => {
test('should call hooks before and after creating the cli', async () => {
const program = await setup();

expect(beforeHook).toHaveBeenCalledTimes(1);
expect(beforeHook).toHaveBeenCalledWith({ program });

expect(afterHook).toHaveBeenCalledTimes(1);
expect(afterHook).toHaveBeenCalledWith({ program });
});

test('should be able to add a custom cli command', async () => {
const program = await setup();
program.parseAsync(['custom'], { from: 'user' });

expect(afterAction).toHaveBeenCalledTimes(1);
});
});

@@ -1,9 +1,7 @@
#!/usr/bin/env node

/* eslint-disable no-console */

import { program } from 'commander';
import { Command } from 'commander';
import start from '../start';
import { emitAsyncSafe } from '../emitter';
import { initializeExtensions, registerExtensionHooks } from '../extensions';
import bootstrap from './commands/bootstrap';
import count from './commands/count';
import dbInstall from './commands/database/install';
@@ -15,61 +13,69 @@ import usersPasswd from './commands/users/passwd';

const pkg = require('../../package.json');

program.name('directus').usage('[command] [options]');
program.version(pkg.version, '-v, --version');
export async function createCli(): Promise<Command> {
const program = new Command();

program.command('start').description('Start the Directus API').action(start);
program.command('init').description('Create a new Directus Project').action(init);
await initializeExtensions();
registerExtensionHooks();

const dbCommand = program.command('database');
dbCommand.command('install').description('Install the database').action(dbInstall);
dbCommand
.command('migrate:latest')
.description('Upgrade the database')
.action(() => dbMigrate('latest'));
dbCommand
.command('migrate:up')
.description('Upgrade the database')
.action(() => dbMigrate('up'));
dbCommand
.command('migrate:down')
.description('Downgrade the database')
.action(() => dbMigrate('down'));
await emitAsyncSafe('cli.init.before', { program });

const usersCommand = program.command('users');
program.name('directus').usage('[command] [options]');
program.version(pkg.version, '-v, --version');

usersCommand
.command('create')
.description('Create a new user')
.option('--email <value>', `user's email`)
.option('--password <value>', `user's password`)
.option('--role <value>', `user's role`)
.action(usersCreate);
program.command('start').description('Start the Directus API').action(start);
program.command('init').description('Create a new Directus Project').action(init);

usersCommand
.command('passwd')
.description('Set user password')
.option('--email <value>', `user's email`)
.option('--password <value>', `user's new password`)
.action(usersPasswd);
const dbCommand = program.command('database');
dbCommand.command('install').description('Install the database').action(dbInstall);
dbCommand
.command('migrate:latest')
.description('Upgrade the database')
.action(() => dbMigrate('latest'));
dbCommand
.command('migrate:up')
.description('Upgrade the database')
.action(() => dbMigrate('up'));
dbCommand
.command('migrate:down')
.description('Downgrade the database')
.action(() => dbMigrate('down'));

const rolesCommand = program.command('roles');
rolesCommand
.command('create')
.description('Create a new role')
.option('--role <value>', `name for the role`)
.option('--admin', `whether or not the role has admin access`)
.action(rolesCreate);
const usersCommand = program.command('users');

program.command('count <collection>').description('Count the amount of items in a given collection').action(count);
usersCommand
.command('create')
.description('Create a new user')
.option('--email <value>', `user's email`)
.option('--password <value>', `user's password`)
.option('--role <value>', `user's role`)
.action(usersCreate);

program
.command('bootstrap')
.description('Initialize or update the database')
.option('--skipAdminInit', 'Skips the creation of the default Admin Role and User')
.action(bootstrap);
usersCommand
.command('passwd')
.description('Set user password')
.option('--email <value>', `user's email`)
.option('--password <value>', `user's new password`)
.action(usersPasswd);

program.parseAsync(process.argv).catch((err) => {
console.error(err);
process.exit(1);
});
const rolesCommand = program.command('roles');
rolesCommand
.command('create')
.description('Create a new role')
.option('--role <value>', `name for the role`)
.option('--admin', `whether or not the role has admin access`)
.action(rolesCreate);

program.command('count <collection>').description('Count the amount of items in a given collection').action(count);

program
.command('bootstrap')
.description('Initialize or update the database')
.option('--skipAdminInit', 'Skips the creation of the default Admin Role and User')
.action(bootstrap);

await emitAsyncSafe('cli.init.after', { program });

return program;
}

9 api/src/cli/run.ts Normal file
@@ -0,0 +1,9 @@
import { createCli } from './index';

createCli()
.then((program) => program.parseAsync(process.argv))
.catch((err) => {
// eslint-disable-next-line no-console
console.error(err);
process.exit(1);
});
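With createCli exported as a factory and run.ts as the thin executable entry, extension hooks can now contribute commands before parsing happens. A sketch of a hypothetical hook module, following the event names exercised in the test above (file name and command are illustrative):

import { Command } from 'commander';

// hooks/custom-cli/index.ts (hypothetical): add a command via the
// 'cli.init.after' event, once the built-in commands are registered.
export default () => ({
	'cli.init.after': ({ program }: { program: Command }) => {
		program
			.command('hello')
			.description('Example command added by an extension')
			.action(() => process.stdout.write('hello\n'));
	},
});
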
@@ -9,6 +9,7 @@ export type Credentials = {
user?: string;
password?: string;
ssl?: boolean;
options__encrypt?: boolean;
};
export default function createDBConnection(
client: 'sqlite3' | 'mysql' | 'pg' | 'oracledb' | 'mssql',
@@ -23,26 +24,26 @@ export default function createDBConnection(
filename: filename as string,
};
} else {
if (client !== 'pg') {
const { host, port, database, user, password } = credentials as Credentials;
const { host, port, database, user, password } = credentials as Credentials;

connection = {
host: host,
port: Number(port),
database: database,
user: user,
password: password,
};
} else {
const { host, port, database, user, password, ssl } = credentials as Credentials;
connection = {
host: host,
port: Number(port),
database: database,
user: user,
password: password,
};

connection = {
host: host,
port: Number(port),
database: database,
user: user,
password: password,
ssl: ssl,
if (client === 'pg') {
const { ssl } = credentials as Credentials;
connection['ssl'] = ssl;
}

if (client === 'mssql') {
const { options__encrypt } = credentials as Credentials;

(connection as Knex.MsSqlConnectionConfig)['options'] = {
encrypt: options__encrypt,
};
}
}

|
||||
REFRESH_TOKEN_TTL="7d"
|
||||
REFRESH_TOKEN_COOKIE_SECURE=false
|
||||
REFRESH_TOKEN_COOKIE_SAME_SITE="lax"
|
||||
REFRESH_TOKEN_COOKIE_NAME="directus_refresh_token"
|
||||
|
||||
####################################################################################################
|
||||
## SSO (OAuth) Providers
|
||||
|
||||
@@ -1,47 +1,45 @@
|
||||
import { Transformation } from './types';
|
||||
import { TransformationParams } from './types';
|
||||
|
||||
export const SYSTEM_ASSET_ALLOW_LIST: Transformation[] = [
|
||||
export const SYSTEM_ASSET_ALLOW_LIST: TransformationParams[] = [
|
||||
{
|
||||
key: 'system-small-cover',
|
||||
width: 64,
|
||||
height: 64,
|
||||
fit: 'cover',
|
||||
transforms: [['resize', { width: 64, height: 64, fit: 'cover' }]],
|
||||
},
|
||||
{
|
||||
key: 'system-small-contain',
|
||||
width: 64,
|
||||
fit: 'contain',
|
||||
transforms: [['resize', { width: 64, fit: 'contain' }]],
|
||||
},
|
||||
{
|
||||
key: 'system-medium-cover',
|
||||
width: 300,
|
||||
height: 300,
|
||||
fit: 'cover',
|
||||
transforms: [['resize', { width: 300, height: 300, fit: 'cover' }]],
|
||||
},
|
||||
{
|
||||
key: 'system-medium-contain',
|
||||
width: 300,
|
||||
fit: 'contain',
|
||||
transforms: [['resize', { width: 300, fit: 'contain' }]],
|
||||
},
|
||||
{
|
||||
key: 'system-large-cover',
|
||||
width: 800,
|
||||
height: 600,
|
||||
fit: 'cover',
|
||||
transforms: [['resize', { width: 800, height: 800, fit: 'cover' }]],
|
||||
},
|
||||
{
|
||||
key: 'system-large-contain',
|
||||
width: 800,
|
||||
fit: 'contain',
|
||||
transforms: [['resize', { width: 800, fit: 'contain' }]],
|
||||
},
|
||||
];
|
||||
|
||||
export const ASSET_TRANSFORM_QUERY_KEYS = ['key', 'width', 'height', 'fit', 'withoutEnlargement', 'quality'];
|
||||
export const ASSET_TRANSFORM_QUERY_KEYS = [
|
||||
'key',
|
||||
'transforms',
|
||||
'width',
|
||||
'height',
|
||||
'format',
|
||||
'fit',
|
||||
'quality',
|
||||
'withoutEnlargement',
|
||||
];
|
||||
|
||||
export const FILTER_VARIABLES = ['$NOW', '$CURRENT_USER', '$CURRENT_ROLE'];
|
||||
|
||||
export const ALIAS_TYPES = ['alias', 'o2m', 'm2m', 'm2a', 'files', 'files', 'translations'];
|
||||
|
||||
export const COLUMN_TRANSFORMS = ['year', 'month', 'day', 'weekday', 'hour', 'minute', 'second'];
|
||||
|
||||
export const REGEX_BETWEEN_PARENS = /\(([^)]+)\)/;
|
||||
|
||||
@@ -99,7 +99,7 @@ router.post(
|
||||
res.locals.payload = {
|
||||
data: record || null,
|
||||
};
|
||||
} catch (error) {
|
||||
} catch (error: any) {
|
||||
if (error instanceof ForbiddenException) {
|
||||
return next();
|
||||
}
|
||||
@@ -138,7 +138,7 @@ router.patch(
|
||||
res.locals.payload = {
|
||||
data: record || null,
|
||||
};
|
||||
} catch (error) {
|
||||
} catch (error: any) {
|
||||
if (error instanceof ForbiddenException) {
|
||||
return next();
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ import { ForbiddenException, InvalidQueryException, RangeNotSatisfiableException
|
||||
import useCollection from '../middleware/use-collection';
|
||||
import { AssetsService, PayloadService } from '../services';
|
||||
import storage from '../storage';
|
||||
import { Transformation } from '../types/assets';
|
||||
import { TransformationParams, TransformationMethods, TransformationPreset } from '../types/assets';
|
||||
import asyncHandler from '../utils/async-handler';
|
||||
|
||||
const router = Router();
|
||||
@@ -68,26 +68,63 @@ router.get(
|
||||
if ('key' in transformation && Object.keys(transformation).length > 1) {
|
||||
throw new InvalidQueryException(`You can't combine the "key" query parameter with any other transformation.`);
|
||||
}
|
||||
if ('quality' in transformation && (Number(transformation.quality) < 1 || Number(transformation.quality) > 100)) {
|
||||
throw new InvalidQueryException(`"quality" Parameter has to between 1 to 100`);
|
||||
|
||||
if ('transforms' in transformation) {
|
||||
let transforms: unknown;
|
||||
|
||||
// Try parse the JSON array
|
||||
try {
|
||||
transforms = JSON.parse(transformation['transforms'] as string);
|
||||
} catch {
|
||||
throw new InvalidQueryException(`"transforms" Parameter needs to be a JSON array of allowed transformations.`);
|
||||
}
|
||||
|
||||
// Check if it is actually an array.
|
||||
if (!Array.isArray(transforms)) {
|
||||
throw new InvalidQueryException(`"transforms" Parameter needs to be a JSON array of allowed transformations.`);
|
||||
}
|
||||
|
||||
// Check against ASSETS_TRANSFORM_MAX_OPERATIONS
|
||||
if (transforms.length > Number(env.ASSETS_TRANSFORM_MAX_OPERATIONS)) {
|
||||
throw new InvalidQueryException(
|
||||
`"transforms" Parameter is only allowed ${env.ASSETS_TRANSFORM_MAX_OPERATIONS} transformations.`
|
||||
);
|
||||
}
|
||||
|
||||
// Check the transformations are valid
|
||||
transforms.forEach((transform) => {
|
||||
const name = transform[0];
|
||||
|
||||
if (!TransformationMethods.includes(name)) {
|
||||
throw new InvalidQueryException(`"transforms" Parameter does not allow "${name}" as a transformation.`);
|
||||
}
|
||||
});
|
||||
|
||||
transformation.transforms = transforms;
|
||||
}
|
||||
|
||||
const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key);
|
||||
const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key!);
|
||||
const allKeys: string[] = [
|
||||
...systemKeys,
|
||||
...(assetSettings.storage_asset_presets || []).map((transformation: Transformation) => transformation.key),
|
||||
...(assetSettings.storage_asset_presets || []).map((transformation: TransformationParams) => transformation.key),
|
||||
];
|
||||
|
||||
// For use in the next request handler
|
||||
res.locals.shortcuts = [...SYSTEM_ASSET_ALLOW_LIST, ...(assetSettings.storage_asset_presets || [])];
|
||||
res.locals.transformation = transformation;
|
||||
|
||||
if (Object.keys(transformation).length === 0) {
|
||||
if (
|
||||
Object.keys(transformation).length === 0 ||
|
||||
('transforms' in transformation && transformation.transforms!.length === 0)
|
||||
) {
|
||||
return next();
|
||||
}
|
||||
|
||||
if (assetSettings.storage_asset_transform === 'all') {
|
||||
if (transformation.key && allKeys.includes(transformation.key as string) === false)
|
||||
if (transformation.key && allKeys.includes(transformation.key as string) === false) {
|
||||
throw new InvalidQueryException(`Key "${transformation.key}" isn't configured.`);
|
||||
}
|
||||
|
||||
return next();
|
||||
} else if (assetSettings.storage_asset_transform === 'presets') {
|
||||
if (allKeys.includes(transformation.key as string)) return next();
|
||||
@@ -107,9 +144,9 @@ router.get(
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const transformation: Transformation = res.locals.transformation.key
|
||||
? res.locals.shortcuts.find(
|
||||
(transformation: Transformation) => transformation.key === res.locals.transformation.key
|
||||
const transformation: TransformationParams | TransformationPreset = res.locals.transformation.key
|
||||
? (res.locals.shortcuts as TransformationPreset[]).find(
|
||||
(transformation) => transformation.key === res.locals.transformation.key
|
||||
)
|
||||
: res.locals.transformation;
|
||||
|
||||
|
||||
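A client can therefore pass an ad-hoc pipeline as a JSON-encoded transforms query parameter, subject to the method allow-list and the ASSETS_TRANSFORM_MAX_OPERATIONS cap validated above. A sketch of such a request (hypothetical asset id):

// 'resize' matches the system presets above; the array length must stay
// within ASSETS_TRANSFORM_MAX_OPERATIONS.
const transforms = JSON.stringify([['resize', { width: 300, fit: 'cover' }]]);
const response = await fetch(`/assets/<asset-id>?transforms=${encodeURIComponent(transforms)}`);
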
@@ -11,7 +11,8 @@ import { respond } from '../middleware/respond';
import { AuthenticationService, UsersService } from '../services';
import asyncHandler from '../utils/async-handler';
import getEmailFromProfile from '../utils/get-email-from-profile';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
import logger from '../logger';

const router = Router();

@@ -59,7 +60,7 @@ router.post(
}

if (mode === 'cookie') {
res.cookie('directus_refresh_token', refreshToken, {
res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
@@ -88,7 +89,7 @@ router.post(
schema: req.schema,
});

const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;
const currentRefreshToken = req.body.refresh_token || req.cookies[env.REFRESH_TOKEN_COOKIE_NAME];

if (!currentRefreshToken) {
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
@@ -107,7 +108,7 @@ router.post(
}

if (mode === 'cookie') {
res.cookie('directus_refresh_token', refreshToken, {
res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
@@ -136,7 +137,7 @@ router.post(
schema: req.schema,
});

const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;
const currentRefreshToken = req.body.refresh_token || req.cookies[env.REFRESH_TOKEN_COOKIE_NAME];

if (!currentRefreshToken) {
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
@@ -144,8 +145,8 @@ router.post(

await authenticationService.logout(currentRefreshToken);

if (req.cookies.directus_refresh_token) {
res.clearCookie('directus_refresh_token', {
if (req.cookies[env.REFRESH_TOKEN_COOKIE_NAME]) {
res.clearCookie(env.REFRESH_TOKEN_COOKIE_NAME, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
secure: env.REFRESH_TOKEN_COOKIE_SECURE ?? false,
@@ -161,7 +162,7 @@ router.post(
router.post(
'/password/request',
asyncHandler(async (req, res, next) => {
if (!req.body.email) {
if (typeof req.body.email !== 'string') {
throw new InvalidPayloadException(`"email" field is required.`);
}

@@ -176,10 +177,11 @@ router.post(
try {
await service.requestPasswordReset(req.body.email, req.body.reset_url || null);
return next();
} catch (err) {
} catch (err: any) {
if (err instanceof InvalidPayloadException) {
throw err;
} else {
logger.warn(err, `[email] ${err}`);
return next();
}
}
@@ -190,11 +192,11 @@ router.post(
router.post(
'/password/reset',
asyncHandler(async (req, res, next) => {
if (!req.body.token) {
if (typeof req.body.token !== 'string') {
throw new InvalidPayloadException(`"token" field is required.`);
}

if (!req.body.password) {
if (typeof req.body.password !== 'string') {
throw new InvalidPayloadException(`"password" field is required.`);
}

@@ -318,8 +320,11 @@ router.get(
authResponse = await authenticationService.authenticate({
email,
});
} catch (error) {
} catch (error: any) {
emitStatus('fail');

logger.warn(error);

if (redirect) {
let reason = 'UNKNOWN_EXCEPTION';

@@ -340,7 +345,7 @@ router.get(
emitStatus('success');

if (redirect) {
res.cookie('directus_refresh_token', refreshToken, {
res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),

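Since the cookie name now comes from REFRESH_TOKEN_COOKIE_NAME (defaulting to directus_refresh_token in the example env above), a deployment can rename it without client changes; browsers echo back whatever name the server set. A sketch of a cookie-mode refresh call (hypothetical host):

// The refresh token travels in the configured httpOnly cookie, so the
// client only has to opt in to sending credentials.
const refreshResponse = await fetch('https://directus.example.com/auth/refresh', {
	method: 'POST',
	credentials: 'include',
	headers: { 'Content-Type': 'application/json' },
	body: JSON.stringify({ mode: 'cookie' }),
});
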
@@ -88,7 +88,7 @@ router.patch(
try {
const collection = await collectionsService.readOne(req.params.collection);
res.locals.payload = { data: collection || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

@@ -3,18 +3,17 @@ import asyncHandler from '../utils/async-handler';
import { RouteNotFoundException } from '../exceptions';
import { listExtensions, getAppExtensionSource } from '../extensions';
import { respond } from '../middleware/respond';
import { depluralize } from '@directus/shared/utils';
import { AppExtensionType, Plural } from '@directus/shared/types';
import { APP_EXTENSION_TYPES } from '@directus/shared/constants';
import { depluralize, isAppExtension } from '@directus/shared/utils';
import { Plural } from '@directus/shared/types';

const router = Router();

router.get(
'/:type',
asyncHandler(async (req, res, next) => {
const type = depluralize(req.params.type as Plural<AppExtensionType>);
const type = depluralize(req.params.type as Plural<string>);

if (APP_EXTENSION_TYPES.includes(type) === false) {
if (!isAppExtension(type)) {
throw new RouteNotFoundException(req.path);
}

@@ -32,9 +31,9 @@ router.get(
router.get(
'/:type/index.js',
asyncHandler(async (req, res) => {
const type = depluralize(req.params.type as Plural<AppExtensionType>);
const type = depluralize(req.params.type as Plural<string>);

if (APP_EXTENSION_TYPES.includes(type) === false) {
if (!isAppExtension(type)) {
throw new RouteNotFoundException(req.path);
}

|
||||
import { respond } from '../middleware/respond';
|
||||
import useCollection from '../middleware/use-collection';
|
||||
import { FieldsService } from '../services/fields';
|
||||
import { Field, types } from '../types';
|
||||
import { Field, Type } from '@directus/shared/types';
|
||||
import { TYPES } from '@directus/shared/constants';
|
||||
import asyncHandler from '../utils/async-handler';
|
||||
|
||||
const router = Router();
|
||||
@@ -65,7 +66,7 @@ const newFieldSchema = Joi.object({
|
||||
collection: Joi.string().optional(),
|
||||
field: Joi.string().required(),
|
||||
type: Joi.string()
|
||||
.valid(...types, ...ALIAS_TYPES)
|
||||
.valid(...TYPES, ...ALIAS_TYPES)
|
||||
.allow(null)
|
||||
.optional(),
|
||||
schema: Joi.object({
|
||||
@@ -93,14 +94,14 @@ router.post(
|
||||
throw new InvalidPayloadException(error.message);
|
||||
}
|
||||
|
||||
const field: Partial<Field> & { field: string; type: typeof types[number] | null } = req.body;
|
||||
const field: Partial<Field> & { field: string; type: Type | null } = req.body;
|
||||
|
||||
await service.createField(req.params.collection, field);
|
||||
|
||||
try {
|
||||
const createdField = await service.readOne(req.params.collection, field.field);
|
||||
res.locals.payload = { data: createdField || null };
|
||||
} catch (error) {
|
||||
} catch (error: any) {
|
||||
if (error instanceof ForbiddenException) {
|
||||
return next();
|
||||
}
|
||||
@@ -137,7 +138,7 @@ router.patch(
|
||||
results.push(updatedField);
|
||||
res.locals.payload = { data: results || null };
|
||||
}
|
||||
} catch (error) {
|
||||
} catch (error: any) {
|
||||
if (error instanceof ForbiddenException) {
|
||||
return next();
|
||||
}
|
||||
@@ -152,7 +153,7 @@ router.patch(
|
||||
|
||||
const updateSchema = Joi.object({
|
||||
type: Joi.string()
|
||||
.valid(...types, ...ALIAS_TYPES)
|
||||
.valid(...TYPES, ...ALIAS_TYPES)
|
||||
.allow(null),
|
||||
schema: Joi.object({
|
||||
default_value: Joi.any(),
|
||||
@@ -183,7 +184,7 @@ router.patch(
|
||||
throw new InvalidPayloadException(`You need to provide "type" when providing "schema".`);
|
||||
}
|
||||
|
||||
const fieldData: Partial<Field> & { field: string; type: typeof types[number] } = req.body;
|
||||
const fieldData: Partial<Field> & { field: string; type: Type } = req.body;
|
||||
|
||||
if (!fieldData.field) fieldData.field = req.params.field;
|
||||
|
||||
@@ -192,7 +193,7 @@ router.patch(
|
||||
try {
|
||||
const updatedField = await service.readOne(req.params.collection, req.params.field);
|
||||
res.locals.payload = { data: updatedField || null };
|
||||
} catch (error) {
|
||||
} catch (error: any) {
|
||||
if (error instanceof ForbiddenException) {
|
||||
return next();
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ import { validateBatch } from '../middleware/validate-batch';
import { FilesService, MetaService } from '../services';
import { File, PrimaryKey } from '../types';
import asyncHandler from '../utils/async-handler';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';

const router = express.Router();

@@ -33,7 +33,7 @@ const multipartHandler = asyncHandler(async (req, res, next) => {
*/

let disk: string = toArray(env.STORAGE_LOCATIONS)[0];
const payload: Partial<File> = {};
let payload: Partial<File> = {};
let fileCount = 0;

busboy.on('field', (fieldname: keyof File, val) => {
@@ -55,10 +55,6 @@ const multipartHandler = asyncHandler(async (req, res, next) => {
payload.title = formatTitle(path.parse(filename).name);
}

if (req.accountability?.user) {
payload.uploaded_by = req.accountability.user;
}

const payloadWithRequiredFields: Partial<File> & {
filename_download: string;
type: string;
@@ -70,11 +66,14 @@ const multipartHandler = asyncHandler(async (req, res, next) => {
storage: payload.storage || disk,
};

// Clear the payload for the next to-be-uploaded file
payload = {};

try {
const primaryKey = await service.uploadOne(fileStream, payloadWithRequiredFields, existingPrimaryKey);
savedFiles.push(primaryKey);
tryDone();
} catch (error) {
} catch (error: any) {
busboy.emit('error', error);
}
});
@@ -128,7 +127,7 @@ router.post(
data: record,
};
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -165,7 +164,7 @@ router.post(
try {
const record = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: record || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -243,7 +242,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -270,7 +269,7 @@ router.patch(
try {
const record = await service.readOne(req.params.pk, req.sanitizedQuery);
res.locals.payload = { data: record || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

@@ -37,7 +37,7 @@ router.post(
const record = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: record };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -114,7 +114,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -140,7 +140,7 @@ router.patch(
try {
const record = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: record || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

@@ -42,7 +42,7 @@ router.post(
const result = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: result || null };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -97,10 +97,6 @@ router.get(
asyncHandler(async (req, res, next) => {
if (req.params.collection.startsWith('directus_')) throw new ForbiddenException();

if (req.singleton) {
throw new RouteNotFoundException(req.path);
}

const service = new ItemsService(req.collection, {
accountability: req.accountability,
schema: req.schema,
@@ -111,6 +107,7 @@ router.get(
res.locals.payload = {
data: result || null,
};

return next();
}),
respond
@@ -147,7 +144,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -180,7 +177,7 @@ router.patch(
try {
const result = await service.readOne(updatedPrimaryKey, req.sanitizedQuery);
res.locals.payload = { data: result || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

@@ -20,7 +20,7 @@ const notFound: RequestHandler = async (req, res, next) => {
return next();
}
next(new RouteNotFoundException(req.path));
} catch (err) {
} catch (err: any) {
next(err);
}
};

@@ -37,7 +37,7 @@ router.post(
const item = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: item };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -116,7 +116,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -142,7 +142,7 @@ router.patch(
try {
const item = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: item || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

@@ -37,7 +37,7 @@ router.post(
const record = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: record };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -115,7 +115,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -141,7 +141,7 @@ router.patch(
try {
const record = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: record };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

@@ -90,7 +90,7 @@ router.post(
try {
const createdRelation = await service.readOne(req.body.collection, req.body.field);
res.locals.payload = { data: createdRelation || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -135,7 +135,7 @@ router.patch(
try {
const updatedField = await service.readOne(req.params.collection, req.params.field);
res.locals.payload = { data: updatedField || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

@@ -37,7 +37,7 @@ router.post(
const item = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: item };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -106,7 +106,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -132,7 +132,7 @@ router.patch(
try {
const item = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: item || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

@@ -35,7 +35,7 @@ router.patch(
try {
const record = await service.readSingleton(req.sanitizedQuery);
res.locals.payload = { data: record || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

@@ -38,7 +38,7 @@ router.post(
const item = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: item };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -86,7 +86,7 @@ router.get(
try {
const item = await service.readOne(req.accountability.user, req.sanitizedQuery);
res.locals.payload = { data: item || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
res.locals.payload = { data: { id: req.accountability.user } };
return next();
@@ -177,7 +177,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -203,7 +203,7 @@ router.patch(
try {
const item = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: item || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

@@ -2,12 +2,14 @@ import argon2 from 'argon2';
import { Router } from 'express';
import Joi from 'joi';
import { nanoid } from 'nanoid';
import { InvalidPayloadException, InvalidQueryException } from '../exceptions';
import { ForbiddenException, InvalidPayloadException, InvalidQueryException } from '../exceptions';
import collectionExists from '../middleware/collection-exists';
import { respond } from '../middleware/respond';
import { RevisionsService, UtilsService, ImportService } from '../services';
import asyncHandler from '../utils/async-handler';
import Busboy from 'busboy';
import { flushCaches } from '../cache';
import { generateHash } from '../utils/generate-hash';

const router = Router();

@@ -30,7 +32,7 @@ router.post(
throw new InvalidPayloadException(`"string" is required`);
}

const hash = await argon2.hash(req.body.string);
const hash = await generateHash(req.body.string);

return res.json({ data: hash });
})
@@ -102,7 +104,7 @@ router.post(
busboy.on('file', async (fieldname, fileStream, filename, encoding, mimetype) => {
try {
await service.import(req.params.collection, mimetype, fileStream);
} catch (err) {
} catch (err: any) {
return next(err);
}

@@ -115,4 +117,17 @@ router.post(
})
);

router.post(
'/cache/clear',
asyncHandler(async (req, res) => {
if (req.accountability?.admin !== true) {
throw new ForbiddenException();
}

await flushCaches();

res.status(200).end();
})
);

export default router;

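The new /utils/cache/clear route gives administrators an HTTP switch for the flushCaches helper from the cache module above. A sketch of invoking it (hypothetical host and token):

// Admin-only; responds 200 with an empty body once both caches are flushed.
await fetch('https://directus.example.com/utils/cache/clear', {
	method: 'POST',
	headers: { Authorization: 'Bearer <admin-token>' },
});
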
@@ -37,7 +37,7 @@ router.post(
const item = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: item };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -106,7 +106,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -132,7 +132,7 @@ router.patch(
try {
const item = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: item || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

164
api/src/database/helpers/geometry.ts
Normal file
164
api/src/database/helpers/geometry.ts
Normal file
@@ -0,0 +1,164 @@
|
||||
import { Field, RawField } from '@directus/shared/types';
|
||||
import { Knex } from 'knex';
|
||||
import { stringify as geojsonToWKT, GeoJSONGeometry } from 'wellknown';
|
||||
import getDatabase from '..';
|
||||
|
||||
let geometryHelper: KnexSpatial | undefined;
|
||||
|
||||
export function getGeometryHelper(): KnexSpatial {
|
||||
if (!geometryHelper) {
|
||||
const db = getDatabase();
|
||||
const client = db.client.config.client as string;
|
||||
const constructor = {
|
||||
mysql: KnexSpatial_MySQL,
|
||||
mariadb: KnexSpatial_MySQL,
|
||||
sqlite3: KnexSpatial,
|
||||
pg: KnexSpatial_PG,
|
||||
postgres: KnexSpatial_PG,
|
||||
redshift: KnexSpatial_Redshift,
|
||||
mssql: KnexSpatial_MSSQL,
|
||||
oracledb: KnexSpatial_Oracle,
|
||||
}[client];
|
||||
if (!constructor) {
|
||||
throw new Error(`Geometry helper not implemented on ${client}.`);
|
||||
}
|
||||
geometryHelper = new constructor(db);
|
||||
}
|
||||
return geometryHelper;
|
||||
}
|
||||
|
||||
class KnexSpatial {
|
||||
constructor(protected knex: Knex) {}
|
||||
isTrue(expression: Knex.Raw) {
|
||||
return expression;
|
||||
}
|
||||
isFalse(expression: Knex.Raw) {
|
||||
return expression.wrap('NOT ', '');
|
||||
}
|
||||
createColumn(table: Knex.CreateTableBuilder, field: RawField | Field) {
|
||||
const type = field.schema?.geometry_type ?? 'geometry';
|
||||
return table.specificType(field.field, type);
|
||||
}
|
||||
asText(table: string, column: string): Knex.Raw {
|
||||
return this.knex.raw('st_astext(??.??) as ??', [table, column, column]);
|
||||
}
|
||||
fromText(text: string): Knex.Raw {
|
||||
return this.knex.raw('st_geomfromtext(?, 4326)', text);
|
||||
}
|
||||
fromGeoJSON(geojson: GeoJSONGeometry): Knex.Raw {
|
||||
return this.fromText(geojsonToWKT(geojson));
|
||||
}
|
||||
_intersects(key: string, geojson: GeoJSONGeometry): Knex.Raw {
|
||||
const geometry = this.fromGeoJSON(geojson);
|
||||
return this.knex.raw('st_intersects(??, ?)', [key, geometry]);
|
||||
}
|
||||
intersects(key: string, geojson: GeoJSONGeometry): Knex.Raw {
|
||||
return this.isTrue(this._intersects(key, geojson));
|
||||
}
|
||||
nintersects(key: string, geojson: GeoJSONGeometry): Knex.Raw {
|
||||
return this.isFalse(this._intersects(key, geojson));
|
||||
}
|
||||
_intersects_bbox(key: string, geojson: GeoJSONGeometry): Knex.Raw {
|
||||
const geometry = this.fromGeoJSON(geojson);
|
||||
return this.knex.raw('intersects(??, ?)', [key, geometry]);
|
||||
}
|
||||
intersects_bbox(key: string, geojson: GeoJSONGeometry): Knex.Raw {
|
||||
return this.isTrue(this._intersects_bbox(key, geojson));
|
||||
}
|
||||
nintersects_bbox(key: string, geojson: GeoJSONGeometry): Knex.Raw {
|
||||
return this.isFalse(this._intersects_bbox(key, geojson));
|
||||
}
|
||||
collect(table: string, column: string): Knex.Raw {
|
||||
return this.knex.raw('st_astext(st_collect(??.??))', [table, column]);
|
||||
}
|
||||
}
|
||||
|
||||
class KnexSpatial_PG extends KnexSpatial {
|
||||
createColumn(table: Knex.CreateTableBuilder, field: RawField | Field) {
|
||||
const type = field.schema?.geometry_type ?? 'geometry';
|
||||
return table.specificType(field.field, `geometry(${type})`);
|
||||
}
|
||||
_intersects_bbox(key: string, geojson: GeoJSONGeometry): Knex.Raw {
|
||||
const geometry = this.fromGeoJSON(geojson);
|
||||
return this.knex.raw('?? && ?', [key, geometry]);
|
||||
}
|
||||
}
|
||||
|
||||
class KnexSpatial_MySQL extends KnexSpatial {
|
||||
collect(table: string, column: string): Knex.Raw {
|
||||
return this.knex.raw(
|
||||
`concat('geometrycollection(', group_concat(? separator ', '), ')'`,
|
||||
this.asText(table, column)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
class KnexSpatial_Redshift extends KnexSpatial {
	createColumn(table: Knex.CreateTableBuilder, field: RawField | Field) {
		const type = field.schema?.geometry_type ?? 'geometry';
		if (type !== 'geometry') field.meta!.special![1] = type;
		return table.specificType(field.field, 'geometry');
	}
}

class KnexSpatial_MSSQL extends KnexSpatial {
	isTrue(expression: Knex.Raw) {
		return expression.wrap(``, ` = 1`);
	}

	isFalse(expression: Knex.Raw) {
		return expression.wrap(``, ` = 0`);
	}

	createColumn(table: Knex.CreateTableBuilder, field: RawField | Field) {
		const type = field.schema?.geometry_type ?? 'geometry';
		if (type !== 'geometry') field.meta!.special![1] = type;
		return table.specificType(field.field, 'geometry');
	}

	asText(table: string, column: string): Knex.Raw {
		return this.knex.raw('??.??.STAsText() as ??', [table, column, column]);
	}

	fromText(text: string): Knex.Raw {
		return this.knex.raw('geometry::STGeomFromText(?, 4326)', text);
	}

	_intersects(key: string, geojson: GeoJSONGeometry): Knex.Raw {
		const geometry = this.fromGeoJSON(geojson);
		return this.knex.raw('??.STIntersects(?)', [key, geometry]);
	}

	_intersects_bbox(key: string, geojson: GeoJSONGeometry): Knex.Raw {
		const geometry = this.fromGeoJSON(geojson);
		return this.knex.raw('??.STEnvelope().STIntersects(?.STEnvelope())', [key, geometry]);
	}

	collect(table: string, column: string): Knex.Raw {
		return this.knex.raw('geometry::CollectionAggregate(??.??).STAsText()', [table, column]);
	}
}

class KnexSpatial_Oracle extends KnexSpatial {
	isTrue(expression: Knex.Raw) {
		return expression.wrap(``, ` = 'TRUE'`);
	}

	isFalse(expression: Knex.Raw) {
		return expression.wrap(``, ` = 'FALSE'`);
	}

	createColumn(table: Knex.CreateTableBuilder, field: RawField | Field) {
		const type = field.schema?.geometry_type ?? 'geometry';
		if (type !== 'geometry') field.meta!.special![1] = type;
		return table.specificType(field.field, 'sdo_geometry');
	}

	asText(table: string, column: string): Knex.Raw {
		return this.knex.raw('sdo_util.from_wktgeometry(??.??) as ??', [table, column, column]);
	}

	fromText(text: string): Knex.Raw {
		return this.knex.raw('sdo_geometry(?, 4326)', text);
	}

	_intersects(key: string, geojson: GeoJSONGeometry): Knex.Raw {
		const geometry = this.fromGeoJSON(geojson);
		return this.knex.raw(`sdo_overlapbdyintersect(??, ?)`, [key, geometry]);
	}

	_intersects_bbox(key: string, geojson: GeoJSONGeometry): Knex.Raw {
		const geometry = this.fromGeoJSON(geojson);
		return this.knex.raw(`sdo_overlapbdyintersect(sdo_geom.sdo_mbr(??), sdo_geom.sdo_mbr(?))`, [key, geometry]);
	}

	collect(table: string, column: string): Knex.Raw {
		return this.knex.raw(`concat('geometrycollection(', listagg(?, ', '), ')')`, this.asText(table, column));
	}
}
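
For orientation, here is a minimal sketch (not part of this commit) of how these dialect-specific helpers could be selected at runtime. The factory name and the constructor signature taking the Knex instance are assumptions, not from this diff:

import { Knex } from 'knex';

// Hypothetical factory: picks the dialect-specific spatial helper based on
// the Knex client in use, falling back to the generic base implementation.
function getSpatialHelper(knex: Knex): KnexSpatial {
	switch (knex.client.constructor.name) {
		case 'Client_PG':
			return new KnexSpatial_PG(knex);
		case 'Client_MySQL':
			return new KnexSpatial_MySQL(knex);
		case 'Client_Redshift':
			return new KnexSpatial_Redshift(knex);
		case 'Client_MSSQL':
			return new KnexSpatial_MSSQL(knex);
		case 'Client_Oracledb':
			return new KnexSpatial_Oracle(knex);
		default:
			return new KnexSpatial(knex);
	}
}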
@@ -5,6 +5,10 @@ import env from '../env';
import logger from '../logger';
import { getConfigFromEnv } from '../utils/get-config-from-env';
import { validateEnv } from '../utils/validate-env';
import fse from 'fs-extra';
import path from 'path';
import { merge } from 'lodash';
import { promisify } from 'util';

let database: Knex | null = null;
let inspector: ReturnType<typeof SchemaInspector> | null = null;
@@ -19,6 +23,7 @@ export default function getDatabase(): Knex {
		'DB_SEARCH_PATH',
		'DB_CONNECTION_STRING',
		'DB_POOL',
		'DB_EXCLUDE_TABLES',
	]);

	const poolConfig = getConfigFromEnv('DB_POOL');
@@ -50,7 +55,15 @@ export default function getDatabase(): Knex {
		searchPath: env.DB_SEARCH_PATH,
		connection: env.DB_CONNECTION_STRING || connectionConfig,
		log: {
			warn: (msg) => logger.warn(msg),
			warn: (msg) => {
				// Ignore warnings about returning not being supported in some DBs
				if (msg.startsWith('.returning()')) return;

				// Ignore warning about MySQL not supporting TRX for DDL
				if (msg.startsWith('Transaction was implicitly committed, do not mix transactions and DDL with MySQL')) return;

				return logger.warn(msg);
			},
			error: (msg) => logger.error(msg),
			deprecate: (msg) => logger.info(msg),
			debug: (msg) => logger.debug(msg),
@@ -60,11 +73,24 @@ export default function getDatabase(): Knex {

	if (env.DB_CLIENT === 'sqlite3') {
		knexConfig.useNullAsDefault = true;
		poolConfig.afterCreate = (conn: any, cb: any) => {
			conn.run('PRAGMA foreign_keys = ON', cb);

		poolConfig.afterCreate = async (conn: any, callback: any) => {
			logger.trace('Enabling SQLite Foreign Keys support...');

			const run = promisify(conn.run.bind(conn));
			await run('PRAGMA foreign_keys = ON');

			callback(null, conn);
		};
	}

	if (env.DB_CLIENT === 'mssql') {
		// This brings MS SQL in line with the other DB vendors. We shouldn't do any automatic
		// timezone conversion on the database level, especially not when other database vendors don't
		// act the same
		merge(knexConfig, { connection: { options: { useUTC: false } } });
	}

	database = knex(knexConfig);

	const times: Record<string, number> = {};
@@ -94,36 +120,131 @@ export function getSchemaInspector(): ReturnType<typeof SchemaInspector> {
	return inspector;
}

export async function hasDatabaseConnection(): Promise<boolean> {
	const database = getDatabase();
export async function hasDatabaseConnection(database?: Knex): Promise<boolean> {
	database = database ?? getDatabase();

	try {
		if (env.DB_CLIENT === 'oracledb') {
		if (getDatabaseClient(database) === 'oracle') {
			await database.raw('select 1 from DUAL');
		} else {
			await database.raw('SELECT 1');
		}

		return true;
	} catch {
		return false;
	}
}

export async function validateDBConnection(): Promise<void> {
export async function validateDatabaseConnection(database?: Knex): Promise<void> {
	database = database ?? getDatabase();

	try {
		await hasDatabaseConnection();
	} catch (error) {
		if (getDatabaseClient(database) === 'oracle') {
			await database.raw('select 1 from DUAL');
		} else {
			await database.raw('SELECT 1');
		}
	} catch (error: any) {
		logger.error(`Can't connect to the database.`);
		logger.error(error);
		process.exit(1);
	}
}

export function getDatabaseClient(database?: Knex): 'mysql' | 'postgres' | 'sqlite' | 'oracle' | 'mssql' {
	database = database ?? getDatabase();

	switch (database.client.constructor.name) {
		case 'Client_MySQL':
			return 'mysql';
		case 'Client_PG':
			return 'postgres';
		case 'Client_SQLite3':
			return 'sqlite';
		case 'Client_Oracledb':
		case 'Client_Oracle':
			return 'oracle';
		case 'Client_MSSQL':
			return 'mssql';
	}

	throw new Error(`Couldn't extract database client`);
}

export async function isInstalled(): Promise<boolean> {
	const inspector = getSchemaInspector();

	// The existence of a directus_collections table alone isn't a "proper" check to see if everything
	// is installed correctly of course, but it's safe enough to assume that this collection only
	// exists when using the installer CLI.
	// exists when Directus is properly installed.
	return await inspector.hasTable('directus_collections');
}

export async function validateMigrations(): Promise<boolean> {
	const database = getDatabase();

	try {
		let migrationFiles = await fse.readdir(path.join(__dirname, 'migrations'));

		const customMigrationsPath = path.resolve(env.EXTENSIONS_PATH, 'migrations');

		let customMigrationFiles =
			((await fse.pathExists(customMigrationsPath)) && (await fse.readdir(customMigrationsPath))) || [];

		migrationFiles = migrationFiles.filter(
			(file: string) => file.startsWith('run') === false && file.endsWith('.d.ts') === false
		);

		customMigrationFiles = customMigrationFiles.filter((file: string) => file.endsWith('.js'));

		migrationFiles.push(...customMigrationFiles);

		const requiredVersions = migrationFiles.map((filePath) => filePath.split('-')[0]);
		const completedVersions = (await database.select('version').from('directus_migrations')).map(
			({ version }) => version
		);

		return requiredVersions.every((version) => completedVersions.includes(version));
	} catch (error: any) {
		logger.error(`Database migrations cannot be found`);
		logger.error(error);
		throw process.exit(1);
	}
}

/**
 * These database extensions should be optional, so we don't throw or return any problem states when they don't exist
 */
export async function validateDatabaseExtensions(): Promise<void> {
	const database = getDatabase();
	const databaseClient = getDatabaseClient(database);

	if (databaseClient === 'postgres') {
		let available = false;
		let installed = false;

		const exists = await database.raw(`SELECT name FROM pg_available_extensions WHERE name = 'postgis';`);

		if (exists.rows.length > 0) {
			available = true;
		}

		if (available) {
			try {
				await database.raw(`SELECT PostGIS_version();`);
				installed = true;
			} catch {
				installed = false;
			}
		}

		if (available === false) {
			logger.warn(`PostGIS isn't installed. Geometry type support will be limited.`);
		} else if (available === true && installed === false) {
			logger.warn(
				`PostGIS is installed, but hasn't been activated on this database. Geometry type support will be limited.`
			);
		}
	}
}
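
A brief usage sketch (illustrative only; the helper name and relative import path are assumed) of how getDatabaseClient() lets callers branch per dialect, mirroring the 'select 1 from DUAL' special case above:

import getDatabase, { getDatabaseClient } from './database';

// Hypothetical helper: ping the database with dialect-aware SQL.
async function ping(): Promise<void> {
	const database = getDatabase();
	const client = getDatabaseClient(database);

	// Oracle has no parameterless SELECT; every other supported dialect does.
	await database.raw(client === 'oracle' ? 'select 1 from DUAL' : 'SELECT 1');
}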
@@ -1,7 +1,6 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
import env from '../../env';

async function oracleAlterUrl(knex: Knex, type: string): Promise<void> {
	await knex.raw('ALTER TABLE "directus_webhooks" ADD "url__temp" ?', [knex.raw(type)]);
@@ -23,7 +22,7 @@ export async function up(knex: Knex): Promise<void> {
}

export async function down(knex: Knex): Promise<void> {
	if (env.DB_CLIENT === 'oracledb') {
	if (knex.client instanceof Client_Oracledb) {
		await oracleAlterUrl(knex, 'VARCHAR2(255)');
		return;
	}

@@ -1,7 +1,6 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
import env from '../../env';

async function oracleAlterCollections(knex: Knex, type: string): Promise<void> {
	await knex.raw('ALTER TABLE "directus_webhooks" ADD "collections__temp" ?', [knex.raw(type)]);
@@ -23,7 +22,7 @@ export async function up(knex: Knex): Promise<void> {
}

export async function down(knex: Knex): Promise<void> {
	if (env.DB_CLIENT === 'oracledb') {
	if (knex.client instanceof Client_Oracledb) {
		await oracleAlterCollections(knex, 'VARCHAR2(255)');
		return;
	}

@@ -68,7 +68,7 @@ export async function up(knex: Knex): Promise<void> {
	await knex(constraint.many_collection)
		.update({ [constraint.many_field]: null })
		.whereIn(currentPrimaryKeyField, ids);
} catch (err) {
} catch (err: any) {
	logger.error(
		`${constraint.many_collection}.${constraint.many_field} contains illegal foreign keys which couldn't be set to NULL. Please fix these references and rerun this migration to complete the upgrade.`
	);
@@ -111,7 +111,7 @@ export async function up(knex: Knex): Promise<void> {
		builder.onDelete('SET NULL');
	}
});
} catch (err) {
} catch (err: any) {
	logger.warn(
		`Couldn't add foreign key constraint for ${constraint.many_collection}.${constraint.many_field}<->${constraint.one_collection}`
	);
@@ -140,7 +140,7 @@ export async function down(knex: Knex): Promise<void> {
	await knex.schema.alterTable(relation.many_collection, (table) => {
		table.dropForeign([relation.many_field]);
	});
} catch (err) {
} catch (err: any) {
	logger.warn(
		`Couldn't drop foreign key constraint for ${relation.many_collection}.${relation.many_field}<->${relation.one_collection}`
	);

@@ -99,7 +99,7 @@ export async function up(knex: Knex): Promise<void> {
	await knex.schema.alterTable(update.table, (table) => {
		table.dropForeign([constraint.column], existingForeignKey?.constraint_name || undefined);
	});
} catch (err) {
} catch (err: any) {
	logger.warn(`Couldn't drop foreign key ${update.table}.${constraint.column}->${constraint.references}`);
	logger.warn(err);
}
@@ -114,7 +114,7 @@ export async function up(knex: Knex): Promise<void> {
	// Knex uses a default convention for index names: `table_column_type`
	table.dropIndex([constraint.column], `${update.table}_${constraint.column}_foreign`);
});
} catch (err) {
} catch (err: any) {
	logger.warn(
		`Couldn't clean up index for foreign key ${update.table}.${constraint.column}->${constraint.references}`
	);
@@ -126,7 +126,7 @@ export async function up(knex: Knex): Promise<void> {
	await knex.schema.alterTable(update.table, (table) => {
		table.foreign(constraint.column).references(constraint.references).onDelete(constraint.on_delete);
	});
} catch (err) {
} catch (err: any) {
	logger.warn(`Couldn't add foreign key to ${update.table}.${constraint.column}->${constraint.references}`);
	logger.warn(err);
}
@@ -141,7 +141,7 @@ export async function down(knex: Knex): Promise<void> {
	await knex.schema.alterTable(update.table, (table) => {
		table.dropForeign([constraint.column]);
	});
} catch (err) {
} catch (err: any) {
	logger.warn(`Couldn't drop foreign key ${update.table}.${constraint.column}->${constraint.references}`);
	logger.warn(err);
}
@@ -156,7 +156,7 @@ export async function down(knex: Knex): Promise<void> {
	// Knex uses a default convention for index names: `table_column_type`
	table.dropIndex([constraint.column], `${update.table}_${constraint.column}_foreign`);
});
} catch (err) {
} catch (err: any) {
	logger.warn(
		`Couldn't clean up index for foreign key ${update.table}.${constraint.column}->${constraint.references}`
	);
@@ -168,7 +168,7 @@ export async function down(knex: Knex): Promise<void> {
	await knex.schema.alterTable(update.table, (table) => {
		table.foreign(constraint.column).references(constraint.references);
	});
} catch (err) {
} catch (err: any) {
	logger.warn(`Couldn't add foreign key to ${update.table}.${constraint.column}->${constraint.references}`);
	logger.warn(err);
}

@@ -1,12 +1,22 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';

export async function up(knex: Knex): Promise<void> {
	if (knex.client instanceof Client_Oracledb) {
		return;
	}

	await knex.schema.alterTable('directus_files', (table) => {
		table.bigInteger('filesize').nullable().defaultTo(null).alter();
	});
}

export async function down(knex: Knex): Promise<void> {
	if (knex.client instanceof Client_Oracledb) {
		return;
	}

	await knex.schema.alterTable('directus_files', (table) => {
		table.integer('filesize').nullable().defaultTo(null).alter();
	});

@@ -0,0 +1,13 @@
import { Knex } from 'knex';

export async function up(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_fields', (table) => {
		table.json('conditions');
	});
}

export async function down(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_fields', (table) => {
		table.dropColumn('conditions');
	});
}
22
api/src/database/migrations/20210721A-add-default-folder.ts
Normal file
@@ -0,0 +1,22 @@
import { Knex } from 'knex';
import { getDefaultIndexName } from '../../utils/get-default-index-name';

const indexName = getDefaultIndexName('foreign', 'directus_settings', 'storage_default_folder');

export async function up(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_settings', (table) => {
		table
			.uuid('storage_default_folder')
			.references('id')
			.inTable('directus_folders')
			.withKeyName(indexName)
			.onDelete('SET NULL');
	});
}

export async function down(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_settings', (table) => {
		table.dropForeign(['storage_default_folder'], indexName);
		table.dropColumn('storage_default_folder');
	});
}
49
api/src/database/migrations/20210802A-replace-groups.ts
Normal file
@@ -0,0 +1,49 @@
import { Knex } from 'knex';
import logger from '../../logger';

export async function up(knex: Knex): Promise<void> {
	const dividerGroups = await knex.select('*').from('directus_fields').where('interface', '=', 'group-divider');

	for (const dividerGroup of dividerGroups) {
		const newOptions: { showHeader: true; headerIcon?: string; headerColor?: string } = { showHeader: true };

		if (dividerGroup.options) {
			try {
				const options =
					typeof dividerGroup.options === 'string' ? JSON.parse(dividerGroup.options) : dividerGroup.options;

				if (options.icon) newOptions.headerIcon = options.icon;
				if (options.color) newOptions.headerColor = options.color;
			} catch (err: any) {
				logger.warn(`Couldn't convert previous options from field ${dividerGroup.collection}.${dividerGroup.field}`);
				logger.warn(err);
			}
		}

		try {
			await knex('directus_fields')
				.update({
					interface: 'group-standard',
					options: JSON.stringify(newOptions),
				})
				.where('id', '=', dividerGroup.id);
		} catch (err: any) {
			logger.warn(`Couldn't update ${dividerGroup.collection}.${dividerGroup.field} to new group interface`);
			logger.warn(err);
		}
	}

	await knex('directus_fields')
		.update({
			interface: 'group-standard',
		})
		.where({ interface: 'group-raw' });
}

export async function down(knex: Knex): Promise<void> {
	await knex('directus_fields')
		.update({
			interface: 'group-raw',
		})
		.where('interface', '=', 'group-standard');
}
@@ -0,0 +1,13 @@
import { Knex } from 'knex';

export async function up(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_fields', (table) => {
		table.boolean('required').defaultTo(false);
	});
}

export async function down(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_fields', (table) => {
		table.dropColumn('required');
	});
}
35
api/src/database/migrations/20210805A-update-groups.ts
Normal file
@@ -0,0 +1,35 @@
import { Knex } from 'knex';

export async function up(knex: Knex): Promise<void> {
	const groups = await knex.select('*').from('directus_fields').where({ interface: 'group-standard' });

	const raw = [];
	const detail = [];

	for (const group of groups) {
		const options = typeof group.options === 'string' ? JSON.parse(group.options) : group.options || {};

		if (options.showHeader === true) {
			detail.push(group);
		} else {
			raw.push(group);
		}
	}

	for (const field of raw) {
		await knex('directus_fields').update({ interface: 'group-raw' }).where({ id: field.id });
	}

	for (const field of detail) {
		await knex('directus_fields').update({ interface: 'group-detail' }).where({ id: field.id });
	}
}

export async function down(knex: Knex): Promise<void> {
	await knex('directus_fields')
		.update({
			interface: 'group-standard',
		})
		.where({ interface: 'group-detail' })
		.orWhere({ interface: 'group-raw' });
}
@@ -0,0 +1,94 @@
import { Knex } from 'knex';

// Change image metadata structure to match the output from 'exifr'
export async function up(knex: Knex): Promise<void> {
	const files = await knex
		.select<{ id: number; metadata: string }[]>('id', 'metadata')
		.from('directus_files')
		.whereNotNull('metadata');

	for (const { id, metadata } of files) {
		let prevMetadata;

		try {
			prevMetadata = JSON.parse(metadata);
		} catch {
			continue;
		}

		// Update only required if metadata has 'exif' data
		if (prevMetadata.exif) {
			// Get all data from 'exif' and rename the following keys:
			// - 'image' to 'ifd0'
			// - 'thumbnail' to 'ifd1'
			// - 'interoperability' to 'interop'
			const newMetadata = prevMetadata.exif;

			if (newMetadata.image) {
				newMetadata.ifd0 = newMetadata.image;
				delete newMetadata.image;
			}
			if (newMetadata.thumbnail) {
				newMetadata.ifd1 = newMetadata.thumbnail;
				delete newMetadata.thumbnail;
			}
			if (newMetadata.interoperability) {
				newMetadata.interop = newMetadata.interoperability;
				delete newMetadata.interoperability;
			}
			if (prevMetadata.icc) {
				newMetadata.icc = prevMetadata.icc;
			}
			if (prevMetadata.iptc) {
				newMetadata.iptc = prevMetadata.iptc;
			}

			await knex('directus_files')
				.update({ metadata: JSON.stringify(newMetadata) })
				.where({ id });
		}
	}
}

export async function down(knex: Knex): Promise<void> {
	const files = await knex
		.select<{ id: number; metadata: string }[]>('id', 'metadata')
		.from('directus_files')
		.whereNotNull('metadata')
		.whereNot('metadata', '{}');

	for (const { id, metadata } of files) {
		const prevMetadata = JSON.parse(metadata);

		// Update only required if metadata has keys other than 'icc' and 'iptc'
		if (Object.keys(prevMetadata).filter((key) => key !== 'icc' && key !== 'iptc').length > 0) {
			// Put all data under 'exif' and rename/move keys afterwards
			const newMetadata: { exif: Record<string, unknown>; icc?: unknown; iptc?: unknown } = { exif: prevMetadata };

			if (newMetadata.exif.ifd0) {
				newMetadata.exif.image = newMetadata.exif.ifd0;
				delete newMetadata.exif.ifd0;
			}
			if (newMetadata.exif.ifd1) {
				newMetadata.exif.thumbnail = newMetadata.exif.ifd1;
				delete newMetadata.exif.ifd1;
			}
			if (newMetadata.exif.interop) {
				newMetadata.exif.interoperability = newMetadata.exif.interop;
				delete newMetadata.exif.interop;
			}
			if (newMetadata.exif.icc) {
				newMetadata.icc = newMetadata.exif.icc;
				delete newMetadata.exif.icc;
			}
			if (newMetadata.exif.iptc) {
				newMetadata.iptc = newMetadata.exif.iptc;
				delete newMetadata.exif.iptc;
			}

			await knex('directus_files')
				.update({ metadata: JSON.stringify(newMetadata) })
				.where({ id });
		}
	}
}
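
To illustrate the reshaping this `up` migration performs (sample values are invented for illustration):

// Before: exif data nested under `metadata.exif`, with pre-exifr key names.
const before = {
	exif: {
		image: { Make: 'Canon' },
		thumbnail: { Compression: 6 },
		interoperability: { InteropIndex: 'R98' },
	},
	icc: { description: 'sRGB' },
};

// After: the former `exif` object becomes the top level, with
// image -> ifd0, thumbnail -> ifd1, interoperability -> interop,
// while icc/iptc are carried over unchanged.
const after = {
	ifd0: { Make: 'Canon' },
	ifd1: { Compression: 6 },
	interop: { InteropIndex: 'R98' },
	icc: { description: 'sRGB' },
};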
15
api/src/database/migrations/20210811A-add-geometry-config.ts
Normal file
@@ -0,0 +1,15 @@
import { Knex } from 'knex';

export async function up(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_settings', (table) => {
		table.json('basemaps');
		table.string('mapbox_key');
	});
}

export async function down(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_settings', (table) => {
		table.dropColumn('basemaps');
		table.dropColumn('mapbox_key');
	});
}
13
api/src/database/migrations/20210831A-remove-limit-column.ts
Normal file
@@ -0,0 +1,13 @@
import { Knex } from 'knex';

export async function up(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_permissions', (table) => {
		table.dropColumn('limit');
	});
}

export async function down(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_permissions', (table) => {
		table.integer('limit').unsigned();
	});
}
@@ -0,0 +1,13 @@
import { Knex } from 'knex';

export async function up(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_webhooks', (table) => {
		table.text('collections').notNullable().alter();
	});
}

export async function down(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_webhooks', (table) => {
		table.text('collections').alter();
	});
}
@@ -1,16 +1,10 @@
/* eslint-disable no-console */

import formatTitle from '@directus/format-title';
import fse from 'fs-extra';
import { Knex } from 'knex';
import path from 'path';
import env from '../../env';

type Migration = {
	version: string;
	name: string;
	timestamp: Date;
};
import logger from '../../logger';
import { Migration } from '../../types';

export default async function run(database: Knex, direction: 'up' | 'down' | 'latest'): Promise<void> {
	let migrationFiles = await fse.readdir(__dirname);
@@ -67,7 +61,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la

	const { up } = require(nextVersion.file);

	console.log(`✨ Applying ${nextVersion.name}...`);
	logger.info(`Applying ${nextVersion.name}...`);

	await up(database);
	await database.insert({ version: nextVersion.version, name: nextVersion.name }).into('directus_migrations');
@@ -88,7 +82,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la

	const { down } = require(migration.file);

	console.log(`✨ Undoing ${migration.name}...`);
	logger.info(`Undoing ${migration.name}...`);

	await down(database);
	await database('directus_migrations').delete().where({ version: migration.version });
@@ -99,7 +93,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
	if (migration.completed === false) {
		const { up } = require(migration.file);

		console.log(`✨ Applying ${migration.name}...`);
		logger.info(`Applying ${migration.name}...`);

		await up(database);
		await database.insert({ version: migration.version, name: migration.name }).into('directus_migrations');

@@ -2,13 +2,15 @@ import { Knex } from 'knex';
import { clone, cloneDeep, pick, uniq } from 'lodash';
import { PayloadService } from '../services/payload';
import { Item, Query, SchemaOverview } from '../types';
import { AST, FieldNode, NestedCollectionNode } from '../types/ast';
import { AST, FieldNode, NestedCollectionNode, M2ONode } from '../types/ast';
import { applyFunctionToColumnName } from '../utils/apply-function-to-column-name';
import applyQuery from '../utils/apply-query';
import { getColumn } from '../utils/get-column';
import { stripFunction } from '../utils/strip-function';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
import getDatabase from './index';
import { isNativeGeometry } from '../utils/geometry';
import { getGeometryHelper } from '../database/helpers/geometry';

type RunASTOptions = {
	/**
@@ -58,7 +60,7 @@ export default async function runAST(

	async function run(collection: string, children: (NestedCollectionNode | FieldNode)[], query: Query) {
		// Retrieve the database columns to select in the current AST
		const { columnsToSelect, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel(
		const { fieldNodes, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel(
			schema,
			collection,
			children,
@@ -66,7 +68,7 @@ export default async function runAST(
		);

		// The actual knex query builder instance. This is a promise that resolves with the raw items from the db
		const dbQuery = await getDBQuery(schema, knex, collection, columnsToSelect, query, options?.nested);
		const dbQuery = await getDBQuery(schema, knex, collection, fieldNodes, query, options?.nested);

		const rawItems: Item | Item[] = await dbQuery;

@@ -117,8 +119,17 @@ async function parseCurrentLevel(
	for (const child of children) {
		if (child.type === 'field') {
			const fieldKey = stripFunction(child.name);

			if (columnsInCollection.includes(fieldKey) || fieldKey === '*') {
				columnsToSelectInternal.push(child.name); // maintain original name here (includes functions)

				if (query.alias) {
					columnsToSelectInternal.push(
						...Object.entries(query.alias)
							.filter(([_key, value]) => value === child.name)
							.map(([key]) => key)
					);
				}
			}

			continue;
@@ -127,7 +138,7 @@ async function parseCurrentLevel(
		if (!child.relation) continue;

		if (child.type === 'm2o') {
			columnsToSelectInternal.push(child.relation.field);
			columnsToSelectInternal.push(child.fieldKey);
		}

		if (child.type === 'm2a') {
@@ -138,31 +149,62 @@ async function parseCurrentLevel(
		nestedCollectionNodes.push(child);
	}

	/**
	 * Always fetch primary key in case there's a nested relation that needs it, but only if there's
	 * no aggregation / grouping going on
	const isAggregate = (query.aggregate && Object.keys(query.aggregate).length > 0) ?? false;

	/** Always fetch primary key in case there's a nested relation that needs it. Aggregate payloads
	 * can't have nested relational fields
	 */
	const hasAggregationOrGrouping = 'aggregate' in query || 'group' in query;
	if (columnsToSelectInternal.includes(primaryKeyField) === false && hasAggregationOrGrouping === false) {
	if (isAggregate === false && columnsToSelectInternal.includes(primaryKeyField) === false) {
		columnsToSelectInternal.push(primaryKeyField);
	}

	/** Make sure select list has unique values */
	const columnsToSelect = [...new Set(columnsToSelectInternal)];

	return { columnsToSelect, nestedCollectionNodes, primaryKeyField };
	const fieldNodes = columnsToSelect.map(
		(column: string) =>
			children.find((childNode) => childNode.fieldKey === column) ?? { type: 'field', name: column, fieldKey: column }
	) as FieldNode[];

	return { fieldNodes, nestedCollectionNodes, primaryKeyField };
}

function getColumnPreprocessor(knex: Knex, schema: SchemaOverview, table: string) {
	const helper = getGeometryHelper();

	return function (fieldNode: FieldNode | M2ONode): Knex.Raw<string> {
		let field;

		if (fieldNode.type === 'field') {
			field = schema.collections[table].fields[stripFunction(fieldNode.name)];
		} else {
			field = schema.collections[fieldNode.relation.collection].fields[fieldNode.relation.field];
		}

		let alias = undefined;

		if (fieldNode.name !== fieldNode.fieldKey) {
			alias = fieldNode.fieldKey;
		}

		if (isNativeGeometry(field)) {
			return helper.asText(table, field.field);
		}

		return getColumn(knex, table, fieldNode.name, alias);
	};
}

function getDBQuery(
	schema: SchemaOverview,
	knex: Knex,
	table: string,
	columns: string[],
	fieldNodes: FieldNode[],
	query: Query,
	nested?: boolean
): Knex.QueryBuilder {
	const dbQuery = knex.select(columns.map((column) => getColumn(knex, table, column))).from(table);

	const preProcess = getColumnPreprocessor(knex, schema, table);
	const dbQuery = knex.select(fieldNodes.map(preProcess)).from(table);
	const queryCopy = clone(query);

	queryCopy.limit = typeof queryCopy.limit === 'number' ? queryCopy.limit : 100;
@@ -205,11 +247,19 @@ function applyParentFilters(
	});

	if (relatedM2OisFetched === false) {
		nestedNode.children.push({ type: 'field', name: nestedNode.relation.field });
		nestedNode.children.push({
			type: 'field',
			name: nestedNode.relation.field,
			fieldKey: nestedNode.relation.field,
		});
	}

	if (nestedNode.relation.meta?.sort_field) {
		nestedNode.children.push({ type: 'field', name: nestedNode.relation.meta.sort_field });
		nestedNode.children.push({
			type: 'field',
			name: nestedNode.relation.meta.sort_field,
			fieldKey: nestedNode.relation.meta.sort_field,
		});
	}

	nestedNode.query = {
@@ -387,10 +437,9 @@ function removeTemporaryFields(
	const nestedCollectionNodes: NestedCollectionNode[] = [];

	for (const child of ast.children) {
		if (child.type === 'field') {
			fields.push(child.name);
		} else {
			fields.push(child.fieldKey);
		fields.push(child.fieldKey);

		if (child.type !== 'field') {
			nestedCollectionNodes.push(child);
		}
	}
@@ -402,7 +451,7 @@ function removeTemporaryFields(

	if (operation === 'count' && aggregateFields.includes('*')) fields.push('count');

	fields.push(...aggregateFields.map((field) => `${field}_${operation}`));
	fields.push(...aggregateFields.map((field) => `${operation}.${field}`));
		}
	}
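
As a small illustration of the new FieldNode-based selection (hypothetical values; `articles` and `headline` are invented), the preprocessor above only aliases a column when fieldKey diverges from name:

// A field requested under an alias: `name` is the real column, `fieldKey` the alias.
const node = { type: 'field', name: 'title', fieldKey: 'headline' };

// Mirrors getColumnPreprocessor(): alias is only set when the two differ,
// so this node resolves to getColumn(knex, 'articles', 'title', 'headline'),
// i.e. selecting the `title` column as `headline` from the `articles` table.
const alias = node.name !== node.fieldKey ? node.fieldKey : undefined;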
@@ -3,13 +3,14 @@ import yaml from 'js-yaml';
import { Knex } from 'knex';
import { isObject } from 'lodash';
import path from 'path';
import { types } from '../../types';
import { Type, Field } from '@directus/shared/types';
import { getGeometryHelper } from '../helpers/geometry';

type TableSeed = {
	table: string;
	columns: {
		[column: string]: {
			type?: typeof types[number];
			type?: Type;
			primary?: boolean;
			nullable?: boolean;
			default?: any;
@@ -45,6 +46,8 @@ export default async function runSeed(database: Knex): Promise<void> {
	for (const [columnName, columnInfo] of Object.entries(seedData.columns)) {
		let column: Knex.ColumnBuilder;

		if (columnInfo.type === 'alias' || columnInfo.type === 'unknown') return;

		if (columnInfo.type === 'string') {
			column = tableBuilder.string(columnName, columnInfo.length);
		} else if (columnInfo.increments) {
@@ -53,6 +56,9 @@ export default async function runSeed(database: Knex): Promise<void> {
			column = tableBuilder.string(columnName);
		} else if (columnInfo.type === 'hash') {
			column = tableBuilder.string(columnName, 255);
		} else if (columnInfo.type === 'geometry') {
			const helper = getGeometryHelper();
			column = helper.createColumn(tableBuilder, { field: columnName } as Field);
		} else {
			column = tableBuilder[columnInfo.type!](columnName);
		}

@@ -8,7 +8,6 @@ const defaults: Partial<Permission> = {
	validation: null,
	presets: null,
	fields: ['*'],
	limit: null,
	system: true,
};

@@ -8,51 +8,52 @@ defaults:
  note: null
  translations: null
  display_template: null
  accountability: 'all'

data:
  - collection: directus_activity
    note: Accountability logs for all events
    note: $t:directus_collection.directus_activity
  - collection: directus_collections
    icon: list_alt
    note: Additional collection configuration and metadata
    note: $t:directus_collection.directus_collections
  - collection: directus_fields
    icon: input
    note: Additional field configuration and metadata
    note: $t:directus_collection.directus_fields
  - collection: directus_files
    icon: folder
    note: Metadata for all managed file assets
    note: $t:directus_collection.directus_files
    display_template: '{{ $thumbnail }} {{ title }}'
  - collection: directus_folders
    note: Provides virtual directories for files
    note: $t:directus_collection.directus_folders
    display_template: '{{ name }}'
  - collection: directus_migrations
    note: What version of the database you're using
    note: $t:directus_collection.directus_migrations
  - collection: directus_permissions
    icon: admin_panel_settings
    note: Access permissions for each role
    note: $t:directus_collection.directus_permissions
  - collection: directus_presets
    icon: bookmark_border
    note: Presets for collection defaults and bookmarks
    note: $t:directus_collection.directus_presets
    accountability: null
  - collection: directus_relations
    icon: merge_type
    note: Relationship configuration and metadata
    note: $t:directus_collection.directus_relations
  - collection: directus_revisions
    note: Data snapshots for all activity
    note: $t:directus_collection.directus_revisions
  - collection: directus_roles
    icon: supervised_user_circle
    note: Permission groups for system users
    note: $t:directus_collection.directus_roles
  - collection: directus_sessions
    note: User session information
    note: $t:directus_collection.directus_sessions
  - collection: directus_settings
    singleton: true
    note: Project configuration options
    note: $t:directus_collection.directus_settings
  - collection: directus_users
    archive_field: status
    archive_value: archived
    unarchive_value: draft
    icon: people_alt
    note: System users for the platform
    note: $t:directus_collection.directus_users
    display_template: '{{ first_name }} {{ last_name }}'
  - collection: directus_webhooks
    note: Configuration for event-based HTTP requests
    note: $t:directus_collection.directus_webhooks

@@ -13,19 +13,19 @@ fields:
      defaultForeground: 'var(--foreground-normal)'
      defaultBackground: 'var(--background-normal-alt)'
      choices:
        - text: Create
        - text: $t:field_options.directus_activity.create
          value: create
          foreground: 'var(--primary)'
          background: 'var(--primary-25)'
        - text: Update
        - text: $t:field_options.directus_activity.update
          value: update
          foreground: 'var(--blue)'
          background: 'var(--blue-25)'
        - text: Delete
        - text: $t:field_options.directus_activity.delete
          value: delete
          foreground: 'var(--danger)'
          background: 'var(--danger-25)'
        - text: Login
        - text: $t:field_options.directus_activity.login
          value: authenticate
          foreground: 'var(--purple)'
          background: 'var(--purple-25)'

@@ -8,7 +8,7 @@ fields:
    interface: presentation-divider
    options:
      icon: box
      title: Collection Setup
      title: $t:field_options.directus_collections.collection_setup
    width: full

  - field: collection
@@ -32,7 +32,7 @@ fields:
  - field: color
    interface: select-color
    options:
      placeholder: Choose a color...
      placeholder: $t:field_options.directus_collections.note_placeholder
    width: half

  - field: display_template
@@ -45,7 +45,7 @@ fields:
    special: boolean
    interface: boolean
    options:
      label: Hide within the App
      label: $t:field_options.directus_collections.hidden_label
    width: half

  - field: singleton
@@ -102,7 +102,7 @@ fields:
    interface: presentation-divider
    options:
      icon: archive
      title: Archive
      title: $t:field_options.directus_collections.archive_divider
    width: full

  - field: archive_field
@@ -110,14 +110,14 @@ fields:
    options:
      collectionField: collection
      allowNone: true
      placeholder: Choose a field...
      placeholder: $t:field_options.directus_collections.archive_field
    width: half

  - field: archive_app_filter
    interface: boolean
    special: boolean
    options:
      label: Enable App Archive Filter
      label: $t:field_options.directus_collections.archive_app_filter
    width: half

  - field: archive_value
@@ -125,7 +125,7 @@ fields:
    options:
      font: monospace
      iconRight: archive
      placeholder: Value set when archiving...
      placeholder: $t:field_options.directus_collections.archive_value
    width: half

  - field: unarchive_value
@@ -133,7 +133,7 @@ fields:
    options:
      font: monospace
      iconRight: unarchive
      placeholder: Value set when unarchiving...
      placeholder: $t:field_options.directus_collections.unarchive_value
    width: half

  - field: sort_divider
@@ -143,14 +143,14 @@ fields:
    interface: presentation-divider
    options:
      icon: sort
      title: Sort
      title: $t:field_options.directus_collections.divider
    width: full

  - field: sort_field
    interface: system-field
    options:
      collectionField: collection
      placeholder: Choose a field...
      placeholder: $t:field_options.directus_collections.sort_field
      typeAllowList:
        - float
        - decimal
@@ -165,7 +165,7 @@ fields:
    interface: presentation-divider
    options:
      icon: admin_panel_settings
      title: Accountability
      title: $t:field_options.directus_collections.accountability_divider
    width: full

  - field: accountability

@@ -52,6 +52,12 @@ fields:
    special: boolean
    width: half

  - collection: directus_fields
    field: required
    hidden: true
    special: boolean
    width: half

  - collection: directus_fields
    field: sort
    width: half
@@ -73,3 +79,8 @@ fields:
  - collection: directus_fields
    field: note
    width: half

  - collection: directus_fields
    field: conditions
    hidden: true
    special: json

@@ -10,14 +10,14 @@ fields:
    interface: input
    options:
      iconRight: title
      placeholder: A unique title...
      placeholder: $t:field_options.directus_files.title
    width: full

  - field: description
    interface: input-multiline
    width: full
    options:
      placeholder: An optional description...
      placeholder: $t:field_options.directus_files.description

  - field: tags
    interface: tags
@@ -35,7 +35,7 @@ fields:
    interface: input
    options:
      iconRight: place
      placeholder: An optional location...
      placeholder: $t:field_options.directus_files.location
    width: half

  - field: storage
@@ -49,7 +49,7 @@ fields:
    interface: presentation-divider
    options:
      icon: insert_drive_file
      title: File Naming
      title: $t:field_options.directus_files.storage_divider
    special:
      - alias
      - no-data
@@ -59,7 +59,7 @@ fields:
    interface: input
    options:
      iconRight: publish
      placeholder: Name on disk storage...
      placeholder: $t:field_options.directus_files.filename_disk
    readonly: true
    width: half

@@ -67,7 +67,7 @@ fields:
    interface: input
    options:
      iconRight: get_app
      placeholder: Name when downloading...
      placeholder: $t:field_options.directus_files.filename_download
    width: half

  - field: metadata
@@ -106,6 +106,7 @@ fields:
    display: user
    width: half
    hidden: true
    special: user-created

  - field: uploaded_on
    display: datetime

@@ -1,7 +1,7 @@
import fse from 'fs-extra';
import { merge } from 'lodash';
import path from 'path';
import { FieldMeta } from '../../../types';
import { FieldMeta } from '@directus/shared/types';
import { requireYAML } from '../../../utils/require-yaml';

const defaults = requireYAML(require.resolve('./_defaults.yaml'));

@@ -15,9 +15,6 @@ fields:
  - field: role
    width: half

  - field: limit
    width: half

  - field: collection
    width: half

@@ -9,7 +9,7 @@ fields:
  - field: name
    interface: input
    options:
      placeholder: The unique name for this role...
      placeholder: $t:field_options.directus_roles.name
    width: half

  - field: icon
@@ -20,7 +20,7 @@ fields:
  - field: description
    interface: input
    options:
      placeholder: A description of this role...
      placeholder: $t:field_options.directus_roles.description
    width: full

  - field: app_access
@@ -36,7 +36,7 @@ fields:
  - field: ip_access
    interface: tags
    options:
      placeholder: Add allowed IP addresses, leave empty to allow all...
      placeholder: $t:field_options.directus_roles.ip_access
    special: csv
    width: full

@@ -60,13 +60,13 @@ fields:
      template: '{{ name }}'
      addLabel: Add New Module...
      fields:
        - name: Icon
        - name: $t:field_options.directus_roles.fields.icon_name
          field: icon
          type: string
          meta:
            interface: select-icon
            width: half
        - name: Name
        - name: $t:field_options.directus_roles.fields.name_name
          field: name
          type: string
          meta:
@@ -74,8 +74,8 @@ fields:
            width: half
            options:
              iconRight: title
              placeholder: Enter a title...
        - name: Link
              placeholder:
        - name: $t:field_options.directus_roles.fields.link_name
          field: link
          type: string
          meta:
@@ -83,7 +83,7 @@ fields:
            width: full
            options:
              iconRight: link
              placeholder: Relative or absolute URL...
              placeholder: $t:field_options.directus_roles.fields.link_placeholder
    special: json
    width: full

@@ -91,9 +91,9 @@ fields:
    interface: list
    options:
      template: '{{ group_name }}'
      addLabel: Add New Group...
      addLabel: $t:field_options.directus_roles.collection_list.group_name_addLabel
      fields:
        - name: Group Name
        - name: $t:field_options.directus_roles.collection_list.fields.group_name
          field: group_name
          type: string
          meta:
@@ -101,10 +101,10 @@ fields:
            interface: input
            options:
              iconRight: title
              placeholder: Label this group...
              placeholder: $t:field_options.directus_roles.collection_list.fields.group_placeholder
          schema:
            is_nullable: false
        - name: Type
        - name: $t:field_options.directus_roles.collection_list.fields.type_name
          field: accordion
          type: string
          schema:
@@ -115,21 +115,21 @@ fields:
            options:
              choices:
                - value: always_open
                  text: Always Open
                  text: $t:field_options.directus_roles.collection_list.fields.choices_always
                - value: start_open
                  text: Start Open
                  text: $t:field_options.directus_roles.collection_list.fields.choices_start_open
                - value: start_collapsed
                  text: Start Collapsed
        - name: Collections
                  text: $t:field_options.directus_roles.collection_list.fields.choices_start_collapsed
        - name: $t:field_options.directus_roles.collections_name
          field: collections
          type: JSON
          meta:
            interface: list
            options:
              addLabel: Add New Collection...
              addLabel: $t:field_options.directus_roles.collections_addLabel
              template: '{{ collection }}'
              fields:
                - name: Collection
                - name: $t:field_options.directus_roles.collections_name
                  field: collection
                  type: string
                  meta:

@@ -8,7 +8,7 @@ fields:
    interface: input
    options:
      iconRight: title
      placeholder: My project...
      placeholder: $t:field_options.directus_settings.project_name_placeholder
    translations:
      language: en-US
      translations: Name
@@ -26,7 +26,7 @@ fields:

  - field: project_color
    interface: select-color
    note: Login & Logo Background
    note: $t:field_options.directus_settings.project_logo_note
    translations:
      language: en-US
      translations: Brand Color
@@ -44,7 +44,7 @@ fields:
    interface: presentation-divider
    options:
      icon: public
      title: Public Pages
      title: $t:fields.directus_settings.public_pages
    special:
      - alias
      - no-data
@@ -67,14 +67,14 @@ fields:
  - field: public_note
    interface: input-multiline
    options:
      placeholder: A short, public message that supports markdown formatting...
      placeholder: $t:field_options.directus_settings.public_note_placeholder
    width: full

  - field: security_divider
    interface: presentation-divider
    options:
      icon: security
      title: Security
      title: $t:security
    special:
      - alias
      - no-data
@@ -85,11 +85,11 @@ fields:
    options:
      choices:
        - value: null
          text: None – Not Recommended
          text: $t:field_options.directus_settings.auth_password_policy.none_text
        - value: '/^.{8,}$/'
          text: Weak – Minimum 8 Characters
          text: $t:field_options.directus_settings.auth_password_policy.weak_text
        - value: "/(?=^.{8,}$)(?=.*\\d)(?=.*[a-z])(?=.*[A-Z])(?=.*[!@#$%^&*()_+}{';'?>.<,])(?!.*\\s).*$/"
          text: Strong – Upper / Lowercase / Numbers / Special
          text: $t:field_options.directus_settings.auth_password_policy.strong_text
      allowOther: true
    width: half

@@ -104,7 +104,7 @@ fields:
    interface: presentation-divider
    options:
      icon: storage
      title: Files & Thumbnails
      title: $t:fields.directus_settings.files_and_thumbnails
    special:
      - alias
      - no-data
@@ -115,7 +115,7 @@ fields:
    options:
      fields:
        - field: key
          name: Key
          name: $t:key
          type: string
          schema:
            is_nullable: false
@@ -124,7 +124,7 @@ fields:
            options:
              slug: true
              onlyOnCreate: false
            width: half
            width: full
        - field: fit
          name: Fit
          type: string
@@ -135,16 +135,16 @@ fields:
            options:
              choices:
                - value: contain
                  text: Contain (preserve aspect ratio)
                  text: $t:field_options.directus_settings.storage_asset_presets.fit.contain_text
                - value: cover
                  text: Cover (forces exact size)
                  text: $t:field_options.directus_settings.storage_asset_presets.fit.cover_text
                - value: inside
                  text: Fit inside
                  text: $t:field_options.directus_settings.storage_asset_presets.fit.fit_text
                - value: outside
                  text: Fit outside
                  text: $t:field_options.directus_settings.storage_asset_presets.fit.outside_text
            width: half
        - field: width
          name: Width
          name: $t:width
          type: integer
          schema:
            is_nullable: false
@@ -152,7 +152,7 @@ fields:
            interface: input
            width: half
        - field: height
          name: Height
          name: $t:height
          type: integer
          schema:
            is_nullable: false
@@ -161,7 +161,7 @@ fields:
            width: half
        - field: quality
          type: integer
          name: Quality
          name: $t:quality
          schema:
            default_value: 80
            is_nullable: false
@@ -173,6 +173,7 @@ fields:
|
||||
step: 1
|
||||
width: half
|
||||
- field: withoutEnlargement
|
||||
name: Upscaling
|
||||
type: boolean
|
||||
schema:
|
||||
default_value: false
|
||||
@@ -180,7 +181,51 @@ fields:
|
||||
interface: boolean
|
||||
width: half
|
||||
options:
|
||||
label: Don't upscale images
|
||||
label: $t:no_upscale
|
||||
- field: format
|
||||
name: Format
|
||||
type: string
|
||||
schema:
|
||||
is_nullable: false
|
||||
default_value: ''
|
||||
meta:
|
||||
interface: select-dropdown
|
||||
options:
|
||||
allowNone: true
|
||||
choices:
|
||||
- value: jpeg
|
||||
text: JPEG
|
||||
- value: png
|
||||
text: PNG
|
||||
- value: webp
|
||||
text: WebP
|
||||
- value: tiff
|
||||
text: Tiff
|
||||
width: half
|
||||
- field: transforms
|
||||
name: $t:field_options.directus_settings.additional_transforms
|
||||
type: json
|
||||
schema:
|
||||
is_nullable: false
|
||||
default_value: []
|
||||
meta:
|
||||
note: $t:field_options.directus_settings.transforms_note
|
||||
|
||||
interface: json
|
||||
options:
|
||||
template: >
|
||||
[
|
||||
["blur", 45],
|
||||
["grayscale"],
|
||||
["extend", { "right": 500, "background": "rgb(255, 0, 0)" }]
|
||||
]
|
||||
placeholder: >
|
||||
[
|
||||
["blur", 45],
|
||||
["grayscale"],
|
||||
["extend", { "right": 500, "background": "rgb(255, 0, 0)" }]
|
||||
]
|
||||
width: full
|
||||
template: '{{key}}'
|
||||
special: json
|
||||
width: full
|
||||
@@ -190,18 +235,23 @@ fields:
|
||||
options:
|
||||
choices:
|
||||
- value: all
|
||||
text: All
|
||||
text: $t:all
|
||||
- value: none
|
||||
text: None
|
||||
text: $t:none
|
||||
- value: presets
|
||||
text: Presets Only
|
||||
text: $t:presets_only
|
||||
width: half
|
||||
|
||||
- field: storage_default_folder
|
||||
interface: system-folder
|
||||
width: half
|
||||
note: $t:interfaces.system-folder.field_hint
|
||||
|
||||
- field: overrides_divider
|
||||
interface: presentation-divider
|
||||
options:
|
||||
icon: brush
|
||||
title: App Overrides
|
||||
title: $t:fields.directus_settings.overrides
|
||||
special:
|
||||
- alias
|
||||
- no-data
|
||||
@@ -213,3 +263,72 @@ fields:
      language: css
      lineNumber: true
    width: full
+
+ - field: map_divider
+   interface: presentation-divider
+   options:
+     icon: map
+     title: $t:maps
+   special:
+     - alias
+     - no-data
+   width: full
+
+ - field: mapbox_key
+   interface: input
+   options:
+     icon: key
+     title: $t:field_options.directus_settings.mapbox_key
+     placeholder: $t:field_options.directus_settings.mapbox_placeholder
+     iconLeft: vpn_key
+     font: monospace
+   width: half
+
+ - field: basemaps
+   interface: list
+   special: json
+   options:
+     template: '{{name}}'
+     fields:
+       - field: name
+         name: $t:name
+         schema:
+           is_nullable: false
+         meta:
+           interface: text-input
+           options:
+             placeholder: Enter the basemap name...
+       - field: type
+         name: $t:type
+         meta:
+           interface: select-dropdown
+           options:
+             choices:
+               - value: raster
+                 text: $t:field_options.directus_settings.basemaps_raster
+               - value: tile
+                 text: $t:field_options.directus_settings.basemaps_tile
+               - value: style
+                 text: $t:field_options.directus_settings.basemaps_style
+       - field: url
+         name: $t:url
+         schema:
+           is_nullable: false
+         meta:
+           interface: text-input
+           options:
+             placeholder: http://{a-c}.tile.openstreetmap.org/{z}/{x}/{y}.png
+       - field: tileSize
+         name: $t:tile_size
+         schema:
+           is_nullable: true
+         meta:
+           interface: input
+           options:
+             placeholder: '512'
+           conditions:
+             - name: typeNeqRaster
+               rule:
+                 type:
+                   _neq: 'raster'
+               hidden: true
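The `conditions` entry on `tileSize` hides the field whenever `type` is not `raster`, using the same `_neq`-style filter operators the API uses elsewhere. A rough sketch of evaluating such a rule (the operator subset and the `matchesRule` helper are assumptions for illustration, not the app's shared filter utilities):

```ts
type Condition = { name: string; rule: Record<string, any>; hidden?: boolean };

// Returns true when every field/operator pair in the rule matches the values.
function matchesRule(values: Record<string, any>, rule: Record<string, any>): boolean {
	return Object.entries(rule).every(([field, operators]) =>
		Object.entries(operators as Record<string, any>).every(([op, expected]) => {
			switch (op) {
				case '_eq':
					return values[field] === expected;
				case '_neq':
					return values[field] !== expected;
				default:
					return false; // other filter operators omitted in this sketch
			}
		})
	);
}

// With type "style", typeNeqRaster matches, so tileSize would be hidden:
matchesRule({ type: 'style' }, { type: { _neq: 'raster' } }); // => true
```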
@@ -64,7 +64,7 @@ fields:
    interface: presentation-divider
    options:
      icon: face
-     title: User Preferences
+     title: $t:fields.directus_users.user_preferences
    special:
      - alias
      - no-data
@@ -79,11 +79,11 @@ fields:
      options:
        choices:
          - value: auto
-           text: Automatic (Based on System)
+           text: $t:fields.directus_users.theme_auto
          - value: light
-           text: Light Mode
+           text: $t:fields.directus_users.theme_light
          - value: dark
-           text: Dark Mode
+           text: $t:fields.directus_users.theme_dark
      width: half

  - field: tfa_secret
@@ -95,7 +95,7 @@ fields:
    interface: presentation-divider
    options:
      icon: verified_user
-     title: Admin Options
+     title: $t:fields.directus_users.admin_options
      color: '#E35169'
    special:
      - alias
@@ -106,15 +106,15 @@ fields:
      interface: select-dropdown
      options:
        choices:
-         - text: Draft
+         - text: $t:fields.directus_users.status_draft
            value: draft
-         - text: Invited
+         - text: $t:fields.directus_users.status_invited
            value: invited
-         - text: Active
+         - text: $t:fields.directus_users.status_active
            value: active
-         - text: Suspended
+         - text: $t:fields.directus_users.status_suspended
            value: suspended
-         - text: Archived
+         - text: $t:fields.directus_users.status_archived
            value: archived
      width: half
@@ -132,7 +132,7 @@ fields:
      interface: token
      options:
        iconRight: vpn_key
-       placeholder: Enter a secure access token...
+       placeholder: $t:fields.directus_users.token_placeholder
      width: full

  - field: id
@@ -38,26 +38,26 @@ fields:
        defaultBackground: 'var(--background-normal-alt)'
        showAsDot: true
        choices:
-         - text: Active
+         - text: $t:active
            value: active
            foreground: 'var(--primary-10)'
            background: 'var(--primary)'
-         - text: Inactive
+         - text: $t:inactive
            value: inactive
            foreground: 'var(--foreground-normal)'
            background: 'var(--background-normal-alt)'
      options:
        choices:
-         - text: Active
+         - text: $t:active
            value: active
-         - text: Inactive
+         - text: $t:inactive
            value: inactive
      width: half

  - field: data
    interface: boolean
    options:
-     label: Send Event Data
+     label: $t:fields.directus_webhooks.data_label
    special: boolean
    width: half
    display: boolean
@@ -66,7 +66,7 @@ fields:
    interface: presentation-divider
    options:
      icon: api
-     title: Triggers
+     title: $t:fields.directus_webhooks.triggers
    special:
      - alias
      - no-data
@@ -76,11 +76,11 @@ fields:
      interface: select-multiple-checkbox
      options:
        choices:
-         - text: Create
+         - text: $t:create
            value: create
-         - text: Update
+         - text: $t:update
            value: update
-         - text: Delete
+         - text: $t:delete_label
            value: delete
      special: csv
      width: full
@@ -89,19 +89,19 @@ fields:
        defaultForeground: 'var(--foreground-normal)'
        defaultBackground: 'var(--background-normal-alt)'
        choices:
-         - text: Create
+         - text: $t:create
            value: create
            foreground: 'var(--primary)'
            background: 'var(--primary-25)'
-         - text: Update
+         - text: $t:update
            value: update
            foreground: 'var(--blue)'
            background: 'var(--blue-25)'
-         - text: Delete
+         - text: $t:delete_label
            value: delete
            foreground: 'var(--danger)'
            background: 'var(--danger-25)'
-         - text: Login
+         - text: $t:login
            value: authenticate
            foreground: 'var(--purple)'
            background: 'var(--purple-25)'
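Throughout these YAML hunks, hard-coded English strings are swapped for `$t:`-prefixed translation keys, which the app resolves against the active locale at render time. A minimal sketch of that idea (the `translateLiteral` helper and message table are hypothetical; the app wires this through its own vue-i18n setup):

```ts
import { createI18n } from 'vue-i18n';

const i18n = createI18n({
	locale: 'en-US',
	messages: { 'en-US': { no_upscale: "Don't upscale images", all: 'All' } },
});

// Only values explicitly marked with "$t:" are run through i18n;
// everything else is rendered verbatim.
function translateLiteral(value: string): string {
	return value.startsWith('$t:') ? i18n.global.t(value.substring(3)) : value;
}

translateLiteral('$t:no_upscale'); // => "Don't upscale images"
translateLiteral('Literal label'); // => "Literal label"
```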
@@ -18,7 +18,7 @@ const emitter = new EventEmitter2({
export async function emitAsyncSafe(name: string, ...args: any[]): Promise<any> {
	try {
		return await emitter.emitAsync(name, ...args);
-	} catch (err) {
+	} catch (err: any) {
		logger.warn(`An error was thrown while executing hook "${name}"`);
		logger.warn(err);
	}
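Annotating the catch binding as `err: any` looks like a TypeScript 4.4 compatibility fix: with `useUnknownInCatchVariables` enabled (implied by `strict`), a bare `catch (err)` is typed `unknown` and property access stops compiling. A small sketch of the narrowing alternative (`doWork` is a placeholder):

```ts
async function doWork(): Promise<void> {
	throw new Error('boom');
}

async function main() {
	try {
		await doWork();
	} catch (err) {
		// Under `useUnknownInCatchVariables`, err is `unknown` here, so access
		// requires narrowing (or an `any` annotation, as in the diff above).
		if (err instanceof Error) console.warn(err.message);
	}
}

main();
```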
@@ -8,7 +8,7 @@ import fs from 'fs';
import { clone, toNumber, toString } from 'lodash';
import path from 'path';
import { requireYAML } from './utils/require-yaml';
-import { toArray } from './utils/to-array';
+import { toArray } from '@directus/shared/utils';

const acceptedEnvTypes = ['string', 'number', 'regex', 'array'];
@@ -16,9 +16,11 @@ const defaults: Record<string, any> = {
	CONFIG_PATH: path.resolve(process.cwd(), '.env'),

	PORT: 8055,
-	PUBLIC_URL: 'http://localhost:8055',
+	PUBLIC_URL: '/',
	MAX_PAYLOAD_SIZE: '100kb',

+	DB_EXCLUDE_TABLES: 'spatial_ref_sys',
+
	STORAGE_LOCATIONS: 'local',
	STORAGE_LOCAL_DRIVER: 'local',
	STORAGE_LOCAL_ROOT: './uploads',
@@ -34,6 +36,7 @@ const defaults: Record<string, any> = {
	REFRESH_TOKEN_TTL: '7d',
	REFRESH_TOKEN_COOKIE_SECURE: false,
	REFRESH_TOKEN_COOKIE_SAME_SITE: 'lax',
+	REFRESH_TOKEN_COOKIE_NAME: 'directus_refresh_token',

	ROOT_REDIRECT: './admin',
@@ -64,9 +67,12 @@ const defaults: Record<string, any> = {

	TELEMETRY: true,

-	ASSETS_CACHE_TTL: '30m',
+	ASSETS_CACHE_TTL: '30d',
	ASSETS_TRANSFORM_MAX_CONCURRENT: 1,
+	ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION: 6000,
+	ASSETS_TRANSFORM_MAX_OPERATIONS: 5,

	SERVE_APP: true,
};
// Allows us to force certain environment variable into a type, instead of relying
@@ -170,6 +176,8 @@ function getEnvironmentValueByType(envVariableString: string) {
			return new RegExp(envVariableValue);
		case 'string':
			return envVariableValue;
+		case 'json':
+			return tryJSON(envVariableValue);
	}
}
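`getEnvironmentValueByType` lets an env value carry an explicit cast prefix (`number:`, `regex:`, and now `json:`) instead of relying on heuristics. A stand-alone sketch of the same idea, mirroring the switch above (the helper name and sample values are illustrative):

```ts
function castEnvValue(raw: string): unknown {
	const [type, ...rest] = raw.split(':');
	const value = rest.join(':'); // reassemble values that themselves contain ":"
	switch (type) {
		case 'number':
			return Number(value);
		case 'regex':
			return new RegExp(value);
		case 'string':
			return value;
		case 'json':
			try {
				return JSON.parse(value);
			} catch {
				return value;
			}
		default:
			return raw; // no recognized prefix: leave the value untouched
	}
}

castEnvValue('number:8055');         // => 8055
castEnvValue('json:{"maxSize":10}'); // => { maxSize: 10 }
```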
@@ -181,14 +189,14 @@ function processValues(env: Record<string, any>) {
		// and store it in the variable with the same name but without '_FILE' at the end
		let newKey;
		if (key.length > 5 && key.endsWith('_FILE')) {
-			newKey = key.slice(0, -5);
-			if (newKey in env) {
-				throw new Error(
-					`Duplicate environment variable encountered: you can't use "${newKey}" and "${key}" simultaneously.`
-				);
-			}
			try {
				value = fs.readFileSync(value, { encoding: 'utf8' });
+				newKey = key.slice(0, -5);
+				if (newKey in env) {
+					throw new Error(
+						`Duplicate environment variable encountered: you can't use "${key}" and "${newKey}" simultaneously.`
+					);
+				}
+				key = newKey;
			} catch {
				throw new Error(`Failed to read value from file "${value}", defined in environment variable "${key}".`);
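The reordering above means the `_FILE` suffix handling now reads the file (for example a Docker secret) before claiming the un-suffixed name, so the duplicate check only fires for values that were actually loaded. A condensed sketch of the behavior (paths and variable names are assumed examples):

```ts
import fs from 'fs';

const env: Record<string, any> = { DB_PASSWORD_FILE: '/run/secrets/db_password' };

for (let [key, value] of Object.entries(env)) {
	if (key.length > 5 && key.endsWith('_FILE')) {
		// Read the secret first; only then claim the un-suffixed name.
		value = fs.readFileSync(value, { encoding: 'utf8' });
		const newKey = key.slice(0, -5); // "DB_PASSWORD"
		if (newKey in env) {
			throw new Error(`Duplicate environment variable encountered: you can't use "${key}" and "${newKey}" simultaneously.`);
		}
		env[newKey] = value;
	}
}
```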
@@ -214,6 +222,9 @@ function processValues(env: Record<string, any>) {
			case 'array':
				env[key] = toArray(value);
				break;
+			case 'json':
+				env[key] = tryJSON(value);
+				break;
		}
		continue;
	}
@@ -247,6 +258,14 @@ function processValues(env: Record<string, any>) {
		continue;
	}

+	if (String(value).includes(',')) {
+		env[key] = toArray(value);
+	}
+
+	// Try converting the value to a JS object. This allows JSON objects to be passed for nested
+	// config flags, or custom param names (that aren't camelCased)
+	env[key] = tryJSON(value);
+
	// If '_FILE' variable hasn't been processed yet, store it as it is (string)
	if (newKey) {
		env[key] = value;
@@ -255,3 +274,11 @@ function processValues(env: Record<string, any>) {

	return env;
}
+
+function tryJSON(value: any) {
+	try {
+		return JSON.parse(value);
+	} catch {
+		return value;
+	}
+}
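With the new fallback blocks, untyped comma-separated values are split into arrays via `toArray`, and other values get one `tryJSON` pass that silently returns the original string when parsing fails. Illustrative outcomes (the sample values are assumptions, not project defaults):

```ts
function tryJSON(value: any) {
	try {
		return JSON.parse(value);
	} catch {
		return value;
	}
}

tryJSON('{"enabled": true}'); // => { enabled: true }  (nested config objects now work)
tryJSON('30d');               // => '30d'              (non-JSON strings pass through unchanged)
// A value like "https://a.example,https://b.example" would instead hit the
// toArray branch and become ['https://a.example', 'https://b.example'].
```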
@@ -1,4 +1,4 @@
-import { BaseException } from '../base';
+import { BaseException } from '@directus/shared/exceptions';

type Extensions = {
	collection: string;
@@ -46,7 +46,7 @@ async function uniqueViolation(error: MSSQLError) {
	 * information_schema when this happens
	 */

-	const betweenQuotes = /'([^']+)'/;
+	const betweenQuotes = /'([^']+)'/g;
	const betweenParens = /\(([^)]+)\)/g;

	const quoteMatches = error.message.match(betweenQuotes);
@@ -54,21 +54,35 @@ async function uniqueViolation(error: MSSQLError) {

	if (!quoteMatches || !parenMatches) return error;

-	const keyName = quoteMatches[1];
+	const keyName = quoteMatches[1]?.slice(1, -1);

-	const database = getDatabase();
+	let collection = quoteMatches[0]?.slice(1, -1);
+	let field: string | null = null;

-	const constraintUsage = await database
-		.select('*')
-		.from('INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE')
-		.where({
-			CONSTRAINT_NAME: keyName,
-		})
-		.first();
-
-	const collection = constraintUsage.TABLE_NAME;
-	const field = constraintUsage.COLUMN_NAME;
-	const invalid = parenMatches[parenMatches.length - 1].slice(1, -1);
+	if (keyName) {
+		const database = getDatabase();
+
+		const constraintUsage = await database
+			.select('sys.columns.name as field', database.raw('OBJECT_NAME(??) as collection', ['sys.columns.object_id']))
+			.from('sys.indexes')
+			.innerJoin('sys.index_columns', (join) => {
+				join
+					.on('sys.indexes.object_id', '=', 'sys.index_columns.object_id')
+					.andOn('sys.indexes.index_id', '=', 'sys.index_columns.index_id');
+			})
+			.innerJoin('sys.columns', (join) => {
+				join
+					.on('sys.index_columns.object_id', '=', 'sys.columns.object_id')
+					.andOn('sys.index_columns.column_id', '=', 'sys.columns.column_id');
+			})
+			.where('sys.indexes.name', '=', keyName)
+			.first();
+
+		collection = constraintUsage?.collection;
+		field = constraintUsage?.field;
+	}
+
+	const invalid = parenMatches[parenMatches.length - 1]?.slice(1, -1);

	return new RecordNotUniqueException(field, {
		collection,
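The rewritten lookup resolves the violated index through SQL Server's `sys.indexes`, `sys.index_columns`, and `sys.columns` catalog views rather than `INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE`, which only lists declared constraints and misses plain unique indexes. Roughly the SQL the knex chain above produces (a sketch, not captured output):

```ts
// SELECT sys.columns.name AS field,
//        OBJECT_NAME(sys.columns.object_id) AS collection
// FROM sys.indexes
// INNER JOIN sys.index_columns
//   ON sys.indexes.object_id = sys.index_columns.object_id
//  AND sys.indexes.index_id = sys.index_columns.index_id
// INNER JOIN sys.columns
//   ON sys.index_columns.object_id = sys.columns.object_id
//  AND sys.index_columns.column_id = sys.columns.column_id
// WHERE sys.indexes.name = @keyName
```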
@@ -1,4 +1,4 @@
-import { BaseException } from '../base';
+import { BaseException } from '@directus/shared/exceptions';

type Extensions = {
	collection: string;

@@ -1,4 +1,4 @@
-import { BaseException } from '../base';
+import { BaseException } from '@directus/shared/exceptions';

type Extensions = {
	collection: string;

@@ -1,4 +1,4 @@
-import { BaseException } from '../base';
+import { BaseException } from '@directus/shared/exceptions';

type Extensions = {
	collection: string;
Some files were not shown because too many files have changed in this diff.