Merge branch 'spec-updates' into main

@@ -11,3 +11,11 @@ trim_trailing_whitespace = true
[{package.json,*.yml,*.yaml}]
indent_style = space
indent_size = 2

[Dockerfile]
indent_size = 2
indent_style = tab

[Makefile]
indent_size = 2
indent_style = tab

.github/actions/Makefile (new file, 25 lines, vendored)
@@ -0,0 +1,25 @@
SHELL=bash

version=v9.0.0-beta.1
tag=$(version)
cmd=
user=directus
registry=ghcr.io
repository=directus/next

.PHONY: build

build-images:
	docker build \
		--build-arg VERSION=$(version) \
		--build-arg REPOSITORY=$(repository) \
		-t directus:temp \
		-f ./build-images/rootfs/directus/images/main/Dockerfile \
		./build-images/rootfs/directus/images/main

	docker tag directus:temp $(registry)/$(repository):$(version)
	docker tag directus:temp $(registry)/$(repository):$(tag)
	docker image rm directus:temp

test-image:
	docker run --rm -it $(registry)/$(repository):$(tag) $(cmd)
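
A hypothetical invocation of these targets from .github/actions (assumes Docker is available; version and tag default to the values above and can be overridden per call):

    # Build the image and tag it as ghcr.io/directus/next:v9.0.0-beta.1
    make build-images version=v9.0.0-beta.1

    # Smoke-test the tagged image, passing a Directus CLI command through cmd
    make test-image tag=v9.0.0-beta.1 cmd=start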

.github/actions/build-images/.editorconfig (new file, 13 lines, vendored)
@@ -0,0 +1,13 @@
root = true

[*]
charset = utf-8
end_of_line = lf
indent_size = 2
indent_style = space
insert_final_newline = true
tab_width = 2
trim_trailing_whitespace = true

[Makefile]
indent_style = tab

.github/actions/build-images/Dockerfile (new file, 15 lines, vendored)
@@ -0,0 +1,15 @@
FROM docker:stable

RUN \
  apk update && \
  apk upgrade && \
  apk add bash

COPY ./rootfs/ /

RUN \
  chmod +x /usr/bin/lib/argsf && \
  chmod +x /usr/bin/entrypoint && \
  chmod +x /usr/bin/semver

ENTRYPOINT ["entrypoint"]

.github/actions/build-images/action.yml (new file, 41 lines, vendored)
@@ -0,0 +1,41 @@
name: "Build and publish Directus images"
description: "GitHub Action to publish Directus container images."
branding:
  icon: archive
  color: gray-dark
inputs:
  repository:
    description: "Repository name"
    required: true
  registry:
    description: "Registry"
    required: true
  username:
    description: "Registry user"
    required: true
  password:
    description: "Registry password"
    required: true
  version:
    description: "Version"
    required: true
  push:
    description: "Push"
    required: false
    default: "false"
runs:
  using: "docker"
  image: "Dockerfile"
  args:
    - --registry
    - ${{ inputs.registry }}
    - --repository
    - ${{ inputs.repository }}
    - --username
    - ${{ inputs.username }}
    - --password
    - ${{ inputs.password }}
    - --version
    - ${{ inputs.version }}
    - --push
    - ${{ inputs.push }}

.github/actions/build-images/rootfs/directus/images/main/.dockerignore (new file, 2 lines, vendored)
@@ -0,0 +1,2 @@
.dockerignore
Dockerfile

.github/actions/build-images/rootfs/directus/images/main/.editorconfig (new file, 13 lines, vendored)
@@ -0,0 +1,13 @@
root = true

[*]
charset = utf-8
end_of_line = lf
indent_size = 2
indent_style = space
insert_final_newline = true
tab_width = 2
trim_trailing_whitespace = true

[Makefile]
indent_style = tab

.github/actions/build-images/rootfs/directus/images/main/Dockerfile (new file, 94 lines, vendored)
@@ -0,0 +1,94 @@
#
# Builder
#

FROM node:14-alpine AS builder

ARG VERSION

RUN \
  apk update && \
  apk upgrade && \
  apk add jq

WORKDIR /directus

COPY package.json .
RUN \
  jq ".dependencies.directus = \"^${VERSION}\"" package.json > updated.json && \
  mv updated.json package.json

RUN cat package.json

#
# Image
#
FROM node:14-alpine

ARG VERSION
ARG REPOSITORY=directus/directus

LABEL directus.version="${VERSION}"
LABEL org.opencontainers.image.source https://github.com/${REPOSITORY}

ENV \
  PORT="8055" \
  PUBLIC_URL="/" \
  DB_CLIENT="sqlite3" \
  DB_FILENAME="/directus/database/database.sqlite" \
  RATE_LIMITER_ENABLED="false" \
  RATE_LIMITER_STORE="memory" \
  RATE_LIMITER_POINTS="25" \
  RATE_LIMITER_DURATION="1" \
  CACHE_ENABLED="false" \
  STORAGE_LOCATIONS="local" \
  STORAGE_LOCAL_PUBLIC_URL="/uploads" \
  STORAGE_LOCAL_DRIVER="local" \
  STORAGE_LOCAL_ROOT="/directus/uploads" \
  ACCESS_TOKEN_TTL="15m" \
  REFRESH_TOKEN_TTL="7d" \
  REFRESH_TOKEN_COOKIE_SECURE="false" \
  REFRESH_TOKEN_COOKIE_SAME_SITE="lax" \
  OAUTH_PROVIDERS="" \
  EXTENSIONS_PATH="/directus/extensions" \
  EMAIL_FROM="no-reply@directus.io" \
  EMAIL_TRANSPORT="sendmail" \
  EMAIL_SENDMAIL_NEW_LINE="unix" \
  EMAIL_SENDMAIL_PATH="/usr/sbin/sendmail"

RUN \
  apk update && \
  apk upgrade && \
  apk add bash ssmtp util-linux

SHELL ["/bin/bash", "-c"]

WORKDIR /directus

# Global requirements
RUN npm install -g yargs pino pino-colada

# Install Directus
COPY --from=builder /directus/package.json .
RUN npm install

# Copy files
COPY ./rootfs /
RUN chmod +x /usr/bin/entrypoint && chmod +x /usr/bin/print

# Create directories
RUN \
  mkdir -p extensions/displays && \
  mkdir -p extensions/interfaces && \
  mkdir -p extensions/layouts && \
  mkdir -p extensions/modules && \
  mkdir -p database && \
  mkdir -p uploads

EXPOSE 8055
VOLUME \
  /directus/database \
  /directus/extensions \
  /directus/uploads

ENTRYPOINT ["entrypoint"]
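
A sketch of running the resulting image locally (the image name and tag are assumptions; every variable falls back to the ENV defaults above, so SQLite is used out of the box):

    docker run --rm -p 8055:8055 \
      -e KEY=replace-with-a-random-uuid \
      -e SECRET=replace-with-a-random-string \
      ghcr.io/directus/next:v9.0.0-beta.1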

.github/actions/build-images/rootfs/directus/images/main/package.json (new file, 22 lines, vendored)
@@ -0,0 +1,22 @@
{
  "name": "directus-project",
  "version": "1.0.0",
  "description": "Directus Project",
  "main": "index.js",
  "scripts": {},
  "keywords": [],
  "author": "",
  "license": "MIT",
  "dependencies": {
    "@keyv/redis": "^2.1.2",
    "directus": "^9.0.0-beta.1",
    "ioredis": "^4.17.3",
    "memcached": "^2.2.2",
    "mssql": "^6.2.2",
    "mysql": "^2.18.1",
    "oracledb": "^5.0.0",
    "pg": "^8.3.3",
    "sqlite3": "^5.0.0",
    "yargs": "^16.0.3"
  }
}

.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/bin/entrypoint (new file, 110 lines, vendored)
@@ -0,0 +1,110 @@
#!/usr/bin/env bash

set -e

function seed() {
  # TODO: move users to a separate check, outside database installation
  local show=false
  local email=${DIRECTUS_ADMIN_EMAIL:-"admin@example.com"}
  local password=${DIRECTUS_ADMIN_PASSWORD:-""}

  if [ "${password}" == "" ] ; then
    password=$(node -e 'console.log(require("nanoid").nanoid(12))')
    show=true
  fi

  print --level=info "Creating administrator role"
  local role=$(npx directus roles create --name Administrator --admin)

  print --level=info "Creating administrator user"
  local user=$(npx directus users create --email "${email}" --password "${password}" --role "${role}")

  if [ "${show}" == "true" ] ; then
    print --level=info --stdin <<MSG
>
> Email: $email
> Password: $password
>
MSG
  else
    print --level=info --stdin <<MSG
>
> Email: $email
> Password: <env>
>
MSG
  fi
}

function bootstrap() {
  local warn=false

  if [ "${KEY}" == "" ] ; then
    export KEY=$(uuidgen)
    warn=true
  fi

  if [ "${SECRET}" == "" ] ; then
    export SECRET=$(node -e 'console.log(require("nanoid").nanoid(32))')
    warn=true
  fi

  if [ "${warn}" == "true" ] ; then
    print --level=warn --stdin <<WARN
>
> WARNING!
>
> The KEY and SECRET environment variables are not set.
> Some temporary variables were generated to fill the gap,
> but in production this is going to cause problems.
>
> Please refer to the docs at https://docs.directus.io/
> on how and why to configure them properly
>
WARN
  fi

  # Install database if using sqlite and file doesn't exist
  if [ "${DB_CLIENT}" == "sqlite3" ] ; then
    if [ "${DB_FILENAME}" == "" ] ; then
      print --level=error "Missing DB_FILENAME environment variable"
      exit 1
    fi

    if [ ! -f "${DB_FILENAME}" ] ; then
      mkdir -p $(dirname ${DB_FILENAME})
    fi
  fi

  should_seed=false

  set +e
  npx directus database install &>/dev/null
  if [ "$?" == "0" ] ; then
    print --level=info "Database installed"
    should_seed=true
  fi
  set -e

  if [ "${should_seed}" == "true" ] ; then
    seed
  fi
}

command=""
if [ $# -eq 0 ] ; then
  command="start"
elif [ "${1}" == "bash" ] || [ "${1}" == "shell" ] ; then
  shift
  exec bash $@
elif [ "${1}" == "command" ] ; then
  shift
  exec $@
else
  command="${1}"
  shift
fi

bootstrap
exec npx directus "${command}" $@
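
Hypothetical container invocations exercising each branch of the argument handling above (image name assumed):

    docker run ghcr.io/directus/next:v9.0.0-beta.1                      # no args: bootstrap, then "directus start"
    docker run -it ghcr.io/directus/next:v9.0.0-beta.1 shell            # exec bash directly, skipping bootstrap
    docker run ghcr.io/directus/next:v9.0.0-beta.1 command ls /directus # exec an arbitrary program
    docker run ghcr.io/directus/next:v9.0.0-beta.1 database install     # anything else becomes a directus subcommand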

.github/actions/build-images/rootfs/directus/images/main/rootfs/usr/bin/print (new file, 48 lines, vendored)
@@ -0,0 +1,48 @@
#!/usr/bin/env node

// Workarounds?
process.env.NODE_PATH = "/usr/local/lib/node_modules";
require("module").Module._initPaths();

/**
 * Read lines from stdin
 */
async function readlines() {
  const chunks = [];
  for await (const chunk of process.stdin) {
    chunks.push(chunk);
  }

  const lines = chunks.join("").split("\n");
  lines.pop();
  return lines;
}

(async function () {
  // Logger
  const yargs = require("yargs");
  const logger = require("pino")({
    prettyPrint: process.env.LOG_STYLE !== "raw",
    prettifier: require("pino-colada"),
    level: process.env.LOG_LEVEL || "info",
  });

  function write(...message) {
    if (level in logger) {
      logger[level](...message);
    } else {
      logger.info(...message);
    }
  }

  const args = yargs.argv;
  const level = args.level || "info";
  const stdin = args.stdin || false;

  if (stdin) {
    const lines = await readlines();
    lines.forEach((line) => write(line));
  } else {
    write(...args._);
  }
})();
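
The helper is called like this elsewhere in the image (a sketch; LOG_STYLE and LOG_LEVEL behave as wired up above):

    print --level=info "Database installed"
    print --level=warn --stdin <<< $'line one\nline two'
    LOG_STYLE=raw print "emits plain pino JSON instead of pretty output"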

.github/actions/build-images/rootfs/usr/bin/entrypoint (new file, 138 lines, vendored)
@@ -0,0 +1,138 @@
#!/usr/bin/env bash

set -e

root=$(dirname ${0})
source ${root}/lib/argsf

#
# Makes a set of tags
#
function make_tags() {
  local prefix=""
  local version=${1}

  semver get major ${version} > /dev/null 2>&1
  if [ "$?" != "0" ]; then
    echo "${version}"
  else
    if [ "${version:0:1}" == "v" ]; then
      prefix="v"
    fi

    major="$(semver get major ${version})"
    minor="${major}.$(semver get minor ${version})"
    patch="${minor}.$(semver get patch ${version})"

    prerel="$(semver get prerel ${version})"
    if [ "${prerel}" == "" ]; then
      is_prerel=false
    else
      is_prerel=true
    fi

    build="$(semver get build ${version})"
    if [ "${build}" == "" ]; then
      is_build=false
    else
      is_build=true
    fi

    if [ "${is_prerel}" == "true" ]; then
      echo "${prefix}${major}-${prerel}"
      echo "${prefix}${minor}-${prerel}"
      echo "${prefix}${patch}-${prerel}"
      if [ "${is_build}" == "true" ]; then
        echo "${prefix}${major}-${prerel}-${build}"
      fi
    else
      echo "${prefix}${major}"
      echo "${prefix}${minor}"
      echo "${prefix}${patch}"
      if [ "${is_build}" == "true" ]; then
        echo "${prefix}${patch}-${build}"
      fi
    fi
  fi
}

#
# Build script
#
function main() {
  username=$(argument username)
  password=$(argument password)

  push=$(argument push "false")
  latest=$(argument latest "false")

  registry=$(argument registry "")
  registry=$(echo "${registry}" | tr '[:upper:]' '[:lower:]')

  repository=$(argument repository "directus/next")
  repository=$(echo "${repository}" | tr '[:upper:]' '[:lower:]')

  version=$(argument version "")
  context=$(argument context ".")

  image="${repository}"
  if [ "${registry}" != "" ]; then
    image="${registry}/${image}"
  fi

  # Normalize tag
  if [ "${version}" == "" ]; then
    version=${GITHUB_REF##*/}
  else
    version=${version##*/}
  fi

  if [ "${version}" == "" ]; then
    version=$(echo ${GITHUB_SHA:-"000000000000"} | cut -c1-12)
  fi

  tags=$(make_tags ${version})
  echo "Tags = ${tags}"

  # build image
  docker build \
    -t directus:main \
    --build-arg VERSION=${version} \
    --build-arg REPOSITORY=${repository} \
    /directus/images/main

  # login into registry
  docker login -u "${username}" -p "${password}" "${registry}"

  # Push latest
  # TODO: check if it's really the latest
  if [ "${latest}" == "true" ]; then
    fqin="${image}:latest"
    echo "Tagging ${fqin}"
    docker tag directus:main ${fqin}
    if [ "${push}" == "true" ]; then
      echo "Pushing tag ${fqin}"
      docker push "${fqin}"
    fi
  fi

  # Push tags
  for tag in $tags
  do
    tag=$(echo "${tag}" | tr '[:upper:]' '[:lower:]')
    fqin="${image}:${tag}"
    echo "Tagging ${fqin}"
    docker tag directus:main "${fqin}"
    if [ "${push}" == "true" ]; then
      echo "Pushing tag ${fqin}"
      docker push "${fqin}"
    fi
  done

  echo "Finished."

  exit $?
}

main
exit $?
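
For intuition, the tag fan-out make_tags would produce, worked by hand from the branches above:

    make_tags v9.1.2         # -> v9, v9.1, v9.1.2
    make_tags v9.0.0-beta.1  # -> v9-beta.1, v9.0-beta.1, v9.0.0-beta.1
    make_tags main           # not semver: echoed back as-is -> main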

.github/actions/build-images/rootfs/usr/bin/lib/argsf (new file, 98 lines, vendored)
@@ -0,0 +1,98 @@
#
# Arguments and Flags (argsf)
# This is meant to work with bash shell
# To use, source this file into your bash scripts
#
# Implemented by João Biondo <wolfulus@gmail.com>
# https://github.com/WoLfulus/argsf
#

declare _ARGCOUNT=$#
declare _ARGDATA=("$@")
declare -A _ARGMAP
declare -A _FLAGMAP

for ((_arg_index_key=1;_arg_index_key<=$#;_arg_index_key++))
do
  _arg_index_value=$(expr $_arg_index_key + 1)
  _arg_key=${!_arg_index_key}
  _arg_value=${!_arg_index_value}
  if [[ $_arg_key == *"--"* ]]; then
    if [[ $_arg_key == *" "* ]]; then
      continue
    fi
    _arg_name="${_arg_key:2}"
    _FLAGMAP[${_arg_name}]=1
    if [[ $_arg_value != *"--"* ]] || [[ $_arg_value == *" "* ]] ; then
      _ARGMAP[${_arg_name}]="$_arg_value"
    else
      _ARGMAP[${_arg_name}]=""
    fi
  fi
done

function _argument() {
  if test "${_ARGMAP[${ARG_NAME}]+isset}" ; then
    echo ${_ARGMAP[${ARG_NAME}]}
  else
    if [ ${ARG_DEFAULT} -eq 0 ]; then
      echo "Error: required argument '--${ARG_NAME}' not specified" 1>&2
      exit 1
    else
      echo ${ARG_DEFAULT_VALUE}
    fi
  fi
}

function argument() {
  if [ $# -eq 1 ]; then
    ARG_NAME="$1" ARG_DEFAULT=0 ARG_DEFAULT_VALUE= _argument "${_ARGUMENT_DATA}"
  elif [ $# -eq 2 ]; then
    ARG_NAME="$1" ARG_DEFAULT=1 ARG_DEFAULT_VALUE="$2" _argument "${_ARGUMENT_DATA}"
  else
    echo "argument: invalid number of arguments" 1>&2
    return 1
  fi
  return 0
}

function flage() {
  if [ $# -eq 1 ]; then
    if [[ ${_FLAGMAP[$1]} ]] ; then
      echo "true"
      return 0
    elif [[ ${_FLAGMAP[no-$1]} ]] ; then
      echo "false"
      return 0
    else
      echo "true"
      return 0
    fi
  else
    echo "flag: invalid number of arguments" 1>&2
    return 1
  fi
}

function flagd() {
  if [ $# -eq 1 ]; then
    if [[ ${_FLAGMAP[$1]} ]] ; then
      echo "true"
      return 0
    elif [[ ${_FLAGMAP[no-$1]} ]] ; then
      echo "false"
      return 0
    else
      echo "false"
      return 0
    fi
  else
    echo "flag: invalid number of arguments" 1>&2
    return 1
  fi
}

function flag() {
  flagd $1
  return $?
}
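
A minimal sketch of consuming this library, mirroring how the build entrypoint above sources it (the script name and flags are assumptions):

    #!/usr/bin/env bash
    source ./lib/argsf

    name=$(argument name "world")  # optional, defaults to "world"
    token=$(argument token)        # required; prints an error and exits if --token is missing
    verbose=$(flagd verbose)       # "true" only if --verbose was passed

    echo "hello ${name} (verbose=${verbose})"
    # usage: ./greet --token abc --name directus --verbose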

.github/actions/build-images/rootfs/usr/bin/semver (new file, 284 lines, vendored)
@@ -0,0 +1,284 @@
#!/usr/bin/env bash

#
# Copyright (c) 2014-2015 François Saint-Jacques <fsaintjacques@gmail.com>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
#

set -o errexit -o nounset -o pipefail

NAT='0|[1-9][0-9]*'
ALPHANUM='[0-9]*[A-Za-z-][0-9A-Za-z-]*'
IDENT="$NAT|$ALPHANUM"
FIELD='[0-9A-Za-z-]+'

SEMVER_REGEX="\
^[vV]?\
($NAT)\\.($NAT)\\.($NAT)\
(\\-(${IDENT})(\\.(${IDENT}))*)?\
(\\+${FIELD}(\\.${FIELD})*)?$"

PROG=semver
PROG_VERSION="3.0.0"

USAGE="\
Usage:
  $PROG bump (major|minor|patch|release|prerel <prerel>|build <build>) <version>
  $PROG compare <version> <other_version>
  $PROG get (major|minor|patch|release|prerel|build) <version>
  $PROG --help
  $PROG --version
Arguments:
  <version>  A version must match the following regular expression:
             \"${SEMVER_REGEX}\"
             In English:
             -- The version must match X.Y.Z[-PRERELEASE][+BUILD]
                where X, Y and Z are non-negative integers.
             -- PRERELEASE is a dot separated sequence of non-negative integers and/or
                identifiers composed of alphanumeric characters and hyphens (with
                at least one non-digit). Numeric identifiers must not have leading
                zeros. A hyphen (\"-\") introduces this optional part.
             -- BUILD is a dot separated sequence of identifiers composed of alphanumeric
                characters and hyphens. A plus (\"+\") introduces this optional part.
  <other_version>  See <version> definition.
  <prerel>  A string as defined by PRERELEASE above.
  <build>  A string as defined by BUILD above.
Options:
  -v, --version  Print the version of this tool.
  -h, --help     Print this help message.
Commands:
  bump     Bump by one of major, minor, patch; zeroing or removing
           subsequent parts. \"bump prerel\" sets the PRERELEASE part and
           removes any BUILD part. \"bump build\" sets the BUILD part.
           \"bump release\" removes any PRERELEASE or BUILD parts.
           The bumped version is written to stdout.
  compare  Compare <version> with <other_version>, output to stdout the
           following values: -1 if <other_version> is newer, 0 if equal, 1 if
           older. The BUILD part is not used in comparisons.
  get      Extract given part of <version>, where part is one of major, minor,
           patch, prerel, build, or release.
See also:
  https://semver.org -- Semantic Versioning 2.0.0"

function error {
  echo -e "$1" >&2
  exit 1
}

function usage-help {
  error "$USAGE"
}

function usage-version {
  echo -e "${PROG}: $PROG_VERSION"
  exit 0
}

function validate-version {
  local version=$1
  if [[ "$version" =~ $SEMVER_REGEX ]]; then
    # if a second argument is passed, store the result in var named by $2
    if [ "$#" -eq "2" ]; then
      local major=${BASH_REMATCH[1]}
      local minor=${BASH_REMATCH[2]}
      local patch=${BASH_REMATCH[3]}
      local prere=${BASH_REMATCH[4]}
      local build=${BASH_REMATCH[8]}
      eval "$2=(\"$major\" \"$minor\" \"$patch\" \"$prere\" \"$build\")"
    else
      echo "$version"
    fi
  else
    error "version $version does not match the semver scheme 'X.Y.Z(-PRERELEASE)(+BUILD)'. See help for more information."
  fi
}

function is-nat {
  [[ "$1" =~ ^($NAT)$ ]]
}

function is-null {
  [ -z "$1" ]
}

function order-nat {
  [ "$1" -lt "$2" ] && { echo -1 ; return ; }
  [ "$1" -gt "$2" ] && { echo 1 ; return ; }
  echo 0
}

function order-string {
  [[ $1 < $2 ]] && { echo -1 ; return ; }
  [[ $1 > $2 ]] && { echo 1 ; return ; }
  echo 0
}

# given two (named) arrays containing NAT and/or ALPHANUM fields, compare them
# one by one according to semver 2.0.0 spec. Return -1, 0, 1 if left array ($1)
# is less-than, equal, or greater-than the right array ($2). The longer array
# is considered greater-than the shorter if the shorter is a prefix of the longer.
#
function compare-fields {
  local l="$1[@]"
  local r="$2[@]"
  local leftfield=( "${!l}" )
  local rightfield=( "${!r}" )
  local left
  local right

  local i=$(( -1 ))
  local order=$(( 0 ))

  while true
  do
    [ $order -ne 0 ] && { echo $order ; return ; }

    : $(( i++ ))
    left="${leftfield[$i]}"
    right="${rightfield[$i]}"

    is-null "$left" && is-null "$right" && { echo 0 ; return ; }
    is-null "$left" && { echo -1 ; return ; }
    is-null "$right" && { echo 1 ; return ; }

    is-nat "$left" && is-nat "$right" && { order=$(order-nat "$left" "$right") ; continue ; }
    is-nat "$left" && { echo -1 ; return ; }
    is-nat "$right" && { echo 1 ; return ; }
    { order=$(order-string "$left" "$right") ; continue ; }
  done
}

# shellcheck disable=SC2206 # checked by "validate"; ok to expand prerel id's into array
function compare-version {
  local order
  validate-version "$1" V
  validate-version "$2" V_

  # compare major, minor, patch

  local left=( "${V[0]}" "${V[1]}" "${V[2]}" )
  local right=( "${V_[0]}" "${V_[1]}" "${V_[2]}" )

  order=$(compare-fields left right)
  [ "$order" -ne 0 ] && { echo "$order" ; return ; }

  # compare pre-release ids when M.m.p are equal

  local prerel="${V[3]:1}"
  local prerel_="${V_[3]:1}"
  local left=( ${prerel//./ } )
  local right=( ${prerel_//./ } )

  # if left and right have no pre-release part, then left equals right
  # if only one of left/right has pre-release part, that one is less than simple M.m.p

  [ -z "$prerel" ] && [ -z "$prerel_" ] && { echo 0 ; return ; }
  [ -z "$prerel" ] && { echo 1 ; return ; }
  [ -z "$prerel_" ] && { echo -1 ; return ; }

  # otherwise, compare the pre-release id's

  compare-fields left right
}

function command-bump {
  local new; local version; local sub_version; local command;

  case $# in
    2) case $1 in
         major|minor|patch|release) command=$1; version=$2;;
         *) usage-help;;
       esac ;;
    3) case $1 in
         prerel|build) command=$1; sub_version=$2 version=$3 ;;
         *) usage-help;;
       esac ;;
    *) usage-help;;
  esac

  validate-version "$version" parts
  # shellcheck disable=SC2154
  local major="${parts[0]}"
  local minor="${parts[1]}"
  local patch="${parts[2]}"
  local prere="${parts[3]}"
  local build="${parts[4]}"

  case "$command" in
    major) new="$((major + 1)).0.0";;
    minor) new="${major}.$((minor + 1)).0";;
    patch) new="${major}.${minor}.$((patch + 1))";;
    release) new="${major}.${minor}.${patch}";;
    prerel) new=$(validate-version "${major}.${minor}.${patch}-${sub_version}");;
    build) new=$(validate-version "${major}.${minor}.${patch}${prere}+${sub_version}");;
    *) usage-help ;;
  esac

  echo "$new"
  exit 0
}

function command-compare {
  local v; local v_;

  case $# in
    2) v=$(validate-version "$1"); v_=$(validate-version "$2") ;;
    *) usage-help ;;
  esac

  set +u # need unset array element to evaluate to null
  compare-version "$v" "$v_"
  exit 0
}


# shellcheck disable=SC2034
function command-get {
  local part version

  if [[ "$#" -ne "2" ]] || [[ -z "$1" ]] || [[ -z "$2" ]]; then
    usage-help
    exit 0
  fi

  part="$1"
  version="$2"

  validate-version "$version" parts
  local major="${parts[0]}"
  local minor="${parts[1]}"
  local patch="${parts[2]}"
  local prerel="${parts[3]:1}"
  local build="${parts[4]:1}"
  local release="${major}.${minor}.${patch}"

  case "$part" in
    major|minor|patch|release|prerel|build) echo "${!part}" ;;
    *) usage-help ;;
  esac

  exit 0
}

case $# in
  0) echo "Unknown command: $*"; usage-help;;
esac

case $1 in
  --help|-h) echo -e "$USAGE"; exit 0;;
  --version|-v) usage-version ;;
  bump) shift; command-bump "$@";;
  get) shift; command-get "$@";;
  compare) shift; command-compare "$@";;
  *) echo "Unknown arguments: $*"; usage-help;;
esac
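
Representative invocations of this vendored tool (outputs follow from the implementation above):

    semver get major v9.0.0-beta.1     # 9
    semver get prerel v9.0.0-beta.1    # beta.1
    semver bump minor 9.0.0            # 9.1.0
    semver compare 9.0.0-beta.1 9.0.0  # -1 (the prerelease is older)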

.github/workflows/build-images.yml (new file, 32 lines, vendored)
@@ -0,0 +1,32 @@
name: build-images
on:
  release:
    types:
      - published

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Build GitHub Container Registry
        uses: ./.github/actions/build-images
        with:
          registry: "ghcr.io"
          repository: "${{ github.repository }}"
          username: "${{ secrets.REGISTRY_USERNAME }}"
          password: "${{ secrets.REGISTRY_PASSWORD }}"
          version: "${{ github.ref }}"
          push: "true"

      - name: Build Docker Hub
        uses: ./.github/actions/build-images
        with:
          registry: "docker.io"
          repository: "${{ github.repository }}"
          username: "${{ secrets.DOCKERHUB_USERNAME }}"
          password: "${{ secrets.DOCKERHUB_PASSWORD }}"
          version: "${{ github.ref }}"
          push: "true"

.github/workflows/create-release.yml (new file, 24 lines, vendored)
@@ -0,0 +1,24 @@
name: create-release
on:
  push:
    tags:
      - 'v*'
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Create Release
        id: create_release
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.REPOSITORY_TOKEN }}
        with:
          tag_name: ${{ github.ref }}
          release_name: ${{ github.ref }}
          body: |
            Directus ${{ github.ref }}
          draft: false
          prerelease: false
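
A hypothetical end-to-end trigger for these two workflows (the tag name is assumed):

    git tag v9.0.0-beta.10
    git push origin v9.0.0-beta.10
    # create-release.yml fires on the tag push and publishes a GitHub release;
    # build-images.yml then fires on the published release and pushes the
    # images to ghcr.io and docker.io with tags derived from the ref.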

.gitignore (modified, vendored)
@@ -2,6 +2,9 @@
node_modules
.vs_code
.env
.secrets
npm-debug.log
lerna-debug.log
.nova
*.code-workspace
dist

api/cli.js (new executable file, 5 lines)
@@ -0,0 +1,5 @@
#!/usr/bin/env node

try {
  return require('./dist/cli/index.js');
} catch {}

@@ -1,8 +1,8 @@
####################################################################################################
# General

PORT=41201
PUBLIC_URL="http://localhost:41201"
PORT=8055
PUBLIC_URL="http://localhost:8055"
LOG_LEVEL="info"
LOG_STYLE="pretty"

@@ -70,7 +70,7 @@ CACHE_STORE=memory # memory | redis | memcache

STORAGE_LOCATIONS="local" # CSV of names

STORAGE_LOCAL_PUBLIC_URL="http://localhost:41201/uploads"
STORAGE_LOCAL_PUBLIC_URL="http://localhost:8055/uploads"
STORAGE_LOCAL_DRIVER="local"
STORAGE_LOCAL_ROOT="./uploads"

api/package-lock.json (generated, 4201 lines): diff suppressed because it is too large

@@ -1,6 +1,6 @@
{
  "name": "directus",
  "version": "9.0.0-beta.1",
  "version": "9.0.0-beta.10",
  "license": "GPL-3.0-only",
  "homepage": "https://github.com/directus/next#readme",
  "description": "Directus is a real-time API and App dashboard for managing SQL database content.",
@@ -48,11 +48,11 @@
  ],
  "main": "dist/app.js",
  "bin": {
    "directus": "dist/cli/index.js"
    "directus": "cli.js"
  },
  "scripts": {
    "start": "cross-env NODE_ENV=production node dist/server.js",
    "build": "rm -rf dist && tsc -b && copyfiles \"src/**/*.*\" -e \"src/**/*.ts\" -u 1 dist",
    "build": "rm -rf dist && tsc --build && copyfiles \"src/**/*.*\" -e \"src/**/*.ts\" -u 1 dist",
    "dev": "cross-env NODE_ENV=development LOG_LEVEL=trace ts-node-dev --files src/server.ts --respawn --watch \"src/**/*.ts\" --transpile-only",
    "cli": "cross-env NODE_ENV=development ts-node --script-mode --transpile-only src/cli/index.ts",
    "prepublishOnly": "npm run build"
@@ -64,8 +64,9 @@
    "example.env"
  ],
  "dependencies": {
    "@directus/app": "^9.0.0-beta.1",
    "@directus/app": "file:../app",
    "@directus/format-title": "^3.2.0",
    "@directus/specs": "file:../packages/spec",
    "@slynova/flydrive": "^1.0.2",
    "@slynova/flydrive-gcs": "^1.0.2",
    "@slynova/flydrive-s3": "^1.0.2",
@@ -79,16 +80,19 @@
    "commander": "^5.1.0",
    "cookie-parser": "^1.4.5",
    "cors": "^2.8.5",
    "date-fns": "^2.16.1",
    "dotenv": "^8.2.0",
    "eventemitter2": "^6.4.3",
    "execa": "^4.0.3",
    "exif-reader": "^1.0.3",
    "express": "^4.17.1",
    "express-async-handler": "^1.1.4",
    "express-graphql": "^0.11.0",
    "express-pino-logger": "^5.0.0",
    "express-session": "^1.17.1",
    "fs-extra": "^9.0.1",
    "grant": "^5.3.0",
    "graphql": "^15.3.0",
    "icc": "^2.0.0",
    "inquirer": "^7.3.3",
    "joi": "^17.1.1",
@@ -97,7 +101,7 @@
    "jsonwebtoken": "^8.5.1",
    "keyv": "^4.0.1",
    "knex": "^0.21.4",
    "knex-schema-inspector": "0.0.12",
    "knex-schema-inspector": "0.0.20",
    "liquidjs": "^9.14.1",
    "lodash": "^4.17.19",
    "macos-release": "^2.4.1",
@@ -123,54 +127,14 @@
    "mssql": "^6.2.0",
    "mysql": "^2.18.1",
    "oracledb": "^5.0.0",
    "pg": "^8.3.3",
    "pg": "^8.4.0",
    "sqlite3": "^5.0.0"
  },
  "gitHead": "4476da28dbbc2824e680137aa28b2b91b5afabec",
  "devDependencies": {
    "@types/atob": "^2.1.2",
    "@types/busboy": "^0.2.3",
    "@types/clear": "^0.1.0",
    "@types/cookie-parser": "^1.4.2",
    "@types/cors": "^2.8.7",
    "@types/express": "^4.17.7",
    "@types/express-pino-logger": "^4.0.2",
    "@types/express-session": "^1.17.0",
    "@types/fs-extra": "^9.0.1",
    "@types/inquirer": "^6.5.0",
    "@types/joi": "^14.3.4",
    "@types/js-yaml": "^3.12.5",
    "@types/json2csv": "^5.0.1",
    "@types/jsonwebtoken": "^8.5.0",
    "@types/keyv": "^3.1.1",
    "@types/lodash": "^4.14.159",
    "@types/ms": "^0.7.31",
    "@types/nodemailer": "^6.4.0",
    "@types/pino": "^6.3.0",
    "@types/sharp": "^0.25.1",
    "@types/uuid": "^8.0.0",
    "@types/uuid-validate": "0.0.1",
    "concat-map": "0.0.1",
    "copyfiles": "^2.3.0",
    "copyfiles": "^2.4.0",
    "cross-env": "^7.0.2",
    "eslint": "^7.6.0",
    "eslint-plugin-prettier": "^3.1.4",
    "husky": "^4.2.5",
    "lint-staged": "^10.2.11",
    "prettier": "^2.0.5",
    "ts-node": "^8.10.2",
    "ts-node-dev": "^1.0.0-pre.56",
    "tslint": "^6.1.3",
    "typescript": "^3.9.7"
  },
  "husky": {
    "hooks": {
      "pre-commit": "npx lint-staged"
    }
  },
  "lint-staged": {
    "*.{js,ts}": [
      "prettier --write"
    ]
  },
  "gitHead": "4476da28dbbc2824e680137aa28b2b91b5afabec"
  "ts-node-dev": "^1.0.0-pre.63",
  "typescript": "^4.0.3"
}
}

@@ -11,10 +11,10 @@ import { track } from './utils/track';
import errorHandler from './middleware/error-handler';
import cors from './middleware/cors';
import rateLimiter from './middleware/rate-limiter';
import { respond } from './middleware/respond';
import cache from './middleware/cache';
import extractToken from './middleware/extract-token';
import authenticate from './middleware/authenticate';
import responseHook from './middleware/response-hook';
import activityRouter from './controllers/activity';
import assetsRouter from './controllers/assets';
import authRouter from './controllers/auth';
@@ -34,21 +34,41 @@ import settingsRouter from './controllers/settings';
import usersRouter from './controllers/users';
import utilsRouter from './controllers/utils';
import webhooksRouter from './controllers/webhooks';
import graphqlRouter from './controllers/graphql';

import notFoundHandler from './controllers/not-found';
import sanitizeQuery from './middleware/sanitize-query';
import WebhooksService from './services/webhooks';
import { WebhooksService } from './services/webhooks';
import { InvalidPayloadException } from './exceptions';

import { registerExtensions } from './extensions';
import emitter from './emitter';

validateEnv(['KEY', 'SECRET']);

const app = express();

const customRouter = express.Router();

app.disable('x-powered-by');
app.set('trust proxy', true);

app.use(expressLogger({ logger }));
app.use(responseHook);

app.use((req, res, next) => {
  bodyParser.json()(req, res, (err) => {
    if (err) {
      return next(new InvalidPayloadException(err.message));
    }

    return next();
  });
});

app.use(bodyParser.json());
app.use(extractToken);

app.use((req, res, next) => {
  res.setHeader('X-Powered-By', 'Directus');
  next();
@@ -79,6 +99,9 @@ app.use('/auth', authRouter);

app.use(authenticate);
app.use(cache);

app.use('/graphql', graphqlRouter);

app.use('/activity', activityRouter);
app.use('/assets', assetsRouter);
app.use('/collections', collectionsRouter);
@@ -97,9 +120,7 @@ app.use('/settings', settingsRouter);
app.use('/users', usersRouter);
app.use('/utils', utilsRouter);
app.use('/webhooks', webhooksRouter);

app.use(respond);

app.use('/custom', customRouter);
app.use(notFoundHandler);
app.use(errorHandler);

@@ -107,6 +128,11 @@ app.use(errorHandler);
const webhooksService = new WebhooksService();
webhooksService.register();

// Register custom hooks / endpoints
registerExtensions(customRouter);

track('serverStarted');

emitter.emitAsync('server.started').catch((err) => logger.warn(err));

export default app;

@@ -27,18 +27,19 @@ function getKevyInstance() {
  }
}

function getConfig(
  store: 'memory' | 'redis' | 'memcache' = 'memory'
): Options<any> {
  const config: Options<any> = { namespace: env.CACHE_NAMESPACE, ttl: ms(env.CACHE_TTL as string) };
function getConfig(store: 'memory' | 'redis' | 'memcache' = 'memory'): Options<any> {
  const config: Options<any> = {
    namespace: env.CACHE_NAMESPACE,
    ttl: ms(env.CACHE_TTL as string),
  };

  if (store === 'redis') {
    const Redis = require('ioredis');
    const KeyvRedis = require('@keyv/redis');

    config.store = new KeyvRedis(new Redis(
      env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_')
    ));
    config.store = new KeyvRedis(
      new Redis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'))
    );
  }

  if (store === 'memcache') {

api/src/cli/commands/count/index.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
export default async function rolesCreate(collection: string) {
  const database = require('../../../database/index').default;

  if (!collection) {
    console.error('Collection is required');
    process.exit(1);
  }

  const records = await database(collection).count('*', { as: 'count' });
  const count = Number(records[0].count);

  console.log(count);

  database.destroy();
}
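
With the registration added to the CLI later in this diff ('count <collection>'), the command would be run like so (the collection name is an assumption):

    npx directus count articles  # prints the number of rows in "articles"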

@@ -5,7 +5,7 @@ export default async function migrate(direction: 'latest' | 'up' | 'down') {

  try {
    await run(database, direction);
  } catch(err) {
  } catch (err) {
    console.log(err);
    process.exit(1);
  } finally {

@@ -1,6 +1,6 @@
export default async function rolesCreate({ name, admin }: any) {
  const database = require('../../../database/index').default;
  const RolesService = require('../../../services/roles').default;
  const { RolesService } = require('../../../services/roles');

  if (!name) {
    console.error('Name is required');

@@ -1,16 +1,47 @@
import knex from 'knex';
import logger from '../../logger';
import { Express } from 'express';

export default async function start() {
  const { default: env } = require('../../env');
  const { validateDBConnection } = require('../../database');
  const database = require('../../database');
  const connection = database.default as knex;

  await validateDBConnection();
  await database.validateDBConnection();

  const app = require('../../app').default;
  const app: Express = require('../../app').default;

  const port = env.PORT;

  app.listen(port, () => {
  const server = app.listen(port, () => {
    logger.info(`Server started at port ${port}`);
  });

  const signals: NodeJS.Signals[] = ['SIGHUP', 'SIGINT', 'SIGTERM'];
  signals.forEach((signal) => {
    process.on(signal, () =>
      server.close((err) => {
        if (err) {
          logger.error(`Failed to close server: ${err.message}`, {
            err,
          });
          process.exit(1);
        }
        logger.info('Server stopped.');

        connection
          .destroy()
          .then(() => {
            logger.info('Database connection stopped.');
            process.exit(0);
          })
          .catch((err) => {
            logger.info(`Failed to destroy database connections: ${err.message}`, {
              err,
            });
            process.exit(1);
          });
      })
    );
  });
}

@@ -1,6 +1,6 @@
export default async function usersCreate({ email, password, role }: any) {
  const database = require('../../../database/index').default;
  const UsersService = require('../../../services/users').default;
  const { UsersService } = require('../../../services/users');

  if (!email || !password || !role) {
    console.error('Email, password, role are required');

@@ -10,6 +10,7 @@ import dbInstall from './commands/database/install';
import dbMigrate from './commands/database/migrate';
import usersCreate from './commands/users/create';
import rolesCreate from './commands/roles/create';
import count from './commands/count';

program.name('directus').usage('[command] [options]');
program.version(pkg.version, '-v, --version');
@@ -19,9 +20,18 @@ program.command('init').description('Create a new Directus Project').action(init

const dbCommand = program.command('database');
dbCommand.command('install').description('Install the database').action(dbInstall);
dbCommand.command('migrate:latest').description('Upgrade the database').action(() => dbMigrate('latest'));
dbCommand.command('migrate:up').description('Upgrade the database').action(() => dbMigrate('up'));
dbCommand.command('migrate:down').description('Downgrade the database').action(() => dbMigrate('down'));
dbCommand
  .command('migrate:latest')
  .description('Upgrade the database')
  .action(() => dbMigrate('latest'));
dbCommand
  .command('migrate:up')
  .description('Upgrade the database')
  .action(() => dbMigrate('up'));
dbCommand
  .command('migrate:down')
  .description('Downgrade the database')
  .action(() => dbMigrate('down'));

const usersCommand = program.command('users');
usersCommand
@@ -34,7 +44,7 @@ usersCommand

const rolesCommand = program.command('roles');
rolesCommand
  .command('create')
  .command('create')
  .storeOptionsAsProperties(false)
  .passCommandToAction(false)
  .description('Create a new role')
@@ -42,4 +52,9 @@ rolesCommand
  .option('--admin', `whether or not the role has admin access`)
  .action(rolesCreate);

program
  .command('count <collection>')
  .description('Count the amount of items in a given collection')
  .action(count);

program.parse(process.argv);

@@ -1,7 +1,7 @@
####################################################################################################
## General

PORT=41201
PORT=8055
PUBLIC_URL="/"

####################################################################################################

@@ -1,11 +1,15 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import ActivityService from '../services/activity';
import MetaService from '../services/meta';
import { ActivityService, MetaService } from '../services';
import { Action } from '../types';
import { ForbiddenException } from '../exceptions';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = express.Router();

router.use(useCollection('directus_activity'));

router.get(
  '/',
  asyncHandler(async (req, res, next) => {
@@ -22,6 +26,7 @@ router.get(

    return next();
  }),
  respond
);

router.get(
@@ -36,6 +41,7 @@ router.get(

    return next();
  }),
  respond
);

router.post(
@@ -46,19 +52,28 @@ router.post(
    const primaryKey = await service.create({
      ...req.body,
      action: Action.COMMENT,
      action_by: req.accountability?.user,
      user: req.accountability?.user,
      ip: req.ip,
      user_agent: req.get('user-agent'),
    });

    const record = await service.readByKey(primaryKey, req.sanitizedQuery);
    try {
      const record = await service.readByKey(primaryKey, req.sanitizedQuery);

    res.locals.payload = {
      data: record || null,
    };
      res.locals.payload = {
        data: record || null,
      };
    } catch (error) {
      if (error instanceof ForbiddenException) {
        return next();
      }

      throw error;
    }

    return next();
  }),
  respond
);

router.patch(
@@ -66,14 +81,24 @@ router.patch(
  asyncHandler(async (req, res, next) => {
    const service = new ActivityService({ accountability: req.accountability });
    const primaryKey = await service.update(req.body, req.params.pk);
    const record = await service.readByKey(primaryKey, req.sanitizedQuery);

    res.locals.payload = {
      data: record || null,
    };
    try {
      const record = await service.readByKey(primaryKey, req.sanitizedQuery);

      res.locals.payload = {
        data: record || null,
      };
    } catch (error) {
      if (error instanceof ForbiddenException) {
        return next();
      }

      throw error;
    }

    return next();
  }),
  respond
);

router.delete(
@@ -84,6 +109,7 @@ router.delete(

    return next();
  }),
  respond
);

export default router;

@@ -3,15 +3,18 @@ import asyncHandler from 'express-async-handler';
import database from '../database';
import { SYSTEM_ASSET_ALLOW_LIST, ASSET_TRANSFORM_QUERY_KEYS } from '../constants';
import { InvalidQueryException, ForbiddenException } from '../exceptions';
import AssetsService from '../services/assets';
import validate from 'uuid-validate';
import { pick } from 'lodash';
import { Transformation } from '../types/assets';
import storage from '../storage';
import PayloadService from '../services/payload';
import { PayloadService, AssetsService } from '../services';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = Router();

router.use(useCollection('directus_files'));

router.get(
  '/:pk',

@@ -2,15 +2,15 @@ import { Router } from 'express';
import session from 'express-session';
import asyncHandler from 'express-async-handler';
import Joi from 'joi';
import AuthenticationService from '../services/authentication';
import grant from 'grant';
import getGrantConfig from '../utils/get-grant-config';
import getEmailFromProfile from '../utils/get-email-from-profile';
import { InvalidPayloadException } from '../exceptions/invalid-payload';
import ms from 'ms';
import cookieParser from 'cookie-parser';
import env from '../env';
import UsersService from '../services/users';
import { UsersService, AuthenticationService } from '../services';
import grantConfig from '../grant';
import { RouteNotFoundException } from '../exceptions';
import { respond } from '../middleware/respond';

const router = Router();
@@ -75,7 +75,8 @@ router.post(

    res.locals.payload = payload;
    return next();
  })
  }),
  respond
);

router.post(
@@ -126,7 +127,8 @@ router.post(

    res.locals.payload = payload;
    return next();
  })
  }),
  respond
);

router.post(
@@ -153,7 +155,8 @@ router.post(

    await authenticationService.logout(currentRefreshToken);
    return next();
  })
  }),
  respond
);

router.post(
@@ -179,7 +182,8 @@ router.post(
    } finally {
      return next();
    }
  })
  }),
  respond
);

router.post(
@@ -202,22 +206,53 @@ router.post(
    const service = new UsersService({ accountability });
    await service.resetPassword(req.body.token, req.body.password);
    return next();
  })
  }),
  respond
);

router.get(
  '/oauth',
  asyncHandler(async (req, res, next) => {
    const providers = env.OAUTH_PROVIDERS.split(',').filter((p: string) => p);
    res.locals.payload = { data: providers.length > 0 ? providers : null };
    return next();
  }),
  respond
);

router.use(
  '/sso',
  '/oauth',
  session({ secret: env.SECRET as string, saveUninitialized: false, resave: false })
);

router.use(grant.express()(getGrantConfig()));

/**
 * @todo allow json / cookie mode in SSO
 */
router.get(
  '/sso/:provider/callback',
  '/oauth/:provider',
  asyncHandler(async (req, res, next) => {
    const config = { ...grantConfig };
    delete config.defaults;

    const availableProviders = Object.keys(config);

    if (availableProviders.includes(req.params.provider) === false) {
      throw new RouteNotFoundException(`/auth/oauth/${req.params.provider}`);
    }

    if (req.query?.redirect && req.session) {
      req.session.redirect = req.query.redirect;
    }

    next();
  }),
  respond
);

router.use(grant.express()(grantConfig));

router.get(
  '/oauth/:provider/callback',
  asyncHandler(async (req, res, next) => {
    const redirect = req.session?.redirect;

    const accountability = {
      ip: req.ip,
      userAgent: req.get('user-agent'),
@@ -228,20 +263,36 @@ router.get(
      accountability: accountability,
    });

    const email = getEmailFromProfile(req.params.provider, req.session!.grant.response.profile);

    const { accessToken, refreshToken, expires } = await authenticationService.authenticate(
      email
    const email = getEmailFromProfile(
      req.params.provider,
      req.session!.grant.response?.profile
    );

    res.locals.payload = {
      data: { access_token: accessToken, refresh_token: refreshToken, expires },
    };
    req.session?.destroy(() => {});

    return next();
  })
    const { accessToken, refreshToken, expires } = await authenticationService.authenticate({
      email,
    });

    if (redirect) {
      res.cookie('directus_refresh_token', refreshToken, {
        httpOnly: true,
        maxAge: ms(env.REFRESH_TOKEN_TTL as string),
        secure: env.REFRESH_TOKEN_COOKIE_SECURE === 'true' ? true : false,
        sameSite:
          (env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
      });

      return res.redirect(redirect);
    } else {
      res.locals.payload = {
        data: { access_token: accessToken, refresh_token: refreshToken, expires },
      };

      return next();
    }
  }),
  respond
);

router.use(respond);

export default router;
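
A sketch of exercising the new OAuth endpoints added above (host, port, and provider are assumptions):

    # List configured OAuth providers; returns null while OAUTH_PROVIDERS is empty
    curl http://localhost:8055/auth/oauth

    # Start a provider flow in a browser; ?redirect is stored in the session and
    # honored by /auth/oauth/:provider/callback via the refresh-token cookie
    open "http://localhost:8055/auth/oauth/github?redirect=http://localhost:8080/admin"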

@@ -1,7 +1,8 @@
import { Router } from 'express';
import asyncHandler from 'express-async-handler';
import CollectionsService from '../services/collections'
import MetaService from '../services/meta';
import { CollectionsService, MetaService } from '../services';
import { ForbiddenException } from '../exceptions';
import { respond } from '../middleware/respond';

const router = Router();

@@ -15,7 +16,8 @@ router.post(

    res.locals.payload = { data: record || null };
    return next();
  })
  }),
  respond
);

router.get(
@@ -29,7 +31,8 @@ router.get(

    res.locals.payload = { data: collections || null, meta };
    return next();
  })
  }),
  respond
);

router.get(
@@ -39,11 +42,21 @@ router.get(
    const collectionKey = req.params.collection.includes(',')
      ? req.params.collection.split(',')
      : req.params.collection;
    const collection = await collectionsService.readByKey(collectionKey as any);

    res.locals.payload = { data: collection || null };
    try {
      const collection = await collectionsService.readByKey(collectionKey as any);
      res.locals.payload = { data: collection || null };
    } catch (error) {
      if (error instanceof ForbiddenException) {
        return next();
      }

      throw error;
    }

    return next();
  })
  }),
  respond
);

router.patch(
@@ -54,10 +67,21 @@ router.patch(
      ? req.params.collection.split(',')
      : req.params.collection;
    await collectionsService.update(req.body, collectionKey as any);
    const collection = await collectionsService.readByKey(collectionKey as any);
    res.locals.payload = { data: collection || null };

    try {
      const collection = await collectionsService.readByKey(collectionKey as any);
      res.locals.payload = { data: collection || null };
    } catch (error) {
      if (error instanceof ForbiddenException) {
        return next();
      }

      throw error;
    }

    return next();
  })
  }),
  respond
);

router.delete(
@@ -70,7 +94,8 @@ router.delete(
    await collectionsService.delete(collectionKey as any);

    return next();
  })
  }),
  respond
);

export default router;

@@ -1,8 +1,9 @@
import express, { Router } from 'express';
import asyncHandler from 'express-async-handler';
import { RouteNotFoundException } from '../exceptions';
import ExtensionsService from '../services/extensions';
import { listExtensions } from '../extensions';
import env from '../env';
import { respond } from '../middleware/respond';

const router = Router();

@@ -12,21 +13,21 @@ router.use(express.static(extensionsPath));
router.get(
  '/:type',
  asyncHandler(async (req, res, next) => {
    const service = new ExtensionsService();
    const typeAllowList = ['interfaces', 'layouts', 'displays', 'modules'];

    if (typeAllowList.includes(req.params.type) === false) {
      throw new RouteNotFoundException(req.path);
    }

    const extensions = await service.listExtensions(req.params.type);
    const extensions = await listExtensions(req.params.type);

    res.locals.payload = {
      data: extensions,
    };

    return next();
  })
  }),
  respond
);

export default router;
@@ -1,15 +1,18 @@
import { Router } from 'express';
import asyncHandler from 'express-async-handler';
import FieldsService from '../services/fields';
import { FieldsService } from '../services/fields';
import validateCollection from '../middleware/collection-exists';
import { schemaInspector } from '../database';
import { InvalidPayloadException, ForbiddenException } from '../exceptions';
import Joi from 'joi';
import { Field } from '../types/field';
import { types } from '../types';
import { types, Field } from '../types';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = Router();

router.use(useCollection('directus_fields'));

router.get(
	'/',
	asyncHandler(async (req, res, next) => {
@@ -18,7 +21,8 @@ router.get(

		res.locals.payload = { data: fields || null };
		return next();
	})
	}),
	respond
);

router.get(
@@ -30,7 +34,8 @@ router.get(

		res.locals.payload = { data: fields || null };
		return next();
	})
	}),
	respond
);

router.get(
@@ -46,17 +51,18 @@ router.get(

		res.locals.payload = { data: field || null };
		return next();
	})
	}),
	respond
);

const newFieldSchema = Joi.object({
	collection: Joi.string().optional(),
	field: Joi.string().required(),
	type: Joi.string().valid(...types),
	type: Joi.string().valid(...types, null),
	schema: Joi.object({
		comment: Joi.string().allow(null),
		default_value: Joi.any(),
		max_length: [Joi.number(), Joi.string()],
		max_length: [Joi.number(), Joi.string(), Joi.valid(null)],
		is_nullable: Joi.bool(),
	}).unknown(),
	/** @todo base this on default validation */
@@ -82,11 +88,20 @@ router.post(

		await service.createField(req.params.collection, field);

		const createdField = await service.readOne(req.params.collection, field.field);
		try {
			const createdField = await service.readOne(req.params.collection, field.field);
			res.locals.payload = { data: createdField || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		res.locals.payload = { data: createdField || null };
		return next();
	})
	}),
	respond
);

router.patch(
@@ -95,22 +110,32 @@ router.patch(
	asyncHandler(async (req, res, next) => {
		const service = new FieldsService({ accountability: req.accountability });

		if (Array.isArray(req.body) === false)
		if (Array.isArray(req.body) === false) {
			throw new InvalidPayloadException('Submitted body has to be an array.');

		let results: any = [];
		}

		for (const field of req.body) {
			await service.updateField(req.params.collection, field);

			const updatedField = await service.readOne(req.params.collection, field.field);

			results.push(updatedField);
		}

		res.locals.payload = { data: results || null };
		try {
			let results: any = [];
			for (const field of req.body) {
				const updatedField = await service.readOne(req.params.collection, field.field);
				results.push(updatedField);
				res.locals.payload = { data: results || null };
			}
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.patch(
@@ -125,11 +150,20 @@ router.patch(

		await service.updateField(req.params.collection, fieldData);

		const updatedField = await service.readOne(req.params.collection, req.params.field);
		try {
			const updatedField = await service.readOne(req.params.collection, req.params.field);
			res.locals.payload = { data: updatedField || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		res.locals.payload = { data: updatedField || null };
		return next();
	})
	}),
	respond
);

router.delete(
@@ -139,7 +173,8 @@ router.delete(
		const service = new FieldsService({ accountability: req.accountability });
		await service.deleteField(req.params.collection, req.params.field);
		return next();
	})
	}),
	respond
);

export default router;

@@ -1,19 +1,22 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import Busboy from 'busboy';
import FilesService from '../services/files';
import MetaService from '../services/meta';
import { MetaService, FilesService } from '../services';
import { File, PrimaryKey } from '../types';
import formatTitle from '@directus/format-title';
import env from '../env';
import axios from 'axios';
import Joi from 'joi';
import { InvalidPayloadException } from '../exceptions';
import { InvalidPayloadException, ForbiddenException } from '../exceptions';
import url from 'url';
import path from 'path';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = express.Router();

router.use(useCollection('directus_files'));

const multipartHandler = asyncHandler(async (req, res, next) => {
	if (req.is('multipart/form-data') === false) return next();

@@ -108,15 +111,28 @@ router.post(
			keys = await service.create(req.body);
		}

		const record = await service.readByKey(keys as any, req.sanitizedQuery);
		try {
			const record = await service.readByKey(keys as any, req.sanitizedQuery);

			res.locals.payload = {
				data: record,
			};
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		res.locals.payload = { data: res.locals.savedFiles.length === 1 ? record[0] : record || null };
		return next();
	})
	}),
	respond
);

const importSchema = Joi.object({
	url: Joi.string().required(),
	data: Joi.object(),
});

router.post(
@@ -142,16 +158,25 @@ router.post(
			storage: (env.STORAGE_LOCATIONS as string).split(',')[0].trim(),
			type: fileResponse.headers['content-type'],
			title: formatTitle(filename),
			...req.body,
			...(req.body.data || {}),
		};

		delete payload.url;

		const primaryKey = await service.upload(fileResponse.data, payload);
		const record = await service.readByKey(primaryKey, req.sanitizedQuery);
		res.locals.payload = { data: record || null };

		try {
			const record = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: record || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.get(
@@ -165,7 +190,8 @@ router.get(

		res.locals.payload = { data: records || null, meta };
		return next();
	})
	}),
	respond
);

router.get(
@@ -176,7 +202,8 @@ router.get(
		const record = await service.readByKey(keys as any, req.sanitizedQuery);
		res.locals.payload = { data: record || null };
		return next();
	})
	}),
	respond
);

router.patch(
@@ -193,10 +220,20 @@ router.patch(
			await service.update(req.body, keys as any);
		}

		const record = await service.readByKey(keys as any, req.sanitizedQuery);
		res.locals.payload = { data: record || null };
		try {
			const record = await service.readByKey(keys as any, req.sanitizedQuery);
			res.locals.payload = { data: record || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.delete(
@@ -206,7 +243,8 @@ router.delete(
		const service = new FilesService({ accountability: req.accountability });
		await service.delete(keys as any);
		return next();
	})
	}),
	respond
);

export default router;

@@ -1,20 +1,34 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import FoldersService from '../services/folders';
import MetaService from '../services/meta';
import { FoldersService, MetaService } from '../services';
import { ForbiddenException } from '../exceptions';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = express.Router();

router.use(useCollection('directus_folders'));

router.post(
	'/',
	asyncHandler(async (req, res, next) => {
		const service = new FoldersService({ accountability: req.accountability });
		const primaryKey = await service.create(req.body);
		const record = await service.readByKey(primaryKey, req.sanitizedQuery);

		res.locals.payload = { data: record || null };
		try {
			const record = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: record || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.get(
@@ -28,7 +42,8 @@ router.get(

		res.locals.payload = { data: records || null, meta };
		return next();
	})
	}),
	respond
);

router.get(
@@ -40,7 +55,8 @@ router.get(

		res.locals.payload = { data: record || null };
		return next();
	})
	}),
	respond
);

router.patch(
@@ -49,11 +65,21 @@ router.patch(
		const service = new FoldersService({ accountability: req.accountability });
		const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
		const primaryKey = await service.update(req.body, pk as any);
		const record = await service.readByKey(primaryKey, req.sanitizedQuery);

		res.locals.payload = { data: record || null };
		try {
			const record = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: record || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.delete(
@@ -63,7 +89,8 @@ router.delete(
		const primaryKey = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
		await service.delete(primaryKey as any);
		return next();
	})
	}),
	respond
);

export default router;

16
api/src/controllers/graphql.ts
Normal file
@@ -0,0 +1,16 @@
import { Router } from 'express';
import { graphqlHTTP } from 'express-graphql';
import { GraphQLService } from '../services';
import asyncHandler from 'express-async-handler';

const router = Router();

router.use(asyncHandler(async (req, res) => {
	const service = new GraphQLService({ accountability: req.accountability });
	const schema = await service.getSchema();

	graphqlHTTP({ schema, graphiql: true })(req, res);
}));

export default router;

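The new GraphQL controller above rebuilds the schema on every request, presumably so it can reflect the requester's accountability. A sketch of one way that could be memoized per role; the cache shape is an assumption for illustration, not part of this commit:

import { GraphQLSchema } from 'graphql';

// Naive per-role schema cache: requests carrying the same role reuse the
// previously built schema instead of regenerating it. No invalidation is
// handled here; a real cache would need clearing whenever the schema changes.
const schemaCache = new Map<string, GraphQLSchema>();

async function getSchemaForRole(
	role: string,
	build: () => Promise<GraphQLSchema>
): Promise<GraphQLSchema> {
	const cached = schemaCache.get(role);
	if (cached) return cached;
	const schema = await build();
	schemaCache.set(role, schema);
	return schema;
}
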
@@ -1,9 +1,9 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import ItemsService from '../services/items';
import collectionExists from '../middleware/collection-exists';
import MetaService from '../services/meta';
import { RouteNotFoundException } from '../exceptions';
import { ItemsService, MetaService } from '../services';
import { RouteNotFoundException, ForbiddenException } from '../exceptions';
import { respond } from '../middleware/respond';

const router = express.Router();

@@ -17,11 +17,21 @@ router.post(

		const service = new ItemsService(req.collection, { accountability: req.accountability });
		const primaryKey = await service.create(req.body);
		const result = await service.readByKey(primaryKey, req.sanitizedQuery);

		res.locals.payload = { data: result || null };
		try {
			const result = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: result || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	}),
	respond
);

router.get(
@@ -43,6 +53,7 @@ router.get(
		};
		return next();
	}),
	respond
);

router.get(
@@ -62,6 +73,7 @@ router.get(
		};
		return next();
	}),
	respond
);

router.patch(
@@ -79,10 +91,21 @@ router.patch(
		}

		const primaryKeys = await service.update(req.body);
		const result = await service.readByKey(primaryKeys, req.sanitizedQuery);
		res.locals.payload = { data: result || null };

		try {
			const result = await service.readByKey(primaryKeys, req.sanitizedQuery);
			res.locals.payload = { data: result || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	}),
	respond
);

router.patch(
@@ -97,11 +120,21 @@ router.patch(
		const primaryKey = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;

		const updatedPrimaryKey = await service.update(req.body, primaryKey as any);
		const result = await service.readByKey(updatedPrimaryKey, req.sanitizedQuery);

		res.locals.payload = { data: result || null };
		try {
			const result = await service.readByKey(updatedPrimaryKey, req.sanitizedQuery);
			res.locals.payload = { data: result || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	}),
	respond
);

router.delete(
@@ -113,6 +146,7 @@ router.delete(
		await service.delete(pk as any);
		return next();
	}),
	respond
);

export default router;

@@ -1,22 +1,34 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import PermissionsService from '../services/permissions';
import MetaService from '../services/meta';
import { PermissionsService, MetaService } from '../services';
import { clone } from 'lodash';
import { InvalidCredentialsException } from '../exceptions';
import { InvalidCredentialsException, ForbiddenException } from '../exceptions';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = express.Router();

router.use(useCollection('directus_permissions'));

router.post(
	'/',
	asyncHandler(async (req, res, next) => {
		const service = new PermissionsService({ accountability: req.accountability });
		const primaryKey = await service.create(req.body);
		const item = await service.readByKey(primaryKey, req.sanitizedQuery);

		res.locals.payload = { data: item || null };
		try {
			const item = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: item || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}
		return next();
	})
	}),
	respond
);

router.get(
@@ -30,7 +42,8 @@ router.get(

		res.locals.payload = { data: item || null, meta };
		return next();
	})
	}),
	respond
);

router.get(
@@ -54,7 +67,8 @@ router.get(

		res.locals.payload = { data: items || null };
		return next();
	})
	}),
	respond
);

router.get(
@@ -67,7 +81,8 @@ router.get(

		res.locals.payload = { data: record || null };
		return next();
	})
	}),
	respond
);

router.patch(
@@ -76,11 +91,21 @@ router.patch(
		const service = new PermissionsService({ accountability: req.accountability });
		const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
		const primaryKey = await service.update(req.body, pk as any);
		const item = await service.readByKey(primaryKey, req.sanitizedQuery);

		res.locals.payload = { data: item || null };
		try {
			const item = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: item || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.delete(
@@ -90,7 +115,8 @@ router.delete(
		const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
		await service.delete(pk as any);
		return next();
	})
	}),
	respond
);

export default router;

@@ -1,20 +1,34 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import PresetsService from '../services/presets';
import MetaService from '../services/meta';
import { PresetsService, MetaService } from '../services';
import { ForbiddenException } from '../exceptions';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = express.Router();

router.use(useCollection('directus_presets'));

router.post(
	'/',
	asyncHandler(async (req, res, next) => {
		const service = new PresetsService({ accountability: req.accountability });
		const primaryKey = await service.create(req.body);
		const record = await service.readByKey(primaryKey, req.sanitizedQuery);

		res.locals.payload = { data: record || null };
		try {
			const record = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: record || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.get(
@@ -28,7 +42,8 @@ router.get(

		res.locals.payload = { data: records || null, meta };
		return next();
	})
	}),
	respond
);

router.get(
@@ -40,7 +55,8 @@ router.get(

		res.locals.payload = { data: record || null };
		return next();
	})
	}),
	respond
);

router.patch(
@@ -49,11 +65,21 @@ router.patch(
		const service = new PresetsService({ accountability: req.accountability });
		const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
		const primaryKey = await service.update(req.body, pk as any);
		const record = await service.readByKey(primaryKey, req.sanitizedQuery);

		res.locals.payload = { data: record || null };
		try {
			const record = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: record || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.delete(
@@ -63,7 +89,8 @@ router.delete(
		const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
		await service.delete(pk as any);
		return next();
	})
	}),
	respond
);

export default router;

@@ -1,19 +1,34 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import RelationsService from '../services/relations';
import MetaService from '../services/meta';
import { RelationsService, MetaService } from '../services';
import { ForbiddenException } from '../exceptions';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = express.Router();

router.use(useCollection('directus_relations'));

router.post(
	'/',
	asyncHandler(async (req, res, next) => {
		const service = new RelationsService({ accountability: req.accountability });
		const primaryKey = await service.create(req.body);
		const item = await service.readByKey(primaryKey, req.sanitizedQuery);
		res.locals.payload = { data: item || null };

		try {
			const item = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: item || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.get(
@@ -27,7 +42,8 @@ router.get(

		res.locals.payload = { data: records || null, meta };
		return next();
	})
	}),
	respond
);

router.get(
@@ -38,7 +54,8 @@ router.get(
		const record = await service.readByKey(pk as any, req.sanitizedQuery);
		res.locals.payload = { data: record || null };
		return next();
	})
	}),
	respond
);

router.patch(
@@ -47,10 +64,21 @@ router.patch(
		const service = new RelationsService({ accountability: req.accountability });
		const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
		const primaryKey = await service.update(req.body, pk as any);
		const item = await service.readByKey(primaryKey, req.sanitizedQuery);
		res.locals.payload = { data: item || null };

		try {
			const item = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: item || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.delete(
@@ -60,7 +88,8 @@ router.delete(
		const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
		await service.delete(pk as any);
		return next();
	})
	}),
	respond
);

export default router;

@@ -1,10 +1,13 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import RevisionsService from '../services/revisions';
import MetaService from '../services/meta';
import { RevisionsService, MetaService } from '../services';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = express.Router();

router.use(useCollection('directus_revisions'));

router.get(
	'/',
	asyncHandler(async (req, res, next) => {
@@ -16,7 +19,8 @@ router.get(

		res.locals.payload = { data: records || null, meta };
		return next();
	})
	}),
	respond
);

router.get(
@@ -27,7 +31,8 @@ router.get(
		const record = await service.readByKey(pk as any, req.sanitizedQuery);
		res.locals.payload = { data: record || null };
		return next();
	})
	}),
	respond
);

export default router;

@@ -1,19 +1,34 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import RolesService from '../services/roles';
import MetaService from '../services/meta';
import { RolesService, MetaService } from '../services';
import { ForbiddenException } from '../exceptions';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = express.Router();

router.use(useCollection('directus_roles'));

router.post(
	'/',
	asyncHandler(async (req, res, next) => {
		const service = new RolesService({ accountability: req.accountability });
		const primaryKey = await service.create(req.body);
		const item = await service.readByKey(primaryKey, req.sanitizedQuery);
		res.locals.payload = { data: item || null };

		try {
			const item = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: item || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.get(
@@ -27,7 +42,8 @@ router.get(

		res.locals.payload = { data: records || null, meta };
		return next();
	})
	}),
	respond
);

router.get(
@@ -38,7 +54,8 @@ router.get(
		const record = await service.readByKey(pk as any, req.sanitizedQuery);
		res.locals.payload = { data: record || null };
		return next();
	})
	}),
	respond
);

router.patch(
@@ -47,10 +64,21 @@ router.patch(
		const service = new RolesService({ accountability: req.accountability });
		const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
		const primaryKey = await service.update(req.body, pk as any);
		const item = await service.readByKey(primaryKey, req.sanitizedQuery);
		res.locals.payload = { data: item || null };

		try {
			const item = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: item || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.delete(
@@ -60,7 +88,8 @@ router.delete(
		const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
		await service.delete(pk as any);
		return next();
	})
	}),
	respond
);

export default router;

@@ -1,15 +1,32 @@
import { Router } from 'express';
import ServerService from '../services/server';
import { ServerService } from '../services';
import { SpecificationService } from '../services';
import asyncHandler from 'express-async-handler';
import { respond } from '../middleware/respond';

const router = Router();

router.get(
	'/specs/oas',
	asyncHandler(async (req, res, next) => {
		const service = new SpecificationService({ accountability: req.accountability });
		res.locals.payload = await service.oas.generate();
		return next();
	}),
	respond
);

router.get('/ping', (req, res) => res.send('pong'));

router.get('/info', (req, res, next) => {
	const service = new ServerService({ accountability: req.accountability });
	const data = service.serverInfo();
	res.locals.payload = data;
	return next();
});
router.get(
	'/info',
	(req, res, next) => {
		const service = new ServerService({ accountability: req.accountability });
		const data = service.serverInfo();
		res.locals.payload = data;
		return next();
	},
	respond
);

export default router;

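The new /specs/oas route above generates an OpenAPI description of the running instance and hands it to respond. A hypothetical client call; the /server mount point and the default port are assumptions based on how these controllers are usually mounted, not something shown in this diff:

import axios from 'axios';

async function fetchOpenAPISpec(): Promise<unknown> {
	// The generated specification is the response body itself.
	const { data } = await axios.get('http://localhost:8055/server/specs/oas');
	return data;
}
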
@@ -1,9 +1,14 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import SettingsService from '../services/settings';
import { SettingsService } from '../services';
import { ForbiddenException } from '../exceptions';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = express.Router();

router.use(useCollection('directus_settings'));

router.get(
	'/',
	asyncHandler(async (req, res, next) => {
@@ -11,7 +16,8 @@ router.get(
		const records = await service.readSingleton(req.sanitizedQuery);
		res.locals.payload = { data: records || null };
		return next();
	})
	}),
	respond
);

router.patch(
@@ -19,11 +25,21 @@ router.patch(
	asyncHandler(async (req, res, next) => {
		const service = new SettingsService({ accountability: req.accountability });
		await service.upsertSingleton(req.body);
		const record = await service.readSingleton(req.sanitizedQuery);

		res.locals.payload = { data: record || null };
		try {
			const record = await service.readSingleton(req.sanitizedQuery);
			res.locals.payload = { data: record || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

export default router;

@@ -1,22 +1,40 @@
import express from 'express';
import argon2 from 'argon2';
import asyncHandler from 'express-async-handler';
import Joi from 'joi';
import { InvalidPayloadException, InvalidCredentialsException } from '../exceptions';
import UsersService from '../services/users';
import MetaService from '../services/meta';
import AuthService from '../services/authentication';
import {
	InvalidPayloadException,
	InvalidCredentialsException,
	ForbiddenException,
} from '../exceptions';
import { UsersService, MetaService, AuthenticationService } from '../services';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = express.Router();

router.use(useCollection('directus_users'));

router.post(
	'/',
	asyncHandler(async (req, res, next) => {
		const service = new UsersService({ accountability: req.accountability });
		const primaryKey = await service.create(req.body);
		const item = await service.readByKey(primaryKey, req.sanitizedQuery);
		res.locals.payload = { data: item || null };

		try {
			const item = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: item || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.get(
@@ -30,7 +48,8 @@ router.get(

		res.locals.payload = { data: item || null, meta };
		return next();
	})
	}),
	respond
);

router.get(
@@ -45,7 +64,8 @@ router.get(

		res.locals.payload = { data: item || null };
		return next();
	})
	}),
	respond
);

router.get(
@@ -57,7 +77,8 @@ router.get(
		const items = await service.readByKey(pk as any, req.sanitizedQuery);
		res.locals.payload = { data: items || null };
		return next();
	})
	}),
	respond
);

router.patch(
@@ -73,7 +94,8 @@ router.patch(

		res.locals.payload = { data: item || null };
		return next();
	})
	}),
	respond
);

router.patch(
@@ -91,7 +113,8 @@ router.patch(
		await service.update({ last_page: req.body.last_page }, req.accountability.user);

		return next();
	})
	}),
	respond
);

router.patch(
@@ -101,11 +124,20 @@ router.patch(
		const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
		const primaryKey = await service.update(req.body, pk as any);

		const item = await service.readByKey(primaryKey, req.sanitizedQuery);
		try {
			const item = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: item || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		res.locals.payload = { data: item || null };
		return next();
	})
	}),
	respond
);

router.delete(
@@ -116,7 +148,8 @@ router.delete(
		await service.delete(pk as any);

		return next();
	})
	}),
	respond
);

const inviteSchema = Joi.object({
@@ -133,7 +166,8 @@ router.post(
		const service = new UsersService({ accountability: req.accountability });
		await service.inviteUser(req.body.email, req.body.role);
		return next();
	})
	}),
	respond
);

const acceptInviteSchema = Joi.object({
@@ -149,7 +183,8 @@ router.post(
		const service = new UsersService({ accountability: req.accountability });
		await service.acceptInvite(req.body.token, req.body.password);
		return next();
	})
	}),
	respond
);

router.post(
@@ -159,12 +194,21 @@ router.post(
			throw new InvalidCredentialsException();
		}

		if (!req.body.password) {
			throw new InvalidPayloadException(`"password" is required`);
		}

		const service = new UsersService({ accountability: req.accountability });

		const authService = new AuthenticationService({ accountability: req.accountability });
		await authService.verifyPassword(req.accountability.user, req.body.password);

		const { url, secret } = await service.enableTFA(req.accountability.user);

		res.locals.payload = { data: { secret, otpauth_url: url } };
		return next();
	})
	}),
	respond
);

router.post(
@@ -179,7 +223,7 @@ router.post(
		}

		const service = new UsersService({ accountability: req.accountability });
		const authService = new AuthService({ accountability: req.accountability });
		const authService = new AuthenticationService({ accountability: req.accountability });

		const otpValid = await authService.verifyOTP(req.accountability.user, req.body.otp);

@@ -189,7 +233,8 @@ router.post(

		await service.disableTFA(req.accountability.user);
		return next();
	})
	}),
	respond
);

export default router;

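The TFA enable route above now verifies the account password before generating a secret. A condensed sketch of the flow with the Express scaffolding elided; the error behavior of verifyPassword is an assumption:

import { UsersService, AuthenticationService } from '../services';

async function enableTFAFlow(
	accountability: any, // shape as used by the services above
	userId: string,
	password: string
) {
	const users = new UsersService({ accountability });
	const auth = new AuthenticationService({ accountability });

	// Assumed to throw (e.g. InvalidCredentialsException) on a wrong password.
	await auth.verifyPassword(userId, password);

	// Returns the TOTP secret plus an otpauth:// URL for QR-code enrollment.
	const { url, secret } = await users.enableTFA(userId);

	return { secret, otpauth_url: url };
}
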
@@ -4,8 +4,9 @@ import { nanoid } from 'nanoid';
import { InvalidQueryException, InvalidPayloadException } from '../exceptions';
import argon2 from 'argon2';
import collectionExists from '../middleware/collection-exists';
import UtilsService from '../services/utils';
import { UtilsService, RevisionsService } from '../services';
import Joi from 'joi';
import { respond } from '../middleware/respond';

const router = Router();

@@ -18,7 +19,8 @@ router.get(
		const string = nanoid(req.query?.length ? Number(req.query.length) : 32);

		return res.json({ data: string });
	})
	}),
	respond
);

router.post(
@@ -31,7 +33,8 @@ router.post(
		const hash = await argon2.hash(req.body.string);

		return res.json({ data: hash });
	})
	}),
	respond
);

router.post(
@@ -48,7 +51,8 @@ router.post(
		const result = await argon2.verify(req.body.hash, req.body.string);

		return res.json({ data: result });
	})
	}),
	respond
);

const SortSchema = Joi.object({
@@ -67,7 +71,18 @@ router.post(
		await service.sort(req.collection, req.body);

		return res.status(200).end();
	})
	}),
	respond
);

router.post(
	'/revert/:revision',
	asyncHandler(async (req, res, next) => {
		const service = new RevisionsService({ accountability: req.accountability });
		await service.revert(req.params.revision);
		next();
	}),
	respond
);

export default router;

@@ -1,20 +1,34 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import WebhooksService from '../services/webhooks';
import MetaService from '../services/meta';
import { WebhooksService, MetaService } from '../services';
import { ForbiddenException } from '../exceptions';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';

const router = express.Router();

router.use(useCollection('directus_webhooks'));

router.post(
	'/',
	asyncHandler(async (req, res, next) => {
		const service = new WebhooksService({ accountability: req.accountability });
		const primaryKey = await service.create(req.body);
		const item = await service.readByKey(primaryKey, req.sanitizedQuery);

		res.locals.payload = { data: item || null };
		try {
			const item = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: item || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.get(
@@ -28,7 +42,8 @@ router.get(

		res.locals.payload = { data: records || null, meta };
		return next();
	})
	}),
	respond
);

router.get(
@@ -40,7 +55,8 @@ router.get(

		res.locals.payload = { data: record || null };
		return next();
	})
	}),
	respond
);

router.patch(
@@ -49,11 +65,21 @@ router.patch(
		const service = new WebhooksService({ accountability: req.accountability });
		const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
		const primaryKey = await service.update(req.body, pk as any);
		const item = await service.readByKey(primaryKey, req.sanitizedQuery);

		res.locals.payload = { data: item || null };
		try {
			const item = await service.readByKey(primaryKey, req.sanitizedQuery);
			res.locals.payload = { data: item || null };
		} catch (error) {
			if (error instanceof ForbiddenException) {
				return next();
			}

			throw error;
		}

		return next();
	})
	}),
	respond
);

router.delete(
@@ -64,7 +90,8 @@ router.delete(
		await service.delete(pk as any);

		return next();
	})
	}),
	respond
);

export default router;

@@ -7,13 +7,16 @@ type Migration = {
	version: string;
	name: string;
	timestamp: Date;
}
};

export default async function run(database: Knex, direction: 'up' | 'down' | 'latest') {
	let migrationFiles = await fse.readdir(__dirname);
	migrationFiles = migrationFiles.filter((file: string) => file !== 'run.ts');

	const completedMigrations = await database.select<Migration[]>('*').from('directus_migrations').orderBy('version');
	const completedMigrations = await database
		.select<Migration[]>('*')
		.from('directus_migrations')
		.orderBy('version');

	const migrations = migrationFiles.map((migrationFile) => {
		const version = migrationFile.split('-')[0];
@@ -24,7 +27,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
			file: migrationFile,
			version,
			name,
			completed
			completed,
		};
	});

@@ -51,7 +54,9 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la

		const { up } = require(path.join(__dirname, nextVersion.file));
		await up(database);
		await database.insert({ version: nextVersion.version, name: nextVersion.name }).into('directus_migrations');
		await database
			.insert({ version: nextVersion.version, name: nextVersion.name })
			.into('directus_migrations');
	}

	async function down() {
@@ -61,7 +66,9 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
			throw Error('Nothing to downgrade');
		}

		const migration = migrations.find((migration) => migration.version === currentVersion.version);
		const migration = migrations.find(
			(migration) => migration.version === currentVersion.version
		);

		if (!migration) {
			throw new Error('Couldnt find migration');
@@ -77,7 +84,9 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
			if (migration.completed === false) {
				const { up } = require(path.join(__dirname, migration.file));
				await up(database);
				await database.insert({ version: migration.version, name: migration.name }).into('directus_migrations');
				await database
					.insert({ version: migration.version, name: migration.name })
					.into('directus_migrations');
			}
		}
	}

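The runner above records completed migrations in the directus_migrations table. A condensed restatement of its version selection, assuming the sorted `migrations` shape built above:

type MigrationState = { version: string; file: string; completed: boolean };

// 'up' applies the first migration not yet recorded as completed;
// 'down' targets the most recently completed one and runs its down() export.
function nextUp(migrations: MigrationState[]): MigrationState | undefined {
	return migrations.find((migration) => migration.completed === false);
}

function latestCompleted(migrations: MigrationState[]): MigrationState | undefined {
	return [...migrations].reverse().find((migration) => migration.completed === true);
}
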
@@ -1,207 +1,272 @@
|
||||
import { AST, NestedCollectionAST } from '../types/ast';
|
||||
import { clone, uniq, pick } from 'lodash';
|
||||
import { clone, cloneDeep, uniq, pick } from 'lodash';
|
||||
import database from './index';
|
||||
import SchemaInspector from 'knex-schema-inspector';
|
||||
import { Query, Item } from '../types';
|
||||
import PayloadService from '../services/payload';
|
||||
import { PayloadService } from '../services/payload';
|
||||
import applyQuery from '../utils/apply-query';
|
||||
import Knex from 'knex';
|
||||
import Knex, { QueryBuilder } from 'knex';
|
||||
|
||||
type RunASTOptions = {
|
||||
query?: AST['query'],
|
||||
knex?: Knex
|
||||
}
|
||||
query?: AST['query'];
|
||||
knex?: Knex;
|
||||
child?: boolean;
|
||||
};
|
||||
|
||||
export default async function runAST(
|
||||
originalAST: AST,
|
||||
options?: RunASTOptions
|
||||
): Promise<null | Item | Item[]> {
|
||||
const ast = cloneDeep(originalAST);
|
||||
|
||||
export default async function runAST(ast: AST, options?: RunASTOptions) {
|
||||
const query = options?.query || ast.query;
|
||||
const knex = options?.knex || database;
|
||||
|
||||
// Retrieve the database columns to select in the current AST
|
||||
const { columnsToSelect, primaryKeyField, nestedCollectionASTs } = await parseCurrentLevel(
|
||||
ast,
|
||||
knex
|
||||
);
|
||||
|
||||
// The actual knex query builder instance. This is a promise that resolves with the raw items from the db
|
||||
const dbQuery = await getDBQuery(knex, ast.name, columnsToSelect, query, primaryKeyField);
|
||||
|
||||
const rawItems: Item | Item[] = await dbQuery;
|
||||
|
||||
if (!rawItems) return null;
|
||||
|
||||
// Run the items through the special transforms
|
||||
const payloadService = new PayloadService(ast.name, { knex });
|
||||
let items = await payloadService.processValues('read', rawItems);
|
||||
|
||||
if (!items || items.length === 0) return items;
|
||||
|
||||
// Apply the `_in` filters to the nested collection batches
|
||||
const nestedASTs = applyParentFilters(nestedCollectionASTs, items);
|
||||
|
||||
for (const nestedAST of nestedASTs) {
|
||||
let tempLimit: number | null = null;
|
||||
|
||||
// Nested o2m-items are fetched from the db in a single query. This means that we're fetching
|
||||
// all nested items for all parent items at once. Because of this, we can't limit that query
|
||||
// to the "standard" item limit. Instead of _n_ nested items per parent item, it would mean
|
||||
// that there's _n_ items, which are then divided on the parent items. (no good)
|
||||
if (isO2M(nestedAST) && typeof nestedAST.query.limit === 'number') {
|
||||
tempLimit = nestedAST.query.limit;
|
||||
nestedAST.query.limit = -1;
|
||||
}
|
||||
|
||||
let nestedItems = await runAST(nestedAST, { knex, child: true });
|
||||
|
||||
if (nestedItems) {
|
||||
// Merge all fetched nested records with the parent items
|
||||
items = mergeWithParentItems(nestedItems, items, nestedAST, tempLimit);
|
||||
}
|
||||
}
|
||||
|
||||
// During the fetching of data, we have to inject a couple of required fields for the child nesting
|
||||
// to work (primary / foreign keys) even if they're not explicitly requested. After all fetching
|
||||
// and nesting is done, we parse through the output structure, and filter out all non-requested
|
||||
// fields
|
||||
if (options?.child !== true) {
|
||||
items = removeTemporaryFields(items, originalAST, primaryKeyField);
|
||||
}
|
||||
|
||||
return items;
|
||||
}
|
||||
|
||||
async function parseCurrentLevel(ast: AST, knex: Knex) {
|
||||
const schemaInspector = SchemaInspector(knex);
|
||||
|
||||
const toplevelFields: string[] = [];
|
||||
const tempFields: string[] = [];
|
||||
const nestedCollections: NestedCollectionAST[] = [];
|
||||
const primaryKeyField = await schemaInspector.primary(ast.name);
|
||||
|
||||
const columnsInCollection = (await schemaInspector.columns(ast.name)).map(
|
||||
({ column }) => column
|
||||
);
|
||||
|
||||
const payloadService = new PayloadService(ast.name, { knex });
|
||||
const columnsToSelect: string[] = [];
|
||||
const nestedCollectionASTs: NestedCollectionAST[] = [];
|
||||
|
||||
for (const child of ast.children) {
|
||||
if (child.type === 'field') {
|
||||
if (columnsInCollection.includes(child.name) || child.name === '*') {
|
||||
toplevelFields.push(child.name);
|
||||
columnsToSelect.push(child.name);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!child.relation) continue;
|
||||
|
||||
const m2o = isM2O(child);
|
||||
|
||||
if (m2o) {
|
||||
toplevelFields.push(child.relation.many_field);
|
||||
columnsToSelect.push(child.relation.many_field);
|
||||
}
|
||||
|
||||
nestedCollections.push(child);
|
||||
nestedCollectionASTs.push(child);
|
||||
}
|
||||
|
||||
/** Always fetch primary key in case there's a nested relation that needs it */
|
||||
if (toplevelFields.includes(primaryKeyField) === false) {
|
||||
tempFields.push(primaryKeyField);
|
||||
if (columnsToSelect.includes(primaryKeyField) === false) {
|
||||
columnsToSelect.push(primaryKeyField);
|
||||
}
|
||||
|
||||
let dbQuery = knex.select([...toplevelFields, ...tempFields]).from(ast.name);
|
||||
return { columnsToSelect, nestedCollectionASTs, primaryKeyField };
|
||||
}
|
||||
|
||||
// Query defaults
|
||||
query.limit = typeof query.limit === 'number' ? query.limit : 100;
|
||||
async function getDBQuery(
|
||||
knex: Knex,
|
||||
table: string,
|
||||
columns: string[],
|
||||
query: Query,
|
||||
primaryKeyField: string
|
||||
): Promise<QueryBuilder> {
|
||||
let dbQuery = knex.select(columns.map((column) => `${table}.${column}`)).from(table);
|
||||
|
||||
if (query.limit === -1) {
|
||||
delete query.limit;
|
||||
const queryCopy = clone(query);
|
||||
|
||||
queryCopy.limit = typeof queryCopy.limit === 'number' ? queryCopy.limit : 100;
|
||||
|
||||
if (queryCopy.limit === -1) {
|
||||
delete queryCopy.limit;
|
||||
}
|
||||
|
||||
query.sort = query.sort || [{ column: primaryKeyField, order: 'asc' }];
|
||||
|
||||
await applyQuery(ast.name, dbQuery, query);
|
||||
await applyQuery(table, dbQuery, queryCopy);
|
||||
|
||||
let results: Item[] = await dbQuery;
|
||||
return dbQuery;
|
||||
}
|
||||
|
||||
results = await payloadService.processValues('read', results);
|
||||
function applyParentFilters(
|
||||
nestedCollectionASTs: NestedCollectionAST[],
|
||||
parentItem: Item | Item[]
|
||||
) {
|
||||
const parentItems = Array.isArray(parentItem) ? parentItem : [parentItem];
|
||||
|
||||
for (const batch of nestedCollections) {
|
||||
const m2o = isM2O(batch);
|
||||
for (const nestedAST of nestedCollectionASTs) {
|
||||
if (!nestedAST.relation) continue;
|
||||
|
||||
let batchQuery: Query = {};
|
||||
let tempField: string;
|
||||
let tempLimit: number;
|
||||
if (isM2O(nestedAST)) {
|
||||
nestedAST.query = {
|
||||
...nestedAST.query,
|
||||
filter: {
|
||||
...(nestedAST.query.filter || {}),
|
||||
[nestedAST.relation.one_primary]: {
|
||||
_in: uniq(
|
||||
parentItems.map((res) => res[nestedAST.relation.many_field])
|
||||
).filter((id) => id),
|
||||
},
|
||||
},
|
||||
};
|
||||
} else {
|
||||
const relatedM2OisFetched = !!nestedAST.children.find((child) => {
|
||||
return child.type === 'field' && child.name === nestedAST.relation.many_field;
|
||||
});
|
||||
|
||||
if (m2o) {
|
||||
// Make sure we always fetch the nested items primary key field to ensure we have the key to match the item by
|
||||
const toplevelFields = batch.children
|
||||
.filter(({ type }) => type === 'field')
|
||||
.map(({ name }) => name);
|
||||
if (
|
||||
toplevelFields.includes(batch.relation.one_primary) === false &&
|
||||
toplevelFields.includes('*') === false
|
||||
) {
|
||||
tempField = batch.relation.one_primary;
|
||||
batch.children.push({ type: 'field', name: batch.relation.one_primary });
|
||||
if (relatedM2OisFetched === false) {
|
||||
nestedAST.children.push({ type: 'field', name: nestedAST.relation.many_field });
|
||||
}
|
||||
|
||||
batchQuery = {
|
||||
...batch.query,
|
||||
nestedAST.query = {
|
||||
...nestedAST.query,
|
||||
filter: {
|
||||
...(batch.query.filter || {}),
|
||||
[batch.relation.one_primary]: {
|
||||
_in: uniq(results.map((res) => res[batch.relation.many_field])).filter(
|
||||
...(nestedAST.query.filter || {}),
					[nestedAST.relation.many_field]: {
						_in: uniq(parentItems.map((res) => res[nestedAST.parentKey])).filter(
							(id) => id
						),
					},
				},
			};
		} else {
			// o2m
			// Make sure we always fetch the related m2o field to ensure we have the foreign key to
			// match the items by
			const toplevelFields = batch.children
				.filter(({ type }) => type === 'field')
				.map(({ name }) => name);
			if (
				toplevelFields.includes(batch.relation.many_field) === false &&
				toplevelFields.includes('*') === false
			) {
				tempField = batch.relation.many_field;
				batch.children.push({ type: 'field', name: batch.relation.many_field });
			}
		}

	return nestedCollectionASTs;
}

function mergeWithParentItems(
	nestedItem: Item | Item[],
	parentItem: Item | Item[],
	nestedAST: NestedCollectionAST,
	o2mLimit?: number | null
) {
	const nestedItems = Array.isArray(nestedItem) ? nestedItem : [nestedItem];
	const parentItems = clone(Array.isArray(parentItem) ? parentItem : [parentItem]);

	if (isM2O(nestedAST)) {
		for (const parentItem of parentItems) {
			const itemChild = nestedItems.find((nestedItem) => {
				return (
					nestedItem[nestedAST.relation.one_primary] === parentItem[nestedAST.fieldKey]
				);
			});

			parentItem[nestedAST.fieldKey] = itemChild || null;
		}
	} else {
		for (const parentItem of parentItems) {
			let itemChildren = nestedItems.filter((nestedItem) => {
				if (nestedItem === null) return false;
				if (Array.isArray(nestedItem[nestedAST.relation.many_field])) return true;

				return (
					nestedItem[nestedAST.relation.many_field] ===
						parentItem[nestedAST.relation.one_primary] ||
					nestedItem[nestedAST.relation.many_field]?.[nestedAST.relation.many_primary] ===
						parentItem[nestedAST.relation.one_primary]
				);
			});

			// We re-apply the requested limit here. This forces the _n_ nested items per parent concept
			if (o2mLimit !== null) {
				itemChildren = itemChildren.slice(0, o2mLimit);
				nestedAST.query.limit = o2mLimit;
			}

			batchQuery = {
				...batch.query,
				filter: {
					...(batch.query.filter || {}),
					[batch.relation.many_field]: {
						_in: uniq(results.map((res) => res[batch.parentKey])).filter((id) => id),
					},
				},
			};
			parentItem[nestedAST.fieldKey] = itemChildren.length > 0 ? itemChildren : null;
		}
	}

	/**
	 * The nested queries are done with a WHERE m2o IN (pk, pk, pk) query. We have to remove
	 * LIMIT from that equation to ensure we limit `n` items _per parent record_ instead of
	 * `n` items in total. This limit will then be re-applied in the stitching process
	 * down below
	 */
	if (typeof batchQuery.limit === 'number') {
		tempLimit = batchQuery.limit;
		batchQuery.limit = -1;
		return Array.isArray(parentItem) ? parentItems : parentItems[0];
	}

function removeTemporaryFields(
	rawItem: Item | Item[],
	ast: AST | NestedCollectionAST,
	primaryKeyField: string
): Item | Item[] {
	const rawItems: Item[] = Array.isArray(rawItem) ? rawItem : [rawItem];

	const items: Item[] = [];

	const fields = ast.children
		.filter((child) => child.type === 'field')
		.map((child) => child.name);
	const nestedCollections = ast.children.filter(
		(child) => child.type === 'collection'
	) as NestedCollectionAST[];

	for (const rawItem of rawItems) {
		if (rawItem === null) return rawItem;

		const item = fields.length > 0 ? pick(rawItem, fields) : rawItem[primaryKeyField];

		for (const nestedCollection of nestedCollections) {
			if (item[nestedCollection.fieldKey] !== null) {
				item[nestedCollection.fieldKey] = removeTemporaryFields(
					rawItem[nestedCollection.fieldKey],
					nestedCollection,
					nestedCollection.relatedKey
				);
			}
		}

		const nestedResults = await runAST(batch, { query: batchQuery, knex });

		results = results.map((record) => {
			if (m2o) {
				const nestedResult =
					clone(
						nestedResults.find((nestedRecord) => {
							return (
								nestedRecord[batch.relation.one_primary] === record[batch.fieldKey]
							);
						})
					) || null;

				if (tempField && nestedResult) {
					delete nestedResult[tempField];
				}

				return {
					...record,
					[batch.fieldKey]: nestedResult,
				};
			}

			// o2m
			let resultsForCurrentRecord = nestedResults
				.filter((nestedRecord) => {
					return (
						nestedRecord[batch.relation.many_field] ===
							record[batch.relation.one_primary] ||
						// In case of nested object:
						nestedRecord[batch.relation.many_field]?.[batch.relation.many_primary] ===
							record[batch.relation.one_primary]
					);
				})
				.map((nestedRecord) => {
					if (tempField) {
						delete nestedRecord[tempField];
					}

					return nestedRecord;
				});

			// Reapply LIMIT query on a per-record basis
			if (typeof tempLimit === 'number') {
				resultsForCurrentRecord = resultsForCurrentRecord.slice(0, tempLimit);
			}

			const newRecord = {
				...record,
				[batch.fieldKey]: resultsForCurrentRecord,
			};

			return newRecord;
		});
		items.push(item);
	}

	const nestedCollectionKeys = nestedCollections.map(({ fieldKey }) => fieldKey);

	if (toplevelFields.includes('*')) {
		return results;
	}

	return results.map((result) =>
		pick(result, uniq([...nestedCollectionKeys, ...toplevelFields]))
	);
	return Array.isArray(rawItem) ? items : items[0];
}

function isM2O(child: NestedCollectionAST) {
@@ -209,3 +274,7 @@ function isM2O(child: NestedCollectionAST) {
		child.relation.one_collection === child.name && child.relation.many_field === child.fieldKey
	);
}

function isO2M(child: NestedCollectionAST) {
	return isM2O(child) === false;
}
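The mergeWithParentItems / removeTemporaryFields pair above replaces the older inline batch stitching (the batchQuery / runAST fragments still visible in this hunk are the removed side of the diff). A minimal standalone sketch of the per-parent limit idea follows; the item shape, key names, and sample data are illustrative assumptions, not part of this commit:

type Row = Record<string, any>;

// Stitch o2m children onto their parents, then apply the limit per parent
// rather than across the whole result set — the reason LIMIT is stripped
// from the nested query and re-applied during stitching.
function stitchO2M(
	parents: Row[],
	children: Row[],
	foreignKey: string,
	parentKey: string,
	fieldKey: string,
	limit?: number
): Row[] {
	return parents.map((parent) => ({
		...parent,
		[fieldKey]: children
			.filter((child) => child[foreignKey] === parent[parentKey])
			.slice(0, limit ?? children.length),
	}));
}

// Two articles, three comments, at most one comment per article:
const articles = [{ id: 1 }, { id: 2 }];
const comments = [
	{ id: 10, article: 1 },
	{ id: 11, article: 1 },
	{ id: 12, article: 2 },
];
console.log(stitchO2M(articles, comments, 'article', 'id', 'comments', 1));
// -> [ { id: 1, comments: [ { id: 10, article: 1 } ] },
//      { id: 2, comments: [ { id: 12, article: 2 } ] } ]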
@@ -3,6 +3,7 @@ table: directus_collections
columns:
  collection:
    type: string
    length: 64
    primary: true
  icon:
    type: string
@@ -20,7 +21,7 @@ columns:
    type: boolean
    nullable: false
    default: false
  translation:
  translations:
    type: json
  archive_field:
    type: string

@@ -14,6 +14,7 @@ columns:
    type: string
    length: 128
    nullable: false
    unique: true
  password:
    type: string
    length: 255
@@ -58,7 +59,7 @@ columns:
  token:
    type: string
    length: 255
  last_login:
  last_access:
    type: timestamp
  last_page:
    type: string

@@ -52,7 +52,7 @@ columns:
    references:
      table: directus_fields
      column: id
  translation:
  translations:
    type: json
  note:
    type: text

@@ -7,9 +7,9 @@ columns:
    type: string
    length: 45
    nullable: false
  action_by:
  user:
    type: uuid
  action_on:
  timestamp:
    type: timestamp
    nullable: false
    default: '$now'

@@ -9,7 +9,7 @@ columns:
    type: string
    length: 255
    nullable: false
  parent_folder:
  parent:
    type: uuid
    references:
      table: directus_folders

@@ -6,23 +6,30 @@ columns:
  name:
    type: string
    length: 255
    nullable: false
  method:
    type: string
    length: 10
    default: POST
    nullable: false
  url:
    type: string
    length: 255
    nullable: false
  status:
    type: string
    length: 10
    default: inactive
    default: active
    nullable: false
  data:
    type: boolean
    default: false
    default: true
    nullable: false
  actions:
    type: string
    length: 100
    nullable: false
  collections:
    type: string
    length: 255
    nullable: false
@@ -6,24 +6,47 @@ defaults:
  singleton: false
  icon: null
  note: null
  translation: null
  translations: null
  display_template: null

data:
  - collection: directus_activity
    note: Accountability logs for all events
  - collection: directus_collections
    icon: list_alt
    note: Additional collection configuration and metadata
  - collection: directus_fields
    icon: input
    note: Additional field configuration and metadata
  - collection: directus_files
    icon: folder
    note: Metadata for all managed file assets
  - collection: directus_folders
    note: Provides virtual directories for files
  - collection: directus_permissions
    icon: admin_panel_settings
    note: Access permissions for each role
  - collection: directus_presets
    icon: bookmark_border
    note: Presets for collection defaults and bookmarks
  - collection: directus_relations
    icon: merge_type
    note: Relationship configuration and metadata
  - collection: directus_revisions
    note: Data snapshots for all activity
  - collection: directus_roles
    icon: supervised_user_circle
    note: Permission groups for system users
  - collection: directus_sessions
    note: User session information
  - collection: directus_settings
    singleton: true
    note: Project configuration options
  - collection: directus_users
    archive_field: status
    archive_value: archived
    unarchive_value: draft
    icon: people_alt
    note: System users for the platform
  - collection: directus_webhooks
    note: Configuration for event-based HTTP requests
@@ -38,16 +38,47 @@ data:
    layout: tabular
    layout_query:
      tabular:
        sort: -action_on
        sort: -timestamp
        fields:
          - action
          - collection
          - action_on
          - action_by
          - timestamp
          - user
    layout_options:
      tabular:
        widths:
          action: 100
          collection: 210
          action_on: 240
          action_by: 240
          timestamp: 240
          user: 240

  - collection: directus_webhooks
    layout: tabular
    layout_query:
      tabular:
        fields:
          - status
          - name
          - method
          - url
    layout_options:
      tabular:
        widths:
          status: 36
          name: 300

  - collection: directus_roles
    layout: tabular
    layout_query:
      tabular:
        fields:
          - icon
          - name
          - description
    layout_options:
      tabular:
        widths:
          icon: 36
          name: 248
          description: 500
@@ -32,8 +32,13 @@ data:
    many_primary: id
    one_collection: directus_users
    one_primary: id
  - many_collection: directus_presets
    many_field: role
    many_primary: id
    one_collection: directus_roles
    one_primary: id
  - many_collection: directus_folders
    many_field: parent_folder
    many_field: parent
    many_primary: id
    one_collection: directus_folders
    one_primary: id
@@ -54,7 +59,7 @@ data:
    one_field: fields
    one_primary: id
  - many_collection: directus_activity
    many_field: action_by
    many_field: user
    many_primary: id
    one_collection: directus_users
    one_primary: id
@@ -63,7 +63,7 @@ fields:
    sort: 7
    width: half
  - collection: directus_collections
    field: translation
    field: translations
    special: json
    interface: repeater
    options:
@@ -78,7 +78,7 @@ fields:
          interface: system-language
          width: half
        - field: translation
          name: Translation
          name: translation
          type: string
          meta:
            interface: text-input

@@ -18,6 +18,7 @@ fields:
  - collection: directus_roles
    field: icon
    interface: icon
    display: icon
    locked: true
    sort: 2
    width: half

@@ -33,7 +33,7 @@ fields:
    locked: true
    special: csv
  - collection: directus_fields
    field: translation
    field: translations
    hidden: true
    locked: true
    special: json

@@ -9,9 +9,9 @@ fields:
      iconRight: title
      placeholder: My project...
    sort: 1
    translation:
    translations:
      locale: en-US
      translation: Name
      translations: Name
    width: half
  - collection: directus_settings
    field: project_url
@@ -21,9 +21,9 @@ fields:
      iconRight: link
      placeholder: https://example.com
    sort: 2
    translation:
    translations:
      locale: en-US
      translation: Website
      translations: Website
    width: half
  - collection: directus_settings
    field: project_color
@@ -31,9 +31,9 @@ fields:
    locked: true
    note: Login & Logo Background
    sort: 3
    translation:
    translations:
      locale: en-US
      translation: Brand Color
      translations: Brand Color
    width: half
  - collection: directus_settings
    field: project_logo
@@ -41,9 +41,9 @@ fields:
    locked: true
    note: White 40x40 SVG/PNG
    sort: 4
    translation:
    translations:
      locale: en-US
      translation: Brand Logo
      translations: Brand Logo
    width: half
  - collection: directus_settings
    field: public_divider
@@ -61,18 +61,18 @@ fields:
    interface: file
    locked: true
    sort: 6
    translation:
    translations:
      locale: en-US
      translation: Login Foreground
      translations: Login Foreground
    width: half
  - collection: directus_settings
    field: public_background
    interface: file
    locked: true
    sort: 7
    translation:
    translations:
      locale: en-US
      translation: Login Background
      translations: Login Background
    width: half
  - collection: directus_settings
    field: public_note
@@ -199,17 +199,6 @@ fields:
          text: Presets Only
    sort: 14
    width: half
  - collection: directus_settings
    field: misc_divider
    interface: divider
    locked: true
    options:
      icon: pending
      title: Miscellaneous
      color: '#2F80ED'
    special: alias
    sort: 15
    width: full
  - collection: directus_settings
    field: id
    hidden: true

@@ -9,11 +9,18 @@ fields:
    field: name
    interface: text-input
    locked: true
    options:
      iconRight: title
    sort: 1
    width: full
  - collection: directus_webhooks
    field: method
    interface: dropdown
    display: labels
    display_options:
      defaultBackground: "#ECEFF1"
      choices: null
      format: false
    locked: true
    options:
      choices:
@@ -32,6 +39,20 @@ fields:
  - collection: directus_webhooks
    field: status
    interface: dropdown
    display: labels
    display_options:
      defaultColor: "#B0BEC5"
      defaultBackground: "#ECEFF1"
      showAsDot: true
      choices:
        - text: Active
          value: active
          foreground: "#607D8B"
          background: "#2F80ED"
        - text: Inactive
          value: inactive
          foreground: "#607D8B"
          background: "#ECEFF1"
    locked: true
    options:
      choices:
@@ -46,8 +67,7 @@ fields:
    interface: toggle
    locked: true
    options:
      choices:
      label: Include item data in request
      label: Send Event Data
    sort: 5
    width: half
  - collection: directus_webhooks

@@ -3,7 +3,7 @@ table: directus_activity
fields:
  - collection: directus_activity
    field: action
    display: badge
    display: labels
    display_options:
      defaultForeground: '#263238'
      defaultBackground: '#eceff1'
@@ -30,12 +30,12 @@ fields:
    display_options:
      icon: true
  - collection: directus_activity
    field: action_on
    field: timestamp
    display: datetime
    options:
      relative: true
  - collection: directus_activity
    field: action_by
    field: user
    display: user
  - collection: directus_activity
    field: comment

@@ -11,5 +11,5 @@ hidden: false
sort: null
width: full
group: null
translation: null
translations: null
note: null
@@ -22,14 +22,14 @@ type TableSeed = {
			column: string;
		};
	};
}
}
};
};

type RowSeed = {
	table: string;
	defaults: Record<string, any>;
	data: Record<string, any>[];
}
};

type FieldSeed = {
	table: string;
@@ -47,10 +47,10 @@ type FieldSeed = {
		sort: number | null;
		width: string | null;
		group: number | null;
		translation: Record<string, any> | null;
		translations: Record<string, any> | null;
		note: string | null;
	}[];
}
};

export default async function runSeed(database: Knex) {
	const exists = await database.schema.hasTable('directus_collections');
@@ -68,10 +68,13 @@ async function createTables(database: Knex) {
	const tableSeeds = await fse.readdir(path.resolve(__dirname, './01-tables/'));

	for (const tableSeedFile of tableSeeds) {
		const yamlRaw = await fse.readFile(path.resolve(__dirname, './01-tables', tableSeedFile), 'utf8');
		const yamlRaw = await fse.readFile(
			path.resolve(__dirname, './01-tables', tableSeedFile),
			'utf8'
		);
		const seedData = yaml.safeLoad(yamlRaw) as TableSeed;

		await database.schema.createTable(seedData.table, tableBuilder => {
		await database.schema.createTable(seedData.table, (tableBuilder) => {
			for (const [columnName, columnInfo] of Object.entries(seedData.columns)) {
				let column: ColumnBuilder;

@@ -129,7 +132,10 @@ async function insertRows(database: Knex) {
	const rowSeeds = await fse.readdir(path.resolve(__dirname, './02-rows/'));

	for (const rowSeedFile of rowSeeds) {
		const yamlRaw = await fse.readFile(path.resolve(__dirname, './02-rows', rowSeedFile), 'utf8');
		const yamlRaw = await fse.readFile(
			path.resolve(__dirname, './02-rows', rowSeedFile),
			'utf8'
		);
		const seedData = yaml.safeLoad(yamlRaw) as RowSeed;

		const dataWithDefaults = seedData.data.map((row) => {
@@ -149,11 +155,17 @@ async function insertRows(database: Knex) {
async function insertFields(database: Knex) {
	const fieldSeeds = await fse.readdir(path.resolve(__dirname, './03-fields/'));

	const defaultsYaml = await fse.readFile(path.resolve(__dirname, './03-fields/_defaults.yaml'), 'utf8');
	const defaultsYaml = await fse.readFile(
		path.resolve(__dirname, './03-fields/_defaults.yaml'),
		'utf8'
	);
	const defaults = yaml.safeLoad(defaultsYaml) as FieldSeed;

	for (const fieldSeedFile of fieldSeeds) {
		const yamlRaw = await fse.readFile(path.resolve(__dirname, './03-fields', fieldSeedFile), 'utf8');
		const yamlRaw = await fse.readFile(
			path.resolve(__dirname, './03-fields', fieldSeedFile),
			'utf8'
		);
		const seedData = yaml.safeLoad(yamlRaw) as FieldSeed;

		if (fieldSeedFile === '_defaults.yaml') {
@@ -1,5 +1,8 @@
import { EventEmitter2 } from 'eventemitter2';

const emitter = new EventEmitter2({ wildcard: true, verboseMemoryLeak: true });
const emitter = new EventEmitter2({ wildcard: true, verboseMemoryLeak: true, delimiter: '.' });

// No-op function to ensure we never end up with no data
emitter.on('*.*.before', input => input);

export default emitter;
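Since the emitter now runs with wildcard matching and an explicit '.' delimiter, segmented event names can be subscribed to by pattern, which is what the '*.*.before' no-op above relies on. A small illustration — the event name 'items.create.before' is an assumed example, not taken from this diff:

import { EventEmitter2 } from 'eventemitter2';

const emitter = new EventEmitter2({ wildcard: true, verboseMemoryLeak: true, delimiter: '.' });

// Matches any three-segment event ending in ".before", e.g. "items.create.before".
emitter.on('*.*.before', (input) => {
	console.log('before-hook received:', input);
	return input; // pass the payload through unchanged
});

emitter.emitAsync('items.create.before', { title: 'Hello' });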
@@ -9,11 +9,11 @@ import { clone } from 'lodash';
dotenv.config();

const defaults: Record<string, any> = {
	PORT: 41201,
	PUBLIC_URL: 'http://localhost:41201',
	PORT: 8055,
	PUBLIC_URL: 'http://localhost:8055',

	STORAGE_LOCATIONS: 'local',
	STORAGE_LOCAL_PUBLIC_URL: 'http://localhost:41201/uploads',
	STORAGE_LOCAL_PUBLIC_URL: 'http://localhost:8055/uploads',
	STORAGE_LOCAL_DRIVER: 'local',
	STORAGE_LOCAL_ROOT: './uploads',
@@ -1,7 +0,0 @@
import { BaseException } from './base';

export class CollectionNotFoundException extends BaseException {
	constructor(collection: string) {
		super(`Collection "${collection}" doesn't exist.`, 404, 'COLLECTION_NOT_FOUND');
	}
}
@@ -4,7 +4,7 @@ import { FilterOperator } from '../types';

type FailedValidationExtensions = {
	field: string;
	type: FilterOperator;
	type: FilterOperator | 'required';
	valid?: number | string | (number | string)[];
	invalid?: number | string | (number | string)[];
	substring?: string;
@@ -16,8 +16,6 @@ export class FailedValidationException extends BaseException {
			field: error.path[0] as string,
		};

		console.log(error);

		const joiType = error.type;

		// eq | in | null | empty
@@ -94,6 +92,11 @@ export class FailedValidationException extends BaseException {
			extensions.substring = error.context?.substring;
		}

		// required
		if (joiType.endsWith('required')) {
			extensions.type = 'required';
		}

		super(error.message, 400, 'FAILED_VALIDATION', extensions);
	}
}

@@ -1,11 +0,0 @@
import { BaseException } from './base';

export class FieldNotFoundException extends BaseException {
	constructor(collection: string, field: string) {
		super(
			`Field "${field}" in collection "${collection}" doesn't exist.`,
			404,
			'FIELD_NOT_FOUND'
		);
	}
}
@@ -10,6 +10,6 @@ type Extensions = {

export class ForbiddenException extends BaseException {
	constructor(message = `You don't have permission to access this.`, extensions?: Extensions) {
		super(message, 403, 'NO_PERMISSION', extensions);
		super(message, 403, 'FORBIDDEN', extensions);
	}
}

@@ -1,14 +1,10 @@
export * from './base';
export * from './collection-not-found';
export * from './failed-validation';
export * from './field-not-found';
export * from './forbidden';
export * from './hit-rate-limit';
export * from './invalid-credentials';
export * from './invalid-otp';
export * from './invalid-payload';
export * from './invalid-query';
export * from './item-limit';
export * from './item-not-found';
export * from './route-not-found';
export * from './service-unavailable';

@@ -1,7 +0,0 @@
import { BaseException } from './base';

export class ItemLimitException extends BaseException {
	constructor(message: string) {
		super(message, 400, 'ITEM_LIMIT_REACHED');
	}
}
@@ -1,7 +0,0 @@
import { BaseException } from './base';

export class ItemNotFoundException extends BaseException {
	constructor(id: string | number, collection: string) {
		super(`Item "${id}" doesn't exist in "${collection}".`, 404, 'ITEM_NOT_FOUND');
	}
}
api/src/extensions.ts (new file)
@@ -0,0 +1,111 @@
import listFolders from './utils/list-folders';
import path from 'path';
import env from './env';
import { ServiceUnavailableException } from './exceptions';
import express, { Router } from 'express';
import emitter from './emitter';
import logger from './logger';
import { HookRegisterFunction, EndpointRegisterFunction } from './types';
import { ensureDir } from 'fs-extra';

import * as exceptions from './exceptions';
import * as services from './services';
import database from './database';

export async function ensureFoldersExist() {
	const folders = ['endpoints', 'hooks', 'interfaces', 'modules', 'layouts', 'displays'];

	for (const folder of folders) {
		const folderPath = path.resolve(env.EXTENSIONS_PATH, folder);
		try {
			await ensureDir(folderPath);
		} catch (err) {
			logger.warn(err);
		}
	}
}

export async function listExtensions(type: string) {
	const extensionsPath = env.EXTENSIONS_PATH as string;
	const location = path.join(extensionsPath, type);

	try {
		return await listFolders(location);
	} catch (err) {
		if (err.code === 'ENOENT') {
			throw new ServiceUnavailableException(
				`Extension folder "extensions/${type}" couldn't be opened`,
				{
					service: 'extensions',
				}
			);
		}
		throw err;
	}
}

export async function registerExtensions(router: Router) {
	await ensureFoldersExist();
	let hooks: string[] = [];
	let endpoints: string[] = [];

	try {
		hooks = await listExtensions('hooks');
		registerHooks(hooks);
	} catch (err) {
		logger.warn(err);
	}

	try {
		endpoints = await listExtensions('endpoints');
		registerEndpoints(endpoints, router);
	} catch (err) {
		logger.warn(err);
	}
}

function registerHooks(hooks: string[]) {
	const extensionsPath = env.EXTENSIONS_PATH as string;

	for (const hook of hooks) {
		try {
			registerHook(hook);
		} catch (error) {
			logger.warn(`Couldn't register hook "${hook}"`);
			logger.info(error);
		}
	}

	function registerHook(hook: string) {
		const hookPath = path.resolve(extensionsPath, 'hooks', hook, 'index.js');
		const register: HookRegisterFunction = require(hookPath);
		const events = register({ services, exceptions, env, database });

		for (const [event, handler] of Object.entries(events)) {
			emitter.on(event, handler);
		}
	}
}

function registerEndpoints(endpoints: string[], router: Router) {
	const extensionsPath = env.EXTENSIONS_PATH as string;

	for (const endpoint of endpoints) {
		try {
			registerEndpoint(endpoint);
		} catch (error) {
			logger.warn(`Couldn't register endpoint "${endpoint}"`);
			logger.info(error);
		}
	}

	function registerEndpoint(endpoint: string) {
		const endpointPath = path.resolve(extensionsPath, 'endpoints', endpoint, 'index.js');
		const register: EndpointRegisterFunction = require(endpointPath);

		const scopedRouter = express.Router();
		router.use(`/${endpoint}/`, scopedRouter);

		register(scopedRouter, { services, exceptions, env, database });
	}
}
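registerHook loads each hook package's index.js and expects a register function that returns an event-to-handler map, while registerEndpoint passes a scoped router into the exported function. A hypothetical pair of extension entry points matching those call signatures — the folder names, event name, and route are invented for illustration, and they're written as plain JS because the loader require()s index.js:

// extensions/hooks/log-responses/index.js
module.exports = function register({ services, exceptions, env, database }) {
	return {
		// Keys become event names on the central emitter; values are the handlers.
		response: (info) => console.log(`${info.request.method} ${info.request.uri} -> ${info.response.status}`),
	};
};

// extensions/endpoints/hello/index.js — mounted at /hello/ by registerEndpoints
module.exports = function register(router, { services, exceptions, env, database }) {
	router.get('/', (req, res) => res.json({ hello: 'world' }));
};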
api/src/grant.ts (new file)
@@ -0,0 +1,39 @@
/**
 * Grant is the OAuth library
 */

import env from './env';

const enabledProviders = (env.OAUTH_PROVIDERS as string)
	.split(',')
	.map((provider) => provider.trim().toLowerCase());

const config: any = {
	defaults: {
		origin: env.PUBLIC_URL,
		transport: 'session',
		prefix: '/auth/oauth',
		response: ['tokens', 'profile'],
	},
};

for (const [key, value] of Object.entries(env)) {
	if (key.startsWith('OAUTH') === false) continue;

	const parts = key.split('_');
	const provider = parts[1].toLowerCase();

	if (enabledProviders.includes(provider) === false) continue;

	// OAUTH_<PROVIDER>_<SETTING> = <VALUE>
	parts.splice(0, 2);

	const configKey = parts.join('_').toLowerCase();

	config[provider] = {
		...(config[provider] || {}),
		[configKey]: value,
	};
}

export default config;
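The loop above folds flat OAUTH_* environment variables into Grant's nested per-provider config. Assuming a GitHub provider purely for illustration (the variable names follow the OAUTH_<PROVIDER>_<SETTING> pattern; the values are made up):

// Given:
//   OAUTH_PROVIDERS="github"
//   OAUTH_GITHUB_KEY="abc123"
//   OAUTH_GITHUB_SECRET="s3cret"
// the exported config ends up shaped like:
const example = {
	defaults: {
		origin: 'http://localhost:8055', // PUBLIC_URL default from this same commit
		transport: 'session',
		prefix: '/auth/oauth',
		response: ['tokens', 'profile'],
	},
	github: {
		key: 'abc123',
		secret: 's3cret',
	},
};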
@@ -49,13 +49,9 @@ const authenticate: RequestHandler = asyncHandler(async (req, res, next) => {
			throw new InvalidCredentialsException();
		}

		/** @TODO verify user status */

		req.accountability.user = payload.id;
		req.accountability.role = user.role;
		req.accountability.admin = user.admin_access === true || user.admin_access == 1;

		return next();
	} else {
		// Try finding the user with the provided token
		const user = await database
@@ -77,13 +73,10 @@ const authenticate: RequestHandler = asyncHandler(async (req, res, next) => {
			req.accountability.admin = user.admin_access === true || user.admin_access == 1;
		}

		/**
		 * @TODO
		 * Implement static tokens
		 *
		 * @NOTE
		 * We'll silently ignore wrong tokens. This makes sure we prevent brute-forcing static tokens
		 */
	if (req.accountability?.user) {
		await database('directus_users').update({ last_access: new Date() }).where({ id: req.accountability.user });
	}

	return next();
});
@@ -1,31 +1,62 @@
import { RequestHandler } from "express";
import asyncHandler from "express-async-handler";
import env from "../env";
import { getCacheKey } from "../utils/get-cache-key";
import { RequestHandler } from 'express';
import asyncHandler from 'express-async-handler';
import env from '../env';
import { getCacheKey } from '../utils/get-cache-key';
import cache from '../cache';
import { Transform, transforms } from 'json2csv';
import { PassThrough } from "stream";
import { PassThrough } from 'stream';

export const respond: RequestHandler = asyncHandler(async (req, res) => {
	if (req.method.toLowerCase() === 'get' && env.CACHE_ENABLED === true && cache && !req.sanitizedQuery.export) {
	if (
		req.method.toLowerCase() === 'get' &&
		env.CACHE_ENABLED === true &&
		cache &&
		!req.sanitizedQuery.export
	) {
		const key = getCacheKey(req);
		await cache.set(key, res.locals.payload);
	}

	if (req.sanitizedQuery.export === 'json') {
		res.attachment('export.json');
		res.set('Content-Type', 'application/json');
		return res.status(200).send(JSON.stringify(res.locals.payload, null, '\t'));
	}
	if (req.sanitizedQuery.export) {
		let filename = '';

	if (req.sanitizedQuery.export === 'csv') {
		res.attachment('export.csv');
		res.set('Content-Type', 'text/csv');
		const stream = new PassThrough();
		stream.end(Buffer.from(JSON.stringify(res.locals.payload.data), 'utf-8'));
		const json2csv = new Transform({ transforms: [transforms.flatten({ separator: '.' })] });
		return stream.pipe(json2csv).pipe(res);
		if (req.collection) {
			filename += req.collection;
		} else {
			filename += 'Export';
		}

		filename += ' ' + getDateFormatted();

		if (req.sanitizedQuery.export === 'json') {
			res.attachment(`${filename}.json`);
			res.set('Content-Type', 'application/json');
			return res.status(200).send(JSON.stringify(res.locals.payload, null, '\t'));
		}

		if (req.sanitizedQuery.export === 'csv') {
			res.attachment(`${filename}.csv`);
			res.set('Content-Type', 'text/csv');
			const stream = new PassThrough();
			stream.end(Buffer.from(JSON.stringify(res.locals.payload.data), 'utf-8'));
			const json2csv = new Transform({
				transforms: [transforms.flatten({ separator: '.' })],
			});
			return stream.pipe(json2csv).pipe(res);
		}
	}

	return res.json(res.locals.payload);
});

function getDateFormatted() {
	const date = new Date();

	let month = String(date.getMonth() + 1);
	if (month.length === 1) month = '0' + month;

	let day = String(date.getDate());
	if (day.length === 1) day = '0' + day;

	return `${date.getFullYear()}-${month}-${day} at ${date.getHours()}.${date.getMinutes()}.${date.getSeconds()}`;
}
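For reference, the new export branch names the download after the collection plus a timestamp. getDateFormatted zero-pads the month and day but not the hours, minutes, or seconds, so a CSV exported at 9:05:07 comes out like this (the collection name is assumed for illustration):

// Mirrors getDateFormatted() above: date parts are zero-padded, time parts are not.
const d = new Date(2020, 7, 9, 9, 5, 7);
const month = String(d.getMonth() + 1).padStart(2, '0');
const day = String(d.getDate()).padStart(2, '0');
const stamp = `${d.getFullYear()}-${month}-${day} at ${d.getHours()}.${d.getMinutes()}.${d.getSeconds()}`;
console.log(`articles ${stamp}.csv`); // "articles 2020-08-09 at 9.5.7.csv"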
api/src/middleware/response-hook.ts (new file)
@@ -0,0 +1,38 @@
import { RequestHandler } from 'express';
import asyncHandler from 'express-async-handler';
import emitter from '../emitter';
import logger from '../logger';

const responseHook: RequestHandler = asyncHandler((req, res, next) => {
	res.on('close', afterResponse);

	const startTime = process.hrtime();

	return next();

	function afterResponse() {
		res.removeListener('close', afterResponse);

		const info = {
			request: {
				method: req.method,
				uri: req.path,
				url: req.protocol + '://' + req.get('host') + req.originalUrl,
				size: req.socket.bytesRead,
				query: req.query,
				headers: req.headers,
			},
			response: {
				status: res.statusCode,
				size: (res as any)['_contentLength'] || res.getHeader('content-length'),
				headers: res.getHeaders(),
			},
			ip: req.headers['x-forwarded-for'] || req.connection.remoteAddress,
			duration: (process.hrtime(startTime)[1] / 1000000).toFixed(),
		};

		emitter.emitAsync('response', info).catch((err) => logger.warn(err));
	}
});

export default responseHook;
@@ -4,138 +4,24 @@
 */

import { RequestHandler } from 'express';
import { Accountability, Query, Sort, Filter, Meta } from '../types';
import logger from '../logger';
import { parseFilter } from '../utils/parse-filter';
import { sanitizeQuery } from '../utils/sanitize-query';

const sanitizeQuery: RequestHandler = (req, res, next) => {
const sanitizeQueryMiddleware: RequestHandler = (req, res, next) => {
	req.sanitizedQuery = {};
	if (!req.query) return;

	const query: Query = {
		fields: sanitizeFields(req.query.fields) || ['*'],
	};
	req.sanitizedQuery = sanitizeQuery(
		{
			fields: req.query.fields || '*',
			...req.query
		},
		req.accountability || null
	);

	if (req.query.limit !== undefined) {
		const limit = sanitizeLimit(req.query.limit);

		if (typeof limit === 'number') {
			query.limit = limit;
		}
	}

	if (req.query.sort) {
		query.sort = sanitizeSort(req.query.sort);
	}

	if (req.query.filter) {
		query.filter = sanitizeFilter(req.query.filter, req.accountability || null);
	}

	if (req.query.limit == '-1') {
		delete query.limit;
	}

	if (req.query.offset) {
		query.offset = sanitizeOffset(req.query.offset);
	}

	if (req.query.page) {
		query.page = sanitizePage(req.query.page);
	}

	if (req.query.single) {
		query.single = sanitizeSingle(req.query.single);
	}

	if (req.query.meta) {
		query.meta = sanitizeMeta(req.query.meta);
	}

	if (req.query.search && typeof req.query.search === 'string') {
		query.search = req.query.search;
	}

	if (req.query.export && typeof req.query.export === 'string' && ['json', 'csv'].includes(req.query.export)) {
		query.export = req.query.export as 'json' | 'csv';
	}

	req.sanitizedQuery = query;
	Object.freeze(req.sanitizedQuery);

	return next();
};

export default sanitizeQuery;
export default sanitizeQueryMiddleware;

function sanitizeFields(rawFields: any) {
	if (!rawFields) return;

	let fields: string[] = [];

	if (typeof rawFields === 'string') fields = rawFields.split(',');
	else if (Array.isArray(rawFields)) fields = rawFields as string[];

	return fields;
}

function sanitizeSort(rawSort: any) {
	let fields: string[] = [];

	if (typeof rawSort === 'string') fields = rawSort.split(',');
	else if (Array.isArray(rawSort)) fields = rawSort as string[];

	return fields.map((field) => {
		const order = field.startsWith('-') ? 'desc' : 'asc';
		const column = field.startsWith('-') ? field.substring(1) : field;
		return { column, order } as Sort;
	});
}

function sanitizeFilter(rawFilter: any, accountability: Accountability | null) {
	let filters: Filter = rawFilter;

	if (typeof rawFilter === 'string') {
		try {
			filters = JSON.parse(rawFilter);
		} catch {
			logger.warn('Invalid value passed for filter query parameter.');
		}
	}

	filters = parseFilter(filters, accountability);

	return filters;
}

function sanitizeLimit(rawLimit: any) {
	if (rawLimit === undefined || rawLimit === null) return null;
	return Number(rawLimit);
}

function sanitizeOffset(rawOffset: any) {
	return Number(rawOffset);
}

function sanitizePage(rawPage: any) {
	return Number(rawPage);
}

function sanitizeSingle(rawSingle: any) {
	return true;
}

function sanitizeMeta(rawMeta: any) {
	if (rawMeta === '*') {
		return Object.values(Meta);
	}

	if (rawMeta.includes(',')) {
		return rawMeta.split(',');
	}

	if (Array.isArray(rawMeta)) {
		return rawMeta;
	}

	return [rawMeta];
}
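To make the middleware's effect concrete, here is roughly how a raw query string maps onto req.sanitizedQuery, based on the sanitize helpers shown above (the collection, field names, and values are invented):

// GET /items/articles?sort=-published_on,title&limit=25
//
// After the middleware runs, req.sanitizedQuery is frozen and looks like:
const sanitized = {
	fields: ['*'],
	sort: [
		{ column: 'published_on', order: 'desc' },
		{ column: 'title', order: 'asc' },
	],
	limit: 25,
};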
@@ -6,7 +6,7 @@ import { validateDBConnection } from './database';
export default async function start() {
	await validateDBConnection();

	const port = env.NODE_ENV === 'development' ? 41201 : env.PORT;
	const port = env.NODE_ENV === 'development' ? 8055 : env.PORT;

	app.listen(port, () => {
		logger.info(`Server started at port ${port}`);
@@ -1,11 +1,11 @@
|
||||
import ItemsService from './items';
|
||||
import { ItemsService } from './items';
|
||||
import { AbstractServiceOptions } from '../types';
|
||||
|
||||
/**
|
||||
* @TODO only return activity of the collections you have access to
|
||||
*/
|
||||
|
||||
export default class ActivityService extends ItemsService {
|
||||
export class ActivityService extends ItemsService {
|
||||
constructor(options?: AbstractServiceOptions) {
|
||||
super('directus_activity', options);
|
||||
}
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
import { Transformation } from '../types/assets';
|
||||
import storage from '../storage';
|
||||
import sharp, { ResizeOptions } from 'sharp';
|
||||
import database from '../database';
|
||||
import path from 'path';
|
||||
import Knex from 'knex';
|
||||
import { Accountability, AbstractServiceOptions } from '../types';
|
||||
import { Accountability, AbstractServiceOptions, Transformation } from '../types';
|
||||
|
||||
export default class AssetsService {
|
||||
export class AssetsService {
|
||||
knex: Knex;
|
||||
accountability: Accountability | null;
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ import {
|
||||
} from '../exceptions';
|
||||
import { Session, Accountability, AbstractServiceOptions, Action } from '../types';
|
||||
import Knex from 'knex';
|
||||
import ActivityService from '../services/activity';
|
||||
import { ActivityService } from '../services/activity';
|
||||
import env from '../env';
|
||||
import { authenticator } from 'otplib';
|
||||
|
||||
@@ -22,7 +22,7 @@ type AuthenticateOptions = {
|
||||
otp?: string;
|
||||
};
|
||||
|
||||
export default class AuthenticationService {
|
||||
export class AuthenticationService {
|
||||
knex: Knex;
|
||||
accountability: Accountability | null;
|
||||
activityService: ActivityService;
|
||||
@@ -93,7 +93,7 @@ export default class AuthenticationService {
|
||||
if (this.accountability) {
|
||||
await this.activityService.create({
|
||||
action: Action.AUTHENTICATE,
|
||||
action_by: user.id,
|
||||
user: user.id,
|
||||
ip: this.accountability.ip,
|
||||
user_agent: this.accountability.userAgent,
|
||||
collection: 'directus_users',
|
||||
@@ -181,4 +181,22 @@ export default class AuthenticationService {
|
||||
const secret = user.tfa_secret;
|
||||
return authenticator.check(otp, secret);
|
||||
}
|
||||
|
||||
async verifyPassword(pk: string, password: string) {
|
||||
const userRecord = await this.knex
|
||||
.select('password')
|
||||
.from('directus_users')
|
||||
.where({ id: pk })
|
||||
.first();
|
||||
|
||||
if (!userRecord || !userRecord.password) {
|
||||
throw new InvalidCredentialsException();
|
||||
}
|
||||
|
||||
if ((await argon2.verify(userRecord.password, password)) === false) {
|
||||
throw new InvalidCredentialsException();
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,14 +11,15 @@ import {
|
||||
Item,
|
||||
PrimaryKey,
|
||||
} from '../types';
|
||||
import SchemaInspector from 'knex-schema-inspector';
|
||||
import Knex from 'knex';
|
||||
import { ForbiddenException, FailedValidationException } from '../exceptions';
|
||||
import { uniq, merge } from 'lodash';
|
||||
import { uniq, merge, flatten } from 'lodash';
|
||||
import generateJoi from '../utils/generate-joi';
|
||||
import ItemsService from './items';
|
||||
import { ItemsService } from './items';
|
||||
import { parseFilter } from '../utils/parse-filter';
|
||||
|
||||
export default class AuthorizationService {
|
||||
export class AuthorizationService {
|
||||
knex: Knex;
|
||||
accountability: Accountability | null;
|
||||
|
||||
@@ -139,12 +140,17 @@ export default class AuthorizationService {
|
||||
|
||||
const parsedPermissions = parseFilter(permissions.permissions, accountability);
|
||||
|
||||
ast.query = {
|
||||
...ast.query,
|
||||
filter: {
|
||||
_and: [ast.query.filter || {}, parsedPermissions],
|
||||
},
|
||||
};
|
||||
if (!ast.query.filter || Object.keys(ast.query.filter).length === 0) {
|
||||
ast.query.filter = { _and: [] };
|
||||
} else {
|
||||
ast.query.filter = { _and: [ast.query.filter] };
|
||||
}
|
||||
|
||||
if (parsedPermissions && Object.keys(parsedPermissions).length > 0) {
|
||||
ast.query.filter._and.push(parsedPermissions);
|
||||
}
|
||||
|
||||
if (ast.query.filter._and.length === 0) delete ast.query.filter._and;
|
||||
|
||||
if (permissions.limit && ast.query.limit && ast.query.limit > permissions.limit) {
|
||||
throw new ForbiddenException(
|
||||
@@ -185,29 +191,39 @@ export default class AuthorizationService {
|
||||
collection: string,
|
||||
payload: Partial<Item>[] | Partial<Item>
|
||||
): Promise<Partial<Item>[] | Partial<Item>> {
|
||||
const validationErrors: FailedValidationException[] = [];
|
||||
|
||||
let payloads = Array.isArray(payload) ? payload : [payload];
|
||||
|
||||
const permission = await this.knex
|
||||
.select<Permission>('*')
|
||||
.from('directus_permissions')
|
||||
.where({ action, collection, role: this.accountability?.role || null })
|
||||
.first();
|
||||
let permission: Permission | undefined;
|
||||
|
||||
if (!permission) throw new ForbiddenException();
|
||||
if (this.accountability?.admin === true) {
|
||||
permission = { id: 0, role: this.accountability?.role, collection, action, permissions: {}, validation: {}, limit: null, fields: '*', presets: {}, }
|
||||
} else {
|
||||
permission = await this.knex
|
||||
.select<Permission>('*')
|
||||
.from('directus_permissions')
|
||||
.where({ action, collection, role: this.accountability?.role || null })
|
||||
.first();
|
||||
|
||||
const allowedFields = permission.fields?.split(',') || [];
|
||||
// Check if you have permission to access the fields you're trying to acces
|
||||
|
||||
if (allowedFields.includes('*') === false) {
|
||||
for (const payload of payloads) {
|
||||
const keysInData = Object.keys(payload);
|
||||
const invalidKeys = keysInData.filter(
|
||||
(fieldKey) => allowedFields.includes(fieldKey) === false
|
||||
);
|
||||
if (!permission) throw new ForbiddenException();
|
||||
|
||||
if (invalidKeys.length > 0) {
|
||||
throw new ForbiddenException(
|
||||
`You're not allowed to ${action} field "${invalidKeys[0]}" in collection "${collection}".`
|
||||
const allowedFields = permission.fields?.split(',') || [];
|
||||
|
||||
if (allowedFields.includes('*') === false) {
|
||||
for (const payload of payloads) {
|
||||
const keysInData = Object.keys(payload);
|
||||
const invalidKeys = keysInData.filter(
|
||||
(fieldKey) => allowedFields.includes(fieldKey) === false
|
||||
);
|
||||
|
||||
if (invalidKeys.length > 0) {
|
||||
throw new ForbiddenException(
|
||||
`You're not allowed to ${action} field "${invalidKeys[0]}" in collection "${collection}".`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -216,16 +232,49 @@ export default class AuthorizationService {
|
||||
|
||||
payloads = payloads.map((payload) => merge({}, preset, payload));
|
||||
|
||||
const schema = generateJoi(permission.validation);
|
||||
const schemaInspector = SchemaInspector(this.knex);
|
||||
const columns = await schemaInspector.columnInfo(collection);
|
||||
|
||||
for (const payload of payloads) {
|
||||
const { error } = schema.validate(payload, { abortEarly: false });
|
||||
let requiredColumns: string[] = [];
|
||||
|
||||
if (error) {
|
||||
throw error.details.map((details) => new FailedValidationException(details));
|
||||
for (const column of columns) {
|
||||
const field = await this.knex.select<{ special: string }>('special').from('directus_fields').where({ collection, field: column.name }).first();
|
||||
const specials = (field?.special || '').split(',');
|
||||
const hasGenerateSpecial = ['uuid', 'date-created', 'role-created', 'user-created'].some((name) => specials.includes(name));
|
||||
const isRequired = column.is_nullable === false && column.has_auto_increment === false && column.default_value === null && hasGenerateSpecial === false;
|
||||
|
||||
if (isRequired) {
|
||||
requiredColumns.push(column.name);
|
||||
}
|
||||
}
|
||||
|
||||
if (requiredColumns.length > 0) {
|
||||
permission.validation = {
|
||||
_and: [
|
||||
permission.validation,
|
||||
{}
|
||||
]
|
||||
}
|
||||
|
||||
if (action === 'create') {
|
||||
for (const name of requiredColumns) {
|
||||
permission.validation._and[1][name] = {
|
||||
_required: true
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (const name of requiredColumns) {
|
||||
permission.validation._and[1][name] = {
|
||||
_nnull: true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
validationErrors.push(...this.validateJoi(permission.validation, payloads));
|
||||
|
||||
if (validationErrors.length > 0) throw validationErrors;
|
||||
|
||||
if (Array.isArray(payload)) {
|
||||
return payloads;
|
||||
} else {
|
||||
@@ -233,11 +282,49 @@ export default class AuthorizationService {
|
||||
}
|
||||
}
|
||||
|
||||
validateJoi(validation: Record<string, any>, payloads: Partial<Record<string, any>>[]): FailedValidationException[] {
|
||||
const errors: FailedValidationException[] = [];
|
||||
|
||||
/**
|
||||
* Note there can only be a single _and / _or per level
|
||||
*/
|
||||
|
||||
if (Object.keys(validation)[0] === '_and') {
|
||||
const subValidation = Object.values(validation)[0];
|
||||
const nestedErrors = flatten<FailedValidationException>(subValidation.map((subObj: Record<string, any>) => this.validateJoi(subObj, payloads))).filter((err?: FailedValidationException) => err);
|
||||
errors.push(...nestedErrors);
|
||||
}
|
||||
|
||||
if (Object.keys(validation)[0] === '_or') {
|
||||
const subValidation = Object.values(validation)[0];
|
||||
const nestedErrors = flatten<FailedValidationException>(subValidation.map((subObj: Record<string, any>) => this.validateJoi(subObj, payloads)));
|
||||
const allErrored = nestedErrors.every((err?: FailedValidationException) => err);
|
||||
|
||||
if (allErrored) {
|
||||
errors.push(...nestedErrors);
|
||||
}
|
||||
}
|
||||
|
||||
const schema = generateJoi(validation);
|
||||
|
||||
for (const payload of payloads) {
|
||||
const { error } = schema.validate(payload, { abortEarly: false });
|
||||
|
||||
if (error) {
|
||||
errors.push(...error.details.map((details) => new FailedValidationException(details)));
|
||||
}
|
||||
}
|
||||
|
||||
return errors;
|
||||
}
|
||||
|
||||
async checkAccess(
|
||||
action: PermissionsAction,
|
||||
collection: string,
|
||||
pk: PrimaryKey | PrimaryKey[]
|
||||
) {
|
||||
if (this.accountability?.admin === true) return;
|
||||
|
||||
const itemsService = new ItemsService(collection, { accountability: this.accountability });
|
||||
|
||||
try {
|
||||
|
||||
@@ -3,12 +3,11 @@ import { AbstractServiceOptions, Accountability, Collection, Relation } from '..
|
||||
import Knex from 'knex';
|
||||
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
|
||||
import SchemaInspector from 'knex-schema-inspector';
|
||||
import FieldsService from '../services/fields';
|
||||
import { omit } from 'lodash';
|
||||
import ItemsService from '../services/items';
|
||||
import { FieldsService } from '../services/fields';
|
||||
import { ItemsService } from '../services/items';
|
||||
import cache from '../cache';
|
||||
|
||||
export default class CollectionsService {
|
||||
export class CollectionsService {
|
||||
knex: Knex;
|
||||
accountability: Accountability | null;
|
||||
|
||||
@@ -61,6 +60,10 @@ export default class CollectionsService {
|
||||
throw new InvalidPayloadException(`The "collection" key is required.`);
|
||||
}
|
||||
|
||||
if (payload.collection.startsWith('directus_')) {
|
||||
throw new InvalidPayloadException(`Collections can't start with "directus_"`);
|
||||
}
|
||||
|
||||
if (await schemaInspector.hasTable(payload.collection)) {
|
||||
throw new InvalidPayloadException(
|
||||
`Collection "${payload.collection}" already exists.`
|
||||
@@ -129,16 +132,16 @@ export default class CollectionsService {
|
||||
|
||||
const tablesInDatabase = await schemaInspector.tableInfo();
|
||||
const tables = tablesInDatabase.filter((table) => collectionKeys.includes(table.name));
|
||||
const meta: any[] = await collectionItemsService.readByQuery({
|
||||
const meta = await collectionItemsService.readByQuery({
|
||||
filter: { collection: { _in: collectionKeys } },
|
||||
});
|
||||
}) as Collection['meta'][];
|
||||
|
||||
const collections: Collection[] = [];
|
||||
|
||||
for (const table of tables) {
|
||||
const collection: Collection = {
|
||||
collection: table.name,
|
||||
meta: meta.find((systemInfo) => systemInfo.collection === table.name) || null,
|
||||
meta: meta.find((systemInfo) => systemInfo?.collection === table.name) || null,
|
||||
schema: table,
|
||||
};
|
||||
|
||||
@@ -167,16 +170,16 @@ export default class CollectionsService {
|
||||
}
|
||||
|
||||
const tablesToFetchInfoFor = tablesInDatabase.map((table) => table.name);
|
||||
const meta: any[] = await collectionItemsService.readByQuery({
|
||||
const meta = await collectionItemsService.readByQuery({
|
||||
filter: { collection: { _in: tablesToFetchInfoFor } },
|
||||
});
|
||||
}) as Collection['meta'][];
|
||||
|
||||
const collections: Collection[] = [];
|
||||
|
||||
for (const table of tablesInDatabase) {
|
||||
const collection: Collection = {
|
||||
collection: table.name,
|
||||
meta: meta.find((systemInfo) => systemInfo.collection === table.name) || null,
|
||||
meta: meta.find((systemInfo) => systemInfo?.collection === table.name) || null,
|
||||
schema: table,
|
||||
};
|
||||
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
import listFolders from '../utils/list-folders';
|
||||
import path from 'path';
|
||||
import env from '../env';
|
||||
import { ServiceUnavailableException } from '../exceptions';
|
||||
|
||||
export default class ExtensionsService {
|
||||
async listExtensions(type: string) {
|
||||
const extensionsPath = env.EXTENSIONS_PATH as string;
|
||||
const location = path.join(extensionsPath, type);
|
||||
|
||||
try {
|
||||
return await listFolders(location);
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
throw new ServiceUnavailableException(`Extension folder couldn't be opened`, {
|
||||
service: 'extensions',
|
||||
});
|
||||
}
|
||||
console.log(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,40 +1,35 @@
|
||||
import database, { schemaInspector } from '../database';
|
||||
import { Field } from '../types/field';
|
||||
import { Accountability, AbstractServiceOptions, FieldMeta, Relation } from '../types';
|
||||
import ItemsService from '../services/items';
|
||||
import { ItemsService } from '../services/items';
|
||||
import { ColumnBuilder } from 'knex';
|
||||
import getLocalType from '../utils/get-local-type';
|
||||
import { types } from '../types';
|
||||
import { ForbiddenException } from '../exceptions';
|
||||
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
|
||||
import Knex, { CreateTableBuilder } from 'knex';
|
||||
import PayloadService from '../services/payload';
|
||||
import { PayloadService } from '../services/payload';
|
||||
import getDefaultValue from '../utils/get-default-value';
|
||||
import cache from '../cache';
|
||||
import SchemaInspector from 'knex-schema-inspector';
|
||||
|
||||
type RawField = Partial<Field> & { field: string; type: typeof types[number] };
|
||||
|
||||
/**
|
||||
* @todo
|
||||
*
|
||||
* - Only allow admins to create/update/delete
|
||||
* - Only return fields you have permission to read (based on permissions)
|
||||
* - Don't use items service, as this is a different case than regular collections
|
||||
*/
|
||||
|
||||
export default class FieldsService {
|
||||
export class FieldsService {
|
||||
knex: Knex;
|
||||
accountability: Accountability | null;
|
||||
itemsService: ItemsService;
|
||||
payloadService: PayloadService;
|
||||
schemaInspector: typeof schemaInspector;
|
||||
|
||||
constructor(options?: AbstractServiceOptions) {
|
||||
this.knex = options?.knex || database;
|
||||
this.schemaInspector = options?.knex ? SchemaInspector(options.knex) : schemaInspector;
|
||||
this.accountability = options?.accountability || null;
|
||||
this.itemsService = new ItemsService('directus_fields', options);
|
||||
this.payloadService = new PayloadService('directus_fields');
|
||||
}
|
||||
|
||||
async readAll(collection?: string) {
|
||||
async readAll(collection?: string): Promise<Field[]> {
|
||||
let fields: FieldMeta[];
|
||||
const nonAuthorizedItemsService = new ItemsService('directus_fields', { knex: this.knex });
|
||||
|
||||
@@ -72,10 +67,7 @@ export default class FieldsService {
|
||||
return data as Field;
|
||||
});
|
||||
|
||||
const aliasQuery = this.knex
|
||||
.select<FieldMeta[]>('*')
|
||||
.from('directus_fields')
|
||||
.whereIn('special', ['alias', 'o2m', 'm2m']);
|
||||
const aliasQuery = this.knex.select<any[]>('*').from('directus_fields');
|
||||
|
||||
if (collection) {
|
||||
aliasQuery.andWhere('collection', collection);
|
||||
@@ -83,13 +75,25 @@ export default class FieldsService {
|
||||
|
||||
let aliasFields = await aliasQuery;
|
||||
|
||||
const aliasTypes = ['alias', 'o2m', 'm2m', 'files', 'files', 'translations'];
|
||||
|
||||
aliasFields = aliasFields.filter((field) => {
|
||||
const specials = (field.special || '').split(',');
|
||||
|
||||
for (const type of aliasTypes) {
|
||||
if (specials.includes(type)) return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
});
|
||||
|
||||
aliasFields = (await this.payloadService.processValues('read', aliasFields)) as FieldMeta[];
|
||||
|
||||
const aliasFieldsAsField = aliasFields.map((field) => {
|
||||
const data = {
|
||||
collection: field.collection,
|
||||
field: field.field,
|
||||
type: field.special,
|
||||
type: field.special[0],
|
||||
schema: null,
|
||||
meta: field,
|
||||
};
|
||||
@@ -184,10 +188,22 @@ export default class FieldsService {
|
||||
throw new ForbiddenException('Only admins can perform this action.');
|
||||
}
|
||||
|
||||
/**
|
||||
* @todo
|
||||
* Check if table / directus_fields row already exists
|
||||
*/
|
||||
// Check if field already exists, either as a column, or as a row in directus_fields
|
||||
if (await this.schemaInspector.hasColumn(collection, field.field)) {
|
||||
throw new InvalidPayloadException(
|
||||
`Field "${field.field}" already exists in collection "${collection}"`
|
||||
);
|
||||
} else if (
|
||||
!!(await this.knex
|
||||
.select('id')
|
||||
.from('directus_fields')
|
||||
.where({ collection, field: field.field })
|
||||
.first())
|
||||
) {
|
||||
throw new InvalidPayloadException(
|
||||
`Field "${field.field}" already exists in collection "${collection}"`
|
||||
);
|
||||
}
|
||||
|
||||
if (field.schema) {
|
||||
if (table) {
|
||||
@@ -216,7 +232,7 @@ export default class FieldsService {
|
||||
|
||||
async updateField(collection: string, field: RawField) {
|
||||
if (this.accountability && this.accountability.admin !== true) {
|
||||
throw new ForbiddenException('Only admins can perform this action.');
|
||||
throw new ForbiddenException('Only admins can perform this action');
|
||||
}
|
||||
|
||||
if (field.schema) {
|
||||
@@ -339,6 +355,8 @@ export default class FieldsService {
|
||||
column = table[type](field.field /* precision, scale */);
|
||||
} else if (field.type === 'csv') {
|
||||
column = table.string(field.field);
|
||||
} else if (field.type === 'dateTime') {
|
||||
column = table.dateTime(field.field, { useTz: false });
|
||||
} else {
|
||||
column = table[field.type](field.field);
|
||||
}
|
||||
@@ -347,7 +365,7 @@ export default class FieldsService {
|
||||
column.defaultTo(field.schema.default_value);
|
||||
}
|
||||
|
||||
if (field.schema.is_nullable !== undefined && field.schema.is_nullable === false) {
|
||||
if (field.schema?.is_nullable !== undefined && field.schema.is_nullable === false) {
|
||||
column.notNullable();
|
||||
} else {
|
||||
column.nullable();
|
||||
|
||||
@@ -1,4 +1,4 @@
-import ItemsService from './items';
+import { ItemsService } from './items';
import storage from '../storage';
import sharp from 'sharp';
import { parse as parseICC } from 'icc';
@@ -8,8 +8,9 @@ import path from 'path';
import { AbstractServiceOptions, File, PrimaryKey } from '../types';
import { clone } from 'lodash';
import cache from '../cache';
+import { ForbiddenException } from '../exceptions';

-export default class FilesService extends ItemsService {
+export class FilesService extends ItemsService {
	constructor(options?: AbstractServiceOptions) {
		super('directus_files', options);
	}
@@ -89,7 +90,13 @@ export default class FilesService extends ItemsService {
	delete(keys: PrimaryKey[]): Promise<PrimaryKey[]>;
	async delete(key: PrimaryKey | PrimaryKey[]): Promise<PrimaryKey | PrimaryKey[]> {
		const keys = Array.isArray(key) ? key : [key];
-		const files = await super.readByKey(keys, { fields: ['id', 'storage'] });
+		let files = await super.readByKey(keys, { fields: ['id', 'storage'] });

+		if (!files) {
+			throw new ForbiddenException();
+		}

+		files = Array.isArray(files) ? files : [files];

		for (const file of files) {
			const disk = storage.disk(file.storage);
@@ -1,7 +1,7 @@
-import ItemsService from './items';
+import { ItemsService } from './items';
import { AbstractServiceOptions } from '../types';

-export default class FoldersService extends ItemsService {
+export class FoldersService extends ItemsService {
	constructor(options?: AbstractServiceOptions) {
		super('directus_folders', options);
	}
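The same two-line change repeats across files.ts, folders.ts, and the services below: default exports become named exports. A minimal sketch of what that buys, with a hypothetical ExampleService (not from the commit):

// Before: a default export lets every importer pick its own name.
// import Whatever from './example';
//
// After: the class name is the import contract, and a barrel file
// can re-export it without renaming.
export class ExampleService {}

// elsewhere:
// import { ExampleService } from './example';
// export * from './example';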
389
api/src/services/graphql.ts
Normal file
@@ -0,0 +1,389 @@
import Knex from 'knex';
import database from '../database';
import { AbstractServiceOptions, Accountability, Collection, Field, Relation, Query, AbstractService } from '../types';
import { GraphQLString, GraphQLSchema, GraphQLObjectType, GraphQLList, GraphQLResolveInfo, GraphQLInputObjectType, ObjectFieldNode, GraphQLID, ValueNode, FieldNode, GraphQLFieldConfigMap, GraphQLInt, IntValueNode, StringValueNode, BooleanValueNode, ArgumentNode, GraphQLScalarType, GraphQLBoolean, ObjectValueNode } from 'graphql';
import { getGraphQLType } from '../utils/get-graphql-type';
import { RelationsService } from './relations';
import { ItemsService } from './items';
import { cloneDeep } from 'lodash';
import { sanitizeQuery } from '../utils/sanitize-query';

import { ActivityService } from './activity';
import { CollectionsService } from './collections';
import { FieldsService } from './fields';
import { FilesService } from './files';
import { FoldersService } from './folders';
import { PermissionsService } from './permissions';
import { PresetsService } from './presets';
import { RevisionsService } from './revisions';
import { RolesService } from './roles';
import { SettingsService } from './settings';
import { UsersService } from './users';
import { WebhooksService } from './webhooks';

export class GraphQLService {
	accountability: Accountability | null;
	knex: Knex;
	fieldsService: FieldsService;
	collectionsService: CollectionsService;
	relationsService: RelationsService;

	constructor(options?: AbstractServiceOptions) {
		this.accountability = options?.accountability || null;
		this.knex = options?.knex || database;
		this.fieldsService = new FieldsService(options);
		this.collectionsService = new CollectionsService(options);
		this.relationsService = new RelationsService({ knex: this.knex });
	}

	args = {
		sort: {
			type: GraphQLString
		},
		limit: {
			type: GraphQLInt,
		},
		offset: {
			type: GraphQLInt,
		},
		page: {
			type: GraphQLInt,
		},
		search: {
			type: GraphQLString,
		}
	}

	async getSchema() {
		const collectionsInSystem = await this.collectionsService.readByQuery();
		const fieldsInSystem = await this.fieldsService.readAll();
		const relationsInSystem = await this.relationsService.readByQuery({}) as Relation[];

		const schema = this.getGraphQLSchema(collectionsInSystem, fieldsInSystem, relationsInSystem);

		return schema;
	}

	getGraphQLSchema(collections: Collection[], fields: Field[], relations: Relation[]) {
		const filterTypes = this.getFilterArgs(collections, fields, relations);
		const schema: any = { items: {} };

		for (const collection of collections) {
			const systemCollection = collection.collection.startsWith('directus_');

			const schemaSection: any = {
				type: new GraphQLObjectType({
					name: collection.collection,
					description: collection.meta?.note,
					fields: () => {
						const fieldsObject: GraphQLFieldConfigMap<any, any> = {};
						const fieldsInCollection = fields.filter((field) => field.collection === collection.collection);

						for (const field of fieldsInCollection) {
							const relationForField = relations.find((relation) => {
								return relation.many_collection === collection.collection && relation.many_field === field.field ||
									relation.one_collection === collection.collection && relation.one_field === field.field;
							});

							if (relationForField) {
								const isM2O = relationForField.many_collection === collection.collection && relationForField.many_field === field.field;

								if (isM2O) {
									const relatedIsSystem = relationForField.one_collection.startsWith('directus_');
									const relatedType = relatedIsSystem ? schema[relationForField.one_collection.substring(9)].type : schema.items[relationForField.one_collection].type;

									fieldsObject[field.field] = {
										type: relatedType,
									}
								} else {
									const relatedIsSystem = relationForField.many_collection.startsWith('directus_');
									const relatedType = relatedIsSystem ? schema[relationForField.many_collection.substring(9)].type : schema.items[relationForField.many_collection].type;

									fieldsObject[field.field] = {
										type: new GraphQLList(relatedType),
										args: {
											...this.args,
											filter: {
												type: filterTypes[relationForField.many_collection],
											}
										},
									}
								}
							} else {
								fieldsObject[field.field] = {
									type: field.schema?.is_primary_key ? GraphQLID : getGraphQLType(field.type),
								}
							}

							fieldsObject[field.field].description = field.meta?.note;
						}

						return fieldsObject;
					},
				}),
				resolve: (source: any, args: any, context: any, info: GraphQLResolveInfo) => this.resolve(info),
				args: {
					...this.args,
					filter: {
						name: `${collection.collection}_filter`,
						type: filterTypes[collection.collection],
					}
				}
			};

			if (systemCollection) {
				schema[collection.collection.substring(9)] = schemaSection;
			} else {
				schema.items[collection.collection] = schemaSection;
			}
		}

		const schemaWithLists = cloneDeep(schema);

		for (const collection of collections) {
			if (collection.meta?.singleton !== true) {
				const systemCollection = collection.collection.startsWith('directus_');

				if (systemCollection) {
					schemaWithLists[collection.collection.substring(9)].type = new GraphQLList(schemaWithLists[collection.collection.substring(9)].type);
				} else {
					schemaWithLists.items[collection.collection].type = new GraphQLList(schemaWithLists.items[collection.collection].type);
				}
			}
		}

		schemaWithLists.items = {
			type: new GraphQLObjectType({
				name: 'items',
				fields: schemaWithLists.items,
			}),
			resolve: () => ({}),
		};

		return new GraphQLSchema({
			query: new GraphQLObjectType({
				name: 'Directus',
				fields: schemaWithLists,
			}),
		});
	}

	getFilterArgs(collections: Collection[], fields: Field[], relations: Relation[]) {
		const filterTypes: any = {};

		for (const collection of collections) {
			filterTypes[collection.collection] = new GraphQLInputObjectType({
				name: `${collection.collection}_filter`,
				fields: () => {
					const filterFields: any = {
						_and: {
							type: new GraphQLList(filterTypes[collection.collection])
						},
						_or: {
							type: new GraphQLList(filterTypes[collection.collection])
						},
					};

					const fieldsInCollection = fields.filter((field) => field.collection === collection.collection);

					for (const field of fieldsInCollection) {
						const relationForField = relations.find((relation) => {
							return relation.many_collection === collection.collection && relation.many_field === field.field ||
								relation.one_collection === collection.collection && relation.one_field === field.field;
						});

						if (relationForField) {
							const isM2O = relationForField.many_collection === collection.collection && relationForField.many_field === field.field;

							if (isM2O) {
								const relatedType = filterTypes[relationForField.one_collection];

								filterFields[field.field] = {
									type: relatedType,
								}
							} else {
								const relatedType = filterTypes[relationForField.many_collection];

								filterFields[field.field] = {
									type: relatedType
								}
							}
						} else {
							const fieldType = field.schema?.is_primary_key ? GraphQLID : getGraphQLType(field.type);

							filterFields[field.field] = {
								type: new GraphQLInputObjectType({
									name: `${collection.collection}_${field.field}_filter_operators`,
									fields: {
										/* @todo make this a little smarter by only including filters that work with current type */
										_eq: {
											type: fieldType,
										},
										_neq: {
											type: fieldType
										},
										_contains: {
											type: fieldType,
										},
										_ncontains: {
											type: fieldType,
										},
										_in: {
											type: new GraphQLList(fieldType),
										},
										_nin: {
											type: new GraphQLList(fieldType),
										},
										_gt: {
											type: fieldType,
										},
										_gte: {
											type: fieldType,
										},
										_lt: {
											type: fieldType,
										},
										_lte: {
											type: fieldType,
										},
										_null: {
											type: GraphQLBoolean,
										},
										_nnull: {
											type: GraphQLBoolean,
										},
										_empty: {
											type: GraphQLBoolean,
										},
										_nempty: {
											type: GraphQLBoolean,
										}
									}
								}),
							}
						}
					}

					return filterFields;
				},
			});
		}

		return filterTypes
	}

	async resolve(info: GraphQLResolveInfo) {
		const systemField = info.path.prev?.key !== 'items';

		const collection = systemField ? `directus_${info.fieldName}` : info.fieldName;
		const selections = info.fieldNodes[0]?.selectionSet?.selections?.filter((node) => node.kind === 'Field') as FieldNode[] | undefined;
		if (!selections) return null;

		return await this.getData(collection, selections, info.fieldNodes[0].arguments);
	}

	async getData(collection: string, selections: FieldNode[], argsArray?: readonly ArgumentNode[]) {
		const args: Record<string, any> = this.parseArgs(argsArray);

		const query: Query = sanitizeQuery(args, this.accountability);

		const parseFields = (selections: FieldNode[], parent?: string): string[] => {
			const fields: string[] = [];

			for (const selection of selections) {
				const current = parent ? `${parent}.${selection.name.value}` : selection.name.value;

				if (selection.selectionSet === undefined) {
					fields.push(current);
				} else {
					const children = parseFields(selection.selectionSet.selections.filter((selection) => selection.kind === 'Field') as FieldNode[], current);
					fields.push(...children);
				}

				if (selection.arguments && selection.arguments.length > 0) {
					if (!query.deep) query.deep = {};

					const args: Record<string, any> = this.parseArgs(selection.arguments);
					query.deep[current] = sanitizeQuery(args, this.accountability);
				}
			}

			return fields;
		}

		query.fields = parseFields(selections.filter((selection) => selection.kind === 'Field') as FieldNode[]);

		let service: ItemsService;

		switch (collection) {
			case 'directus_activity':
				service = new ActivityService({ knex: this.knex, accountability: this.accountability });
			// case 'directus_collections':
			// 	service = new CollectionsService({ knex: this.knex, accountability: this.accountability });
			// case 'directus_fields':
			// 	service = new FieldsService({ knex: this.knex, accountability: this.accountability });
			case 'directus_files':
				service = new FilesService({ knex: this.knex, accountability: this.accountability });
			case 'directus_folders':
				service = new FoldersService({ knex: this.knex, accountability: this.accountability });
			case 'directus_permissions':
				service = new PermissionsService({ knex: this.knex, accountability: this.accountability });
			case 'directus_presets':
				service = new PresetsService({ knex: this.knex, accountability: this.accountability });
			case 'directus_relations':
				service = new RelationsService({ knex: this.knex, accountability: this.accountability });
			case 'directus_revisions':
				service = new RevisionsService({ knex: this.knex, accountability: this.accountability });
			case 'directus_roles':
				service = new RolesService({ knex: this.knex, accountability: this.accountability });
			case 'directus_settings':
				service = new SettingsService({ knex: this.knex, accountability: this.accountability });
			case 'directus_users':
				service = new UsersService({ knex: this.knex, accountability: this.accountability });
			case 'directus_webhooks':
				service = new WebhooksService({ knex: this.knex, accountability: this.accountability });
			default:
				service = new ItemsService(collection, { knex: this.knex, accountability: this.accountability });
		}

		const collectionInfo = await this.knex.select('singleton').from('directus_collections').where({ collection: collection }).first();
		const result = collectionInfo?.singleton === true ? await service.readSingleton(query) : await service.readByQuery(query);

		return result;
	}

	parseArgs(args?: readonly ArgumentNode[] | readonly ObjectFieldNode[]): Record<string, any> {
		if (!args) return {};

		const parseObjectValue = (arg: ObjectValueNode) => {
			return this.parseArgs(arg.fields);
		}

		const argsObject: any = {};

		for (const argument of args) {
			if (argument.value.kind === 'ObjectValue') {
				argsObject[argument.name.value] = parseObjectValue(argument.value);
			} else if (argument.value.kind === 'ListValue') {
				const values: any = [];

				for (const valueNode of argument.value.values) {
					if (valueNode.kind === 'ObjectValue') {
						values.push(this.parseArgs(valueNode.fields));
					} else {
						values.push((valueNode as any).value);
					}
				}

				argsObject[argument.name.value] = values;
			} else {
				argsObject[argument.name.value] = (argument.value as IntValueNode | StringValueNode | BooleanValueNode).value;
			}
		}

		return argsObject;
	}
}
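One thing worth noting in getData above: the switch over system collections has no break statements, so once a case matches, execution falls through every later case and service ends up overwritten by the default ItemsService. A sketch of the same dispatch without the fall-through, using a lookup table instead (constructor shapes assumed from the service classes in this commit; the table is illustrative, not the committed code):

import { AbstractServiceOptions } from '../types';
import { ItemsService } from './items';
import { ActivityService } from './activity';
import { FilesService } from './files';
import { FoldersService } from './folders';

// Map each system collection to its specialized service constructor.
const systemServices: Record<string, new (options?: AbstractServiceOptions) => ItemsService> = {
	directus_activity: ActivityService,
	directus_files: FilesService,
	directus_folders: FoldersService,
	// ...and so on for the remaining system collections
};

function getService(collection: string, options?: AbstractServiceOptions): ItemsService {
	const SystemService = systemServices[collection];
	return SystemService ? new SystemService(options) : new ItemsService(collection, options);
}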
22
api/src/services/index.ts
Normal file
@@ -0,0 +1,22 @@
export * from './activity';
export * from './assets';
export * from './authentication';
export * from './collections';
export * from './fields';
export * from './files';
export * from './folders';
export * from './graphql';
export * from './items';
export * from './meta';
export * from './payload';
export * from './permissions';
export * from './presets';
export * from './relations';
export * from './revisions';
export * from './roles';
export * from './server';
export * from './settings';
export * from './users';
export * from './utils';
export * from './webhooks';
export * from './specifications'
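With every service now a named export, this new barrel file gives consumers a single import path. A hypothetical call site (collection name illustrative):

import { ItemsService, FilesService } from '../services';

// One import site for every service; no default-import renaming drift.
const articles = new ItemsService('articles');
const files = new FilesService();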
@@ -15,23 +15,31 @@ import {
import Knex from 'knex';
import cache from '../cache';
import emitter from '../emitter';
import logger from '../logger';

-import PayloadService from './payload';
-import AuthorizationService from './authorization';
+import { PayloadService } from './payload';
+import { AuthorizationService } from './authorization';

-import { pick, clone } from 'lodash';
+import { pick, clone, cloneDeep } from 'lodash';
import getDefaultValue from '../utils/get-default-value';
+import { InvalidPayloadException } from '../exceptions';

-export default class ItemsService implements AbstractService {
+export class ItemsService implements AbstractService {
	collection: string;
	knex: Knex;
	accountability: Accountability | null;
+	eventScope: string;
+	schemaInspector: ReturnType<typeof SchemaInspector>;

	constructor(collection: string, options?: AbstractServiceOptions) {
		this.collection = collection;
		this.knex = options?.knex || database;
		this.accountability = options?.accountability || null;
+		this.eventScope = this.collection.startsWith('directus_')
+			? this.collection.substring(9)
+			: 'items';

+		this.schemaInspector = SchemaInspector(this.knex);

		return this;
	}
@@ -39,9 +47,8 @@ export default class ItemsService implements AbstractService {
	async create(data: Partial<Item>[]): Promise<PrimaryKey[]>;
	async create(data: Partial<Item>): Promise<PrimaryKey>;
	async create(data: Partial<Item> | Partial<Item>[]): Promise<PrimaryKey | PrimaryKey[]> {
-		const schemaInspector = SchemaInspector(this.knex);
-		const primaryKeyField = await schemaInspector.primary(this.collection);
-		const columns = await schemaInspector.columns(this.collection);
+		const primaryKeyField = await this.schemaInspector.primary(this.collection);
+		const columns = await this.schemaInspector.columns(this.collection);

		let payloads = clone(Array.isArray(data) ? data : [data]);

@@ -51,10 +58,35 @@ export default class ItemsService implements AbstractService {
				knex: trx,
			});

-			const authorizationService = new AuthorizationService({
-				accountability: this.accountability,
-				knex: trx,
-			});
+			const customProcessed = await emitter.emitAsync(
+				`${this.eventScope}.create.before`,
+				payloads,
+				{
+					event: `${this.eventScope}.create.before`,
+					accountability: this.accountability,
+					collection: this.collection,
+					item: null,
+					action: 'create',
+					payload: payloads,
+				}
+			);

+			if (customProcessed) {
+				payloads = customProcessed[customProcessed.length - 1];
+			}

+			if (this.accountability) {
+				const authorizationService = new AuthorizationService({
+					accountability: this.accountability,
+					knex: trx,
+				});

+				payloads = await authorizationService.validatePayload(
+					'create',
+					this.collection,
+					payloads
+				);
+			}

			payloads = await payloadService.processM2O(payloads);

@@ -70,14 +102,6 @@ export default class ItemsService implements AbstractService {
				payloadsWithoutAliases
			);

-			if (this.accountability && this.accountability.admin !== true) {
-				payloads = await authorizationService.validatePayload(
-					'create',
-					this.collection,
-					payloads
-				);
-			}

			const primaryKeys: PrimaryKey[] = [];

			for (const payloadWithoutAlias of payloadsWithoutAliases) {
@@ -111,7 +135,7 @@ export default class ItemsService implements AbstractService {
			if (this.accountability) {
				const activityRecords = primaryKeys.map((key) => ({
					action: Action.CREATE,
-					action_by: this.accountability!.user,
+					user: this.accountability!.user,
					collection: this.collection,
					ip: this.accountability!.ip,
					user_agent: this.accountability!.userAgent,
@@ -151,12 +175,16 @@ export default class ItemsService implements AbstractService {
				await cache.clear();
			}

-			emitter.emitAsync(`item.create.${this.collection}`, {
-				collection: this.collection,
-				item: primaryKeys,
-				action: 'create',
-				payload: payloads,
-			});
+			emitter
+				.emitAsync(`${this.eventScope}.create`, {
+					event: `${this.eventScope}.create`,
+					accountability: this.accountability,
+					collection: this.collection,
+					item: primaryKeys,
+					action: 'create',
+					payload: payloads,
+				})
+				.catch((err) => logger.warn(err));

			return primaryKeys;
		});
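The create path now emits a before event whose listener results can replace the payload (customProcessed keeps the last listener's return value), plus a fire-and-forget after event whose failures are only logged. A sketch of a listener pair, assuming emitter.emitAsync collects listener return values as the code above relies on (event scope and field names illustrative):

import emitter from '../emitter';

// Before-hook: whatever the last listener returns becomes the payload set.
emitter.on('items.create.before', async (payloads: Record<string, any>[]) => {
	return payloads.map((payload) => ({ ...payload, source: 'hook' }));
});

// After-hook: runs detached; a rejection is logger.warn'ed, never surfaced to the caller.
emitter.on('items.create', ({ collection, item }: { collection: string; item: unknown }) => {
	console.log(`created in ${collection}`, item);
});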
@@ -164,33 +192,43 @@ export default class ItemsService implements AbstractService {
		return Array.isArray(data) ? savedPrimaryKeys : savedPrimaryKeys[0];
	}

-	async readByQuery(query: Query): Promise<Item[]> {
+	async readByQuery(query: Query): Promise<null | Item | Item[]> {
		const authorizationService = new AuthorizationService({
			accountability: this.accountability,
		});
-		let ast = await getASTFromQuery(this.collection, query, { accountability: this.accountability, knex: this.knex });
+		let ast = await getASTFromQuery(this.collection, query, {
+			accountability: this.accountability,
+			knex: this.knex,
+		});

		if (this.accountability && this.accountability.admin !== true) {
			ast = await authorizationService.processAST(ast);
		}

		const records = await runAST(ast);

		return records;
	}

-	readByKey(keys: PrimaryKey[], query?: Query, action?: PermissionsAction): Promise<Item[]>;
-	readByKey(key: PrimaryKey, query?: Query, action?: PermissionsAction): Promise<Item>;
+	readByKey(
+		keys: PrimaryKey[],
+		query?: Query,
+		action?: PermissionsAction
+	): Promise<null | Item[]>;
+	readByKey(key: PrimaryKey, query?: Query, action?: PermissionsAction): Promise<null | Item>;
	async readByKey(
		key: PrimaryKey | PrimaryKey[],
		query: Query = {},
		action: PermissionsAction = 'read'
-	): Promise<Item | Item[]> {
+	): Promise<null | Item | Item[]> {
		query = clone(query);
-		const schemaInspector = SchemaInspector(this.knex);
-		const primaryKeyField = await schemaInspector.primary(this.collection);
+		const primaryKeyField = await this.schemaInspector.primary(this.collection);
		const keys = Array.isArray(key) ? key : [key];

		if (keys.length === 1) {
			query.single = true;
		}

		const queryWithFilter = {
			...query,
			filter: {
@@ -201,15 +239,11 @@ export default class ItemsService implements AbstractService {
			},
		};

-		let ast = await getASTFromQuery(
-			this.collection,
-			queryWithFilter,
-			{
-				accountability: this.accountability,
-				action,
-				knex: this.knex,
-			}
-		);
+		let ast = await getASTFromQuery(this.collection, queryWithFilter, {
+			accountability: this.accountability,
+			action,
+			knex: this.knex,
+		});

		if (this.accountability && this.accountability.admin !== true) {
			const authorizationService = new AuthorizationService({
@@ -219,9 +253,8 @@ export default class ItemsService implements AbstractService {
			ast = await authorizationService.processAST(ast, action);
		}

-		const records = await runAST(ast, { knex: this.knex });
-		return Array.isArray(key) ? records : records[0];
-		return [] as Item;
+		const result = await runAST(ast, { knex: this.knex });
+		return result;
	}
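readByQuery and readByKey now advertise null and single-item results in their return types, so call sites have to narrow before indexing. A small sketch of the narrowing a caller needs (collection and field names illustrative):

async function firstTitle(service: ItemsService): Promise<unknown> {
	const result = await service.readByQuery({ limit: 1 });

	if (result === null) return null; // nothing readable under the current accountability
	const rows = Array.isArray(result) ? result : [result];
	return rows[0]?.title;
}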

	update(data: Partial<Item>, keys: PrimaryKey[]): Promise<PrimaryKey[]>;
@@ -231,9 +264,8 @@ export default class ItemsService implements AbstractService {
		data: Partial<Item> | Partial<Item>[],
		key?: PrimaryKey | PrimaryKey[]
	): Promise<PrimaryKey | PrimaryKey[]> {
-		const schemaInspector = SchemaInspector(this.knex);
-		const primaryKeyField = await schemaInspector.primary(this.collection);
-		const columns = await schemaInspector.columns(this.collection);
+		const primaryKeyField = await this.schemaInspector.primary(this.collection);
+		const columns = await this.schemaInspector.columns(this.collection);

		// Updating one or more items to the same payload
		if (data && key) {
@@ -241,11 +273,30 @@ export default class ItemsService implements AbstractService {

			let payload = clone(data);

-			if (this.accountability && this.accountability.admin !== true) {
+			const customProcessed = await emitter.emitAsync(
+				`${this.eventScope}.update.before`,
+				payload,
+				{
+					event: `${this.eventScope}.update.before`,
+					accountability: this.accountability,
+					collection: this.collection,
+					item: null,
+					action: 'update',
+					payload,
+				}
+			);

+			if (customProcessed) {
+				payload = customProcessed[customProcessed.length - 1];
+			}

+			if (this.accountability) {
				const authorizationService = new AuthorizationService({
					accountability: this.accountability,
				});

+				await authorizationService.checkAccess('update', this.collection, keys);

				payload = await authorizationService.validatePayload(
					'update',
					this.collection,
@@ -284,7 +335,7 @@ export default class ItemsService implements AbstractService {
			if (this.accountability) {
				const activityRecords = keys.map((key) => ({
					action: Action.UPDATE,
-					action_by: this.accountability!.user,
+					user: this.accountability!.user,
					collection: this.collection,
					ip: this.accountability!.ip,
					user_agent: this.accountability!.userAgent,
@@ -296,11 +347,13 @@ export default class ItemsService implements AbstractService {
				for (const activityRecord of activityRecords) {
					await trx.insert(activityRecord).into('directus_activity');
+					let primaryKey;

+					const result = await trx
+						.select('id')
+						.from('directus_activity')
+						.orderBy('id', 'desc')
+						.first();

+					primaryKey = result.id;
					activityPrimaryKeys.push(primaryKey);
				}
@@ -312,7 +365,10 @@ export default class ItemsService implements AbstractService {
					activity: key,
					collection: this.collection,
					item: keys[index],
-					data: JSON.stringify(snapshots[index]),
+					data:
+						snapshots && Array.isArray(snapshots)
+							? JSON.stringify(snapshots?.[index])
+							: JSON.stringify(snapshots),
					delta: JSON.stringify(payloadWithoutAliases),
				}));

@@ -324,12 +380,16 @@ export default class ItemsService implements AbstractService {
				await cache.clear();
			}

-			emitter.emitAsync(`item.update.${this.collection}`, {
-				collection: this.collection,
-				item: key,
-				action: 'update',
-				payload,
-			});
+			emitter
+				.emitAsync(`${this.eventScope}.update`, {
+					event: `${this.eventScope}.update`,
+					accountability: this.accountability,
+					collection: this.collection,
+					item: key,
+					action: 'update',
+					payload,
+				})
+				.catch((err) => logger.warn(err));

			return key;
		}
@@ -347,9 +407,13 @@ export default class ItemsService implements AbstractService {
		for (const single of payloads as Partial<Item>[]) {
			let payload = clone(single);
			const key = payload[primaryKeyField];
-			if (!key)

+			if (!key) {
+				throw new InvalidPayloadException('Primary key is missing in update payload.');
+			}

			keys.push(key);

			await itemsService.update(payload, key);
		}
	});
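The loop above is the multi-payload branch of update: each entry must carry its own primary key, and a missing key now raises InvalidPayloadException instead of failing silently. Illustrative usage (collection and field names are hypothetical):

const articles = new ItemsService('articles');

// Each payload updates the row identified by its own primary key.
await articles.update([
	{ id: 1, status: 'published' },
	{ id: 2, status: 'draft' },
]);

// A payload without a primary key now throws InvalidPayloadException:
// await articles.update([{ status: 'draft' }]);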
@@ -357,12 +421,58 @@ export default class ItemsService implements AbstractService {
		return keys;
	}

+	async updateByQuery(data: Partial<Item>, query: Query): Promise<PrimaryKey[]> {
+		const primaryKeyField = await this.schemaInspector.primary(this.collection);
+		const readQuery = cloneDeep(query);
+		readQuery.fields = [primaryKeyField];

+		// Not authenticated:
+		const itemsService = new ItemsService(this.collection);

+		let itemsToUpdate = await itemsService.readByQuery(readQuery);
+		itemsToUpdate = Array.isArray(itemsToUpdate) ? itemsToUpdate : [itemsToUpdate];

+		const keys: PrimaryKey[] = itemsToUpdate.map(
+			(item: Partial<Item>) => item[primaryKeyField]
+		);

+		return await this.update(data, keys);
+	}

+	upsert(data: Partial<Item>): Promise<PrimaryKey>;
+	upsert(data: Partial<Item>[]): Promise<PrimaryKey[]>;
+	async upsert(data: Partial<Item> | Partial<Item>[]): Promise<PrimaryKey | PrimaryKey[]> {
+		const primaryKeyField = await this.schemaInspector.primary(this.collection);
+		const payloads = Array.isArray(data) ? data : [data];
+		const primaryKeys: PrimaryKey[] = [];

+		for (const payload of payloads) {
+			const primaryKey = payload[primaryKeyField];
+			const exists =
+				primaryKey &&
+				!!(await this.knex
+					.select(primaryKeyField)
+					.from(this.collection)
+					.where({ [primaryKeyField]: primaryKey })
+					.first());

+			if (exists) {
+				const keys = await this.update([payload]);
+				primaryKeys.push(...keys);
+			} else {
+				const key = await this.create(payload);
+				primaryKeys.push(key);
+			}
+		}

+		return Array.isArray(data) ? primaryKeys : primaryKeys[0];
+	}
	delete(key: PrimaryKey): Promise<PrimaryKey>;
	delete(keys: PrimaryKey[]): Promise<PrimaryKey[]>;
	async delete(key: PrimaryKey | PrimaryKey[]): Promise<PrimaryKey | PrimaryKey[]> {
		const keys = (Array.isArray(key) ? key : [key]) as PrimaryKey[];
-		const schemaInspector = SchemaInspector(this.knex);
-		const primaryKeyField = await schemaInspector.primary(this.collection);
+		const primaryKeyField = await this.schemaInspector.primary(this.collection);

		if (this.accountability && this.accountability.admin !== true) {
			const authorizationService = new AuthorizationService({
@@ -372,13 +482,22 @@ export default class ItemsService implements AbstractService {
			await authorizationService.checkAccess('delete', this.collection, key);
		}

+		await emitter.emitAsync(`${this.eventScope}.delete.before`, {
+			event: `${this.eventScope}.delete.before`,
+			accountability: this.accountability,
+			collection: this.collection,
+			item: keys,
+			action: 'delete',
+			payload: null,
+		});

		await this.knex.transaction(async (trx) => {
			await trx(this.collection).whereIn(primaryKeyField, keys).delete();

			if (this.accountability) {
				const activityRecords = keys.map((key) => ({
					action: Action.DELETE,
-					action_by: this.accountability!.user,
+					user: this.accountability!.user,
					collection: this.collection,
					ip: this.accountability!.ip,
					user_agent: this.accountability!.userAgent,
@@ -393,25 +512,45 @@ export default class ItemsService implements AbstractService {
			await cache.clear();
		}

-		emitter.emitAsync(`item.delete.${this.collection}`, {
-			collection: this.collection,
-			item: key,
-			action: 'delete',
-		});
+		emitter
+			.emitAsync(`${this.eventScope}.delete`, {
+				event: `${this.eventScope}.delete`,
+				accountability: this.accountability,
+				collection: this.collection,
+				item: keys,
+				action: 'delete',
+				payload: null,
+			})
+			.catch((err) => logger.warn(err));

		return key;
	}

+	async deleteByQuery(query: Query): Promise<PrimaryKey[]> {
+		const primaryKeyField = await this.schemaInspector.primary(this.collection);
+		const readQuery = cloneDeep(query);
+		readQuery.fields = [primaryKeyField];

+		// Not authenticated:
+		const itemsService = new ItemsService(this.collection);

+		let itemsToDelete = await itemsService.readByQuery(readQuery);
+		itemsToDelete = Array.isArray(itemsToDelete) ? itemsToDelete : [itemsToDelete];

+		const keys: PrimaryKey[] = itemsToDelete.map(
+			(item: Partial<Item>) => item[primaryKeyField]
+		);
+		return await this.delete(keys);
+	}

	async readSingleton(query: Query) {
		query = clone(query);
-		const schemaInspector = SchemaInspector(this.knex);
		query.limit = 1;
		query.single = true;

-		const records = await this.readByQuery(query);
-		const record = records[0];
+		const record = (await this.readByQuery(query)) as Item;

		if (!record) {
-			const columns = await schemaInspector.columnInfo(this.collection);
+			const columns = await this.schemaInspector.columnInfo(this.collection);
			const defaults: Record<string, any> = {};

			for (const column of columns) {
@@ -425,8 +564,7 @@ export default class ItemsService implements AbstractService {
	}

	async upsertSingleton(data: Partial<Item>) {
-		const schemaInspector = SchemaInspector(this.knex);
-		const primaryKeyField = await schemaInspector.primary(this.collection);
+		const primaryKeyField = await this.schemaInspector.primary(this.collection);

		const record = await this.knex
			.select(primaryKeyField)
Some files were not shown because too many files have changed in this diff.