Merge branch 'main' into aggregation

This commit is contained in:
rijkvanzanten
2021-09-13 12:50:19 -04:00
476 changed files with 28319 additions and 34337 deletions

3
.github/CODEOWNERS vendored
View File

@@ -3,5 +3,6 @@
/docs/*.md @benhaynes
/packages/shared @nickrum
/packages/extension-sdk @nickrum
/packages/extensions-sdk @nickrum
/packages/create-directus-extension @nickrum
/app/vite.config.js @nickrum

View File

@@ -0,0 +1,24 @@
# Keeps the Docker Hub repository description in sync with this repo's readme.md.
name: Sync Readme to Docker Hub
on:
  push:
    branches:
      - main
    paths: # ensures this workflow only runs when the readme.md or this file changes.
      - 'readme.md'
      - '.github/workflows/sync-dockerhub-readme.yml'
  workflow_dispatch: # also allow triggering the sync manually from the Actions tab
jobs:
  sync-dockerhub-readme:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      # Pushes ./readme.md as the repository description, authenticating with
      # the DOCKERHUB_* secrets configured on this repository.
      - name: Sync Readme to Docker Hub
        uses: peter-evans/dockerhub-description@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}
          readme-filepath: ./readme.md

1
.gitignore vendored
View File

@@ -17,3 +17,4 @@ dist
app/public/img/docs/*
*.tsbuildinfo
.e2e-containers.json
coverage

View File

@@ -34,5 +34,5 @@ RUN npm install
WORKDIR /directus/api
CMD ["sh", "-c", "node ./dist/cli/index.js bootstrap; node ./dist/start.js;"]
CMD ["sh", "-c", "node ./cli.js bootstrap; node ./dist/start.js;"]
EXPOSE 8055/tcp

2
api/.gitignore vendored
View File

@@ -10,4 +10,4 @@ test
dist
tmp
keys.json
coverage

View File

@@ -1,2 +1,2 @@
#!/usr/bin/env node
require('./dist/cli/index.js');
require('./dist/cli/run.js');

View File

@@ -9,18 +9,58 @@ LOG_STYLE="pretty"
####################################################################################################
# Database
## PostgreSQL Example
## These match the databases defined in the docker-compose file in the root of this repo
## Postgres
DB_CLIENT="pg"
DB_HOST="localhost"
DB_PORT=5432
DB_PORT=5100
DB_DATABASE="directus"
DB_USER="postgres"
DB_PASSWORD="psql1234"
DB_PASSWORD="secret"
## MySQL 8
# DB_CLIENT="mysql"
# DB_HOST="localhost"
# DB_PORT=5101
# DB_DATABASE="directus"
# DB_USER="root"
# DB_PASSWORD="secret"
## MariaDB
# DB_CLIENT="mysql"
# DB_HOST="localhost"
# DB_PORT=5102
# DB_DATABASE="directus"
# DB_USER="root"
# DB_PASSWORD="secret"
## MS SQL
# DB_CLIENT="mssql"
# DB_HOST="localhost"
# DB_PORT=5103
# DB_DATABASE="directus"
# DB_USER="sa"
# DB_PASSWORD="Test@123"
## OracleDB
# DB_CLIENT="oracle"
# DB_CONNECT_STRING="localhost:5104/XE"
# DB_USER="secretsysuser"
# DB_PASSWORD="secretpassword"
## SQLite Example
# DB_CLIENT="sqlite3"
# DB_FILENAME="./data.db"
## MySQL 5.7
# DB_CLIENT="mysql"
# DB_HOST="localhost"
# DB_PORT=5102
# DB_DATABASE="directus"
# DB_USER="root"
# DB_PASSWORD="secret"
####################################################################################################
# Rate Limiting
@@ -32,45 +72,25 @@ RATE_LIMITER_DURATION=1
RATE_LIMITER_STORE=memory
# memory | redis | memcache
## Redis (see https://github.com/animir/node-rate-limiter-flexible/wiki/Redis and
## https://www.npmjs.com/package/ioredis#connect-to-redis)
# RATE_LIMITER_EXEC_EVENLY=false
# RATE_LIMITER_BLOCK_DURATION=0
# RATE_LIMITER_KEY_PREFIX=rlflx
# RATE_LIMITER_REDIS="redis://:authpassword@127.0.0.1:6380/4"
# --OR--
# RATE_LIMITER_REDIS_HOST="127.0.0.1"
# RATE_LIMITER_REDIS_PORT="127.0.0.1"
# RATE_LIMITER_REDIS_PASSWORD="127.0.0.1"
# RATE_LIMITER_REDIS_DB="127.0.0.1"
## Memcache (see https://github.com/animir/node-rate-limiter-flexible/wiki/Memcache and
## https://www.npmjs.com/package/memcached)
# RATE_LIMITER_MEMCACHE='localhost:11211'
# RATE_LIMITER_REDIS="redis://@127.0.0.1:5105"
# RATE_LIMITER_MEMCACHE="localhost:5109"
####################################################################################################
# Caching
CACHE_ENABLED=true
CACHE_TTL="30m"
CACHE_NAMESPACE="directus-cache"
CACHE_STORE=memory
# memory | redis | memcache
CACHE_AUTO_PURGE=true
# memory | redis | memcache
CACHE_STORE=memory
ASSETS_CACHE_TTL="30m"
# CACHE_REDIS="redis://:authpassword@127.0.0.1:6380/4"
# --OR--
# CACHE_REDIS_HOST="127.0.0.1"
# CACHE_REDIS_PORT="127.0.0.1"
# CACHE_REDIS_PASSWORD="127.0.0.1"
# CACHE_REDIS_DB="127.0.0.1"
# CACHE_REDIS="redis://@127.0.0.1:5105"
## Memcache (see https://github.com/animir/node-rate-limiter-flexible/wiki/Memcache and
## https://www.npmjs.com/package/memcached)
# CACHE_MEMCACHE='localhost:11211'
# CACHE_MEMCACHE="localhost:5109"
####################################################################################################
# File Storage
@@ -113,6 +133,16 @@ CORS_EXPOSED_HEADERS=Content-Range
CORS_CREDENTIALS="true"
CORS_MAX_AGE=18000
####################################################################################################
# Argon2
# HASH_MEMORY_COST=81920
# HASH_HASH_LENGTH=32
# HASH_TIME_COST=10
# HASH_PARALLELISM=2
# HASH_TYPE=2
# HASH_ASSOCIATED_DATA=foo
####################################################################################################
# SSO (OAuth) Providers

12
api/jest.config.js Normal file
View File

@@ -0,0 +1,12 @@
// Jest configuration for the API package, extending the repo-root jest config.
const base = require('../jest.config.js');

// Load .env now so process.env.TEST_URL is available when this config is built.
require('dotenv').config();

module.exports = {
	...base,
	// Only discover tests/sources under api/src.
	roots: ['<rootDir>/src'],
	verbose: true,
	// Also load .env inside each test environment before tests run.
	setupFiles: ['dotenv/config'],
	// Base URL for the test environment; falls back to localhost when unset.
	testURL: process.env.TEST_URL || 'http://localhost',
	// Collect coverage from every TypeScript source file in src.
	collectCoverageFrom: ['src/**/*.ts'],
};

View File

@@ -1,6 +1,6 @@
{
"name": "directus",
"version": "9.0.0-rc.90",
"version": "9.0.0-rc.92",
"license": "GPL-3.0-only",
"homepage": "https://github.com/directus/directus#readme",
"description": "Directus is a real-time API and App dashboard for managing SQL database content.",
@@ -56,7 +56,9 @@
"build": "tsc --build && copyfiles \"src/**/*.*\" -e \"src/**/*.ts\" -u 1 dist",
"cleanup": "rimraf dist",
"dev": "cross-env NODE_ENV=development SERVE_APP=false ts-node-dev --files --transpile-only --respawn --watch \".env\" --inspect --exit-child -- src/start.ts",
"cli": "cross-env NODE_ENV=development SERVE_APP=false ts-node --script-mode --transpile-only src/cli/index.ts"
"cli": "cross-env NODE_ENV=development SERVE_APP=false ts-node --script-mode --transpile-only src/cli/run.ts",
"test": "jest --coverage",
"test:watch": "jest --watchAll"
},
"engines": {
"node": ">=12.20.0"
@@ -68,19 +70,20 @@
"example.env"
],
"dependencies": {
"@directus/app": "9.0.0-rc.90",
"@directus/drive": "9.0.0-rc.90",
"@directus/drive-azure": "9.0.0-rc.90",
"@directus/drive-gcs": "9.0.0-rc.90",
"@directus/drive-s3": "9.0.0-rc.90",
"@directus/format-title": "9.0.0-rc.90",
"@directus/schema": "9.0.0-rc.90",
"@directus/shared": "9.0.0-rc.90",
"@directus/specs": "9.0.0-rc.90",
"@directus/app": "9.0.0-rc.92",
"@directus/drive": "9.0.0-rc.92",
"@directus/drive-azure": "9.0.0-rc.92",
"@directus/drive-gcs": "9.0.0-rc.92",
"@directus/drive-s3": "9.0.0-rc.92",
"@directus/extensions-sdk": "9.0.0-rc.92",
"@directus/format-title": "9.0.0-rc.92",
"@directus/schema": "9.0.0-rc.92",
"@directus/shared": "9.0.0-rc.92",
"@directus/specs": "9.0.0-rc.92",
"@godaddy/terminus": "^4.9.0",
"@rollup/plugin-alias": "^3.1.2",
"@rollup/plugin-virtual": "^2.0.3",
"argon2": "^0.28.1",
"argon2": "^0.28.2",
"async": "^3.2.0",
"async-mutex": "^0.3.1",
"atob": "^2.1.2",
@@ -114,7 +117,7 @@
"jsonwebtoken": "^8.5.1",
"keyv": "^4.0.3",
"knex": "^0.95.6",
"knex-schema-inspector": "1.5.13",
"knex-schema-inspector": "1.6.0",
"liquidjs": "^9.25.0",
"lodash": "^4.17.21",
"macos-release": "^2.4.1",
@@ -128,16 +131,17 @@
"openapi3-ts": "^2.0.0",
"ora": "^5.4.0",
"otplib": "^12.0.1",
"pino": "6.13.0",
"pino": "6.13.2",
"pino-colada": "^2.1.0",
"pino-http": "5.6.0",
"pino-http": "5.7.0",
"prettier": "^2.3.1",
"qs": "^6.9.4",
"rate-limiter-flexible": "^2.2.2",
"resolve-cwd": "^3.0.0",
"rollup": "^2.52.1",
"sharp": "^0.28.3",
"sharp": "^0.29.0",
"stream-json": "^1.7.1",
"supertest": "^6.1.6",
"update-check": "^1.5.4",
"uuid": "^8.3.2",
"uuid-validate": "0.0.3",
@@ -155,7 +159,7 @@
"nodemailer-mailgun-transport": "^2.1.3",
"pg": "^8.6.0",
"sqlite3": "^5.0.2",
"tedious": "^11.0.8"
"tedious": "^12.0.0"
},
"gitHead": "24621f3934dc77eb23441331040ed13c676ceffd",
"devDependencies": {
@@ -171,27 +175,31 @@
"@types/express-session": "1.17.4",
"@types/flat": "^5.0.2",
"@types/fs-extra": "9.0.12",
"@types/inquirer": "7.3.3",
"@types/js-yaml": "4.0.2",
"@types/inquirer": "8.1.1",
"@types/jest": "27.0.1",
"@types/js-yaml": "4.0.3",
"@types/json2csv": "5.0.3",
"@types/jsonwebtoken": "8.5.4",
"@types/keyv": "3.1.2",
"@types/jsonwebtoken": "8.5.5",
"@types/keyv": "3.1.3",
"@types/lodash": "4.14.172",
"@types/mime-types": "2.1.0",
"@types/mime-types": "2.1.1",
"@types/ms": "0.7.31",
"@types/node": "15.12.2",
"@types/node-cron": "2.0.4",
"@types/nodemailer": "6.4.4",
"@types/object-hash": "2.1.1",
"@types/object-hash": "2.2.0",
"@types/qs": "6.9.7",
"@types/sharp": "0.28.5",
"@types/sharp": "0.29.1",
"@types/stream-json": "1.7.1",
"@types/supertest": "2.0.11",
"@types/uuid": "8.3.1",
"@types/uuid-validate": "0.0.1",
"@types/wellknown": "0.5.1",
"copyfiles": "2.4.1",
"cross-env": "7.0.3",
"jest": "27.2.0",
"ts-jest": "27.0.5",
"ts-node-dev": "1.1.8",
"typescript": "4.3.5"
"typescript": "4.4.3"
}
}

View File

@@ -0,0 +1,6 @@
/**
 * Manual Jest mock of the cache module.
 * `get` resolves to `undefined` and `set` resolves to `true`, so code under
 * test can await both without a real cache backend behind them.
 */
const get = jest.fn().mockResolvedValue(undefined);
const set = jest.fn().mockResolvedValue(true);

export const cache = { get, set };

/** Mocked factory that always hands back the stubbed cache above. */
export const getCache = jest.fn().mockReturnValue({ cache });

View File

@@ -24,7 +24,7 @@ import settingsRouter from './controllers/settings';
import usersRouter from './controllers/users';
import utilsRouter from './controllers/utils';
import webhooksRouter from './controllers/webhooks';
import { isInstalled, validateDBConnection, validateMigrations } from './database';
import { isInstalled, validateDatabaseConnection, validateDatabaseExtensions, validateMigrations } from './database';
import { emitAsyncSafe } from './emitter';
import env from './env';
import { InvalidPayloadException } from './exceptions';
@@ -45,20 +45,19 @@ import { validateStorage } from './utils/validate-storage';
import { register as registerWebhooks } from './webhooks';
import { session } from './middleware/session';
import { flushCaches } from './cache';
import { URL } from 'url';
import { Url } from './utils/url';
export default async function createApp(): Promise<express.Application> {
validateEnv(['KEY', 'SECRET']);
try {
new URL(env.PUBLIC_URL);
} catch {
logger.warn('PUBLIC_URL is not a valid URL');
if (!new Url(env.PUBLIC_URL).isAbsolute()) {
logger.warn('PUBLIC_URL should be a full URL');
}
await validateStorage();
await validateDBConnection();
await validateDatabaseConnection();
await validateDatabaseExtensions();
if ((await isInstalled()) === false) {
logger.error(`Database doesn't have Directus tables installed.`);
@@ -126,11 +125,14 @@ export default async function createApp(): Promise<express.Application> {
if (env.SERVE_APP) {
const adminPath = require.resolve('@directus/app/dist/index.html');
const publicUrl = env.PUBLIC_URL.endsWith('/') ? env.PUBLIC_URL : env.PUBLIC_URL + '/';
const adminUrl = new Url(env.PUBLIC_URL).addPath('admin');
// Set the App's base path according to the APIs public URL
let html = fse.readFileSync(adminPath, 'utf-8');
html = html.replace(/<meta charset="utf-8" \/>/, `<meta charset="utf-8" />\n\t\t<base href="${publicUrl}admin/">`);
html = html.replace(
/<meta charset="utf-8" \/>/,
`<meta charset="utf-8" />\n\t\t<base href="${adminUrl.toString({ rootRelative: true })}/">`
);
app.get('/admin', (req, res) => res.send(html));
app.use('/admin', express.static(path.join(adminPath, '..')));
@@ -183,7 +185,8 @@ export default async function createApp(): Promise<express.Application> {
app.use('/users', usersRouter);
app.use('/utils', utilsRouter);
app.use('/webhooks', webhooksRouter);
app.use('/custom', customRouter);
app.use(customRouter);
// Register custom hooks / endpoints
await emitAsyncSafe('routes.custom.init.before', { app });

View File

@@ -52,7 +52,6 @@ function getConfig(store: 'memory' | 'redis' | 'memcache' = 'memory', ttl: numbe
config.store = new KeyvRedis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'), {
commandTimeout: 500,
retryStrategy: false,
});
}

View File

@@ -6,7 +6,7 @@ import env from '../../../env';
import logger from '../../../logger';
import { getSchema } from '../../../utils/get-schema';
import { RolesService, UsersService, SettingsService } from '../../../services';
import getDatabase, { isInstalled, validateDBConnection, hasDatabaseConnection } from '../../../database';
import getDatabase, { isInstalled, validateDatabaseConnection, hasDatabaseConnection } from '../../../database';
import { SchemaOverview } from '../../../types';
export default async function bootstrap({ skipAdminInit }: { skipAdminInit?: boolean }): Promise<void> {
@@ -59,7 +59,7 @@ async function waitForDatabase(database: Knex) {
}
// This will throw and exit the process if the database is not available
await validateDBConnection(database);
await validateDatabaseConnection(database);
}
async function createDefaultAdmin(schema: SchemaOverview) {

View File

@@ -1,12 +1,11 @@
/* eslint-disable no-console */
import getDatabase from '../../../database';
import logger from '../../../logger';
export default async function count(collection: string): Promise<void> {
const database = getDatabase();
if (!collection) {
console.error('Collection is required');
logger.error('Collection is required');
process.exit(1);
}
@@ -14,11 +13,11 @@ export default async function count(collection: string): Promise<void> {
const records = await database(collection).count('*', { as: 'count' });
const count = Number(records[0].count);
console.log(count);
process.stdout.write(`${count}\n`);
database.destroy();
process.exit(0);
} catch (err) {
console.error(err);
} catch (err: any) {
logger.error(err);
database.destroy();
process.exit(1);
}

View File

@@ -1,8 +1,7 @@
/* eslint-disable no-console */
import runMigrations from '../../../database/migrations/run';
import installSeeds from '../../../database/seeds/run';
import getDatabase from '../../../database';
import logger from '../../../logger';
export default async function start(): Promise<void> {
const database = getDatabase();
@@ -12,8 +11,8 @@ export default async function start(): Promise<void> {
await runMigrations(database, 'latest');
database.destroy();
process.exit(0);
} catch (err) {
console.log(err);
} catch (err: any) {
logger.error(err);
database.destroy();
process.exit(1);
}

View File

@@ -1,25 +1,24 @@
/* eslint-disable no-console */
import run from '../../../database/migrations/run';
import getDatabase from '../../../database';
import logger from '../../../logger';
export default async function migrate(direction: 'latest' | 'up' | 'down'): Promise<void> {
const database = getDatabase();
try {
console.log('Running migrations...');
logger.info('Running migrations...');
await run(database, direction);
if (direction === 'down') {
console.log('Downgrade successful');
logger.info('Downgrade successful');
} else {
console.log('Database up to date');
logger.info('Database up to date');
}
database.destroy();
process.exit();
} catch (err) {
console.log(err);
} catch (err: any) {
logger.error(err);
database.destroy();
process.exit(1);
}

View File

@@ -1,6 +1,3 @@
/* eslint-disable no-console */
import argon2 from 'argon2';
import chalk from 'chalk';
import execa from 'execa';
import inquirer from 'inquirer';
@@ -13,6 +10,7 @@ import createDBConnection, { Credentials } from '../../utils/create-db-connectio
import createEnv from '../../utils/create-env';
import { drivers, getDriverForClient } from '../../utils/drivers';
import { databaseQuestions } from './questions';
import { generateHash } from '../../../utils/generate-hash';
export default async function init(): Promise<void> {
const rootPath = process.cwd();
@@ -48,20 +46,17 @@ export default async function init(): Promise<void> {
try {
await runSeed(db);
await runMigrations(db, 'latest');
} catch (err) {
console.log();
console.log('Something went wrong while seeding the database:');
console.log();
console.log(`${chalk.red(`[${err.code || 'Error'}]`)} ${err.message}`);
console.log();
console.log('Please try again');
console.log();
} catch (err: any) {
process.stdout.write('\nSomething went wrong while seeding the database:\n');
process.stdout.write(`\n${chalk.red(`[${err.code || 'Error'}]`)} ${err.message}\n`);
process.stdout.write('\nPlease try again\n\n');
attemptsRemaining--;
if (attemptsRemaining > 0) {
return await trySeed();
} else {
console.log(`Couldn't seed the database. Exiting.`);
process.stdout.write("Couldn't seed the database. Exiting.\n");
process.exit(1);
}
}
@@ -71,10 +66,7 @@ export default async function init(): Promise<void> {
await createEnv(dbClient, credentials!, rootPath);
console.log();
console.log();
console.log(`Create your first admin user:`);
process.stdout.write('\nCreate your first admin user:\n\n');
const firstUser = await inquirer.prompt([
{
@@ -95,7 +87,7 @@ export default async function init(): Promise<void> {
},
]);
firstUser.password = await argon2.hash(firstUser.password);
firstUser.password = await generateHash(firstUser.password);
const userID = uuidV4();
const roleID = uuidV4();
@@ -120,15 +112,11 @@ export default async function init(): Promise<void> {
await db.destroy();
console.log(`
Your project has been created at ${chalk.green(rootPath)}.
The configuration can be found in ${chalk.green(rootPath + '/.env')}
Start Directus by running:
${chalk.blue('cd')} ${rootPath}
${chalk.blue('npx directus')} start
`);
process.stdout.write(`\nYour project has been created at ${chalk.green(rootPath)}.\n`);
process.stdout.write(`\nThe configuration can be found in ${chalk.green(rootPath + '/.env')}\n`);
process.stdout.write(`\nStart Directus by running:\n`);
process.stdout.write(` ${chalk.blue('cd')} ${rootPath}\n`);
process.stdout.write(` ${chalk.blue('npx directus')} start\n`);
process.exit(0);
}

View File

@@ -1,14 +1,13 @@
/* eslint-disable no-console */
import { getSchema } from '../../../utils/get-schema';
import { RolesService } from '../../../services';
import getDatabase from '../../../database';
import logger from '../../../logger';
export default async function rolesCreate({ role: name, admin }: { role: string; admin: boolean }): Promise<void> {
const database = getDatabase();
if (!name) {
console.error('Name is required');
logger.error('Name is required');
process.exit(1);
}
@@ -17,11 +16,11 @@ export default async function rolesCreate({ role: name, admin }: { role: string;
const service = new RolesService({ schema: schema, knex: database });
const id = await service.createOne({ name, admin_access: admin });
console.log(id);
process.stdout.write(`${String(id)}\n`);
database.destroy();
process.exit(0);
} catch (err) {
console.error(err);
} catch (err: any) {
logger.error(err);
process.exit(1);
}
}

View File

@@ -1,8 +1,7 @@
/* eslint-disable no-console */
import { getSchema } from '../../../utils/get-schema';
import { UsersService } from '../../../services';
import getDatabase from '../../../database';
import logger from '../../../logger';
export default async function usersCreate({
email,
@@ -16,7 +15,7 @@ export default async function usersCreate({
const database = getDatabase();
if (!email || !password || !role) {
console.error('Email, password, role are required');
logger.error('Email, password, role are required');
process.exit(1);
}
@@ -25,11 +24,11 @@ export default async function usersCreate({
const service = new UsersService({ schema, knex: database });
const id = await service.createOne({ email, password, role, status: 'active' });
console.log(id);
process.stdout.write(`${String(id)}\n`);
database.destroy();
process.exit(0);
} catch (err) {
console.error(err);
} catch (err: any) {
logger.error(err);
process.exit(1);
}
}

View File

@@ -1,35 +1,34 @@
/* eslint-disable no-console */
import argon2 from 'argon2';
import { getSchema } from '../../../utils/get-schema';
import { generateHash } from '../../../utils/generate-hash';
import { UsersService } from '../../../services';
import getDatabase from '../../../database';
import logger from '../../../logger';
export default async function usersPasswd({ email, password }: { email?: string; password?: string }): Promise<void> {
const database = getDatabase();
if (!email || !password) {
console.error('Email and password are required');
logger.error('Email and password are required');
process.exit(1);
}
try {
const passwordHashed = await argon2.hash(password);
const passwordHashed = await generateHash(password);
const schema = await getSchema();
const service = new UsersService({ schema, knex: database });
const user = await service.knex.select('id').from('directus_users').where({ email }).first();
if (user) {
await service.knex('directus_users').update({ password: passwordHashed }).where({ id: user.id });
console.log(`Password is updated for user ${user.id}`);
logger.info(`Password is updated for user ${user.id}`);
} else {
console.log('No such user by this email');
logger.error('No such user by this email');
}
await database.destroy();
process.exit(user ? 0 : 1);
} catch (err) {
console.error(err);
} catch (err: any) {
logger.error(err);
process.exit(1);
}
}

62
api/src/cli/index.test.ts Normal file
View File

@@ -0,0 +1,62 @@
import { Command } from 'commander';
import { Extension } from '@directus/shared/types';
import { createCli } from '.';

// Silence logging and stop the CLI from loading the app / extensions from disk.
// (Jest hoists these jest.mock calls above the imports; the factories are
// invoked lazily, which is why they may reference consts declared below.)
jest.mock('../env', () => ({
	...jest.requireActual('../env').default,
	LOG_LEVEL: 'silent',
	EXTENSIONS_PATH: '',
	SERVE_APP: false,
}));

// Pretend exactly one local hook extension ("custom-cli") is installed.
jest.mock('@directus/shared/utils/node/get-extensions', () => ({
	getPackageExtensions: jest.fn(() => Promise.resolve([])),
	getLocalExtensions: jest.fn(() => Promise.resolve([customCliExtension])),
}));

// Virtual module so requiring the fake extension's entrypoint yields our hook object.
jest.mock(`/hooks/custom-cli/index.js`, () => () => customCliHook, { virtual: true });

const customCliExtension: Extension = {
	path: `/hooks/custom-cli`,
	name: 'custom-cli',
	type: 'hook',
	entrypoint: 'index.js',
	local: true,
	root: true,
};

const beforeHook = jest.fn();
const afterAction = jest.fn();
// The "after" hook registers an extra `custom` command on the program.
const afterHook = jest.fn(({ program }: { program: Command }) => program.command('custom').action(afterAction));
const customCliHook = { 'cli.init.before': beforeHook, 'cli.init.after': afterHook };

const writeOut = jest.fn();
const writeErr = jest.fn();

/** Build the CLI with process.exit disabled and commander output captured. */
const setup = async () => {
	const program = await createCli();
	program.exitOverride();
	program.configureOutput({ writeOut, writeErr });
	return program;
};

beforeEach(jest.clearAllMocks);

describe('cli hooks', () => {
	test('should call hooks before and after creating the cli', async () => {
		const program = await setup();

		expect(beforeHook).toHaveBeenCalledTimes(1);
		expect(beforeHook).toHaveBeenCalledWith({ program });

		expect(afterHook).toHaveBeenCalledTimes(1);
		expect(afterHook).toHaveBeenCalledWith({ program });
	});

	test('should be able to add a custom cli command', async () => {
		const program = await setup();

		// Await the parse: without it this promise floats, so the assertion
		// could run before the action has executed and any rejection from
		// parseAsync would surface as an unhandled rejection instead of a
		// test failure.
		await program.parseAsync(['custom'], { from: 'user' });

		expect(afterAction).toHaveBeenCalledTimes(1);
	});
});

View File

@@ -1,9 +1,7 @@
#!/usr/bin/env node
/* eslint-disable no-console */
import { program } from 'commander';
import { Command } from 'commander';
import start from '../start';
import { emitAsyncSafe } from '../emitter';
import { initializeExtensions, registerExtensionHooks } from '../extensions';
import bootstrap from './commands/bootstrap';
import count from './commands/count';
import dbInstall from './commands/database/install';
@@ -15,61 +13,69 @@ import usersPasswd from './commands/users/passwd';
const pkg = require('../../package.json');
program.name('directus').usage('[command] [options]');
program.version(pkg.version, '-v, --version');
export async function createCli(): Promise<Command> {
const program = new Command();
program.command('start').description('Start the Directus API').action(start);
program.command('init').description('Create a new Directus Project').action(init);
await initializeExtensions();
registerExtensionHooks();
const dbCommand = program.command('database');
dbCommand.command('install').description('Install the database').action(dbInstall);
dbCommand
.command('migrate:latest')
.description('Upgrade the database')
.action(() => dbMigrate('latest'));
dbCommand
.command('migrate:up')
.description('Upgrade the database')
.action(() => dbMigrate('up'));
dbCommand
.command('migrate:down')
.description('Downgrade the database')
.action(() => dbMigrate('down'));
await emitAsyncSafe('cli.init.before', { program });
const usersCommand = program.command('users');
program.name('directus').usage('[command] [options]');
program.version(pkg.version, '-v, --version');
usersCommand
.command('create')
.description('Create a new user')
.option('--email <value>', `user's email`)
.option('--password <value>', `user's password`)
.option('--role <value>', `user's role`)
.action(usersCreate);
program.command('start').description('Start the Directus API').action(start);
program.command('init').description('Create a new Directus Project').action(init);
usersCommand
.command('passwd')
.description('Set user password')
.option('--email <value>', `user's email`)
.option('--password <value>', `user's new password`)
.action(usersPasswd);
const dbCommand = program.command('database');
dbCommand.command('install').description('Install the database').action(dbInstall);
dbCommand
.command('migrate:latest')
.description('Upgrade the database')
.action(() => dbMigrate('latest'));
dbCommand
.command('migrate:up')
.description('Upgrade the database')
.action(() => dbMigrate('up'));
dbCommand
.command('migrate:down')
.description('Downgrade the database')
.action(() => dbMigrate('down'));
const rolesCommand = program.command('roles');
rolesCommand
.command('create')
.description('Create a new role')
.option('--role <value>', `name for the role`)
.option('--admin', `whether or not the role has admin access`)
.action(rolesCreate);
const usersCommand = program.command('users');
program.command('count <collection>').description('Count the amount of items in a given collection').action(count);
usersCommand
.command('create')
.description('Create a new user')
.option('--email <value>', `user's email`)
.option('--password <value>', `user's password`)
.option('--role <value>', `user's role`)
.action(usersCreate);
program
.command('bootstrap')
.description('Initialize or update the database')
.option('--skipAdminInit', 'Skips the creation of the default Admin Role and User')
.action(bootstrap);
usersCommand
.command('passwd')
.description('Set user password')
.option('--email <value>', `user's email`)
.option('--password <value>', `user's new password`)
.action(usersPasswd);
program.parseAsync(process.argv).catch((err) => {
console.error(err);
process.exit(1);
});
const rolesCommand = program.command('roles');
rolesCommand
.command('create')
.description('Create a new role')
.option('--role <value>', `name for the role`)
.option('--admin', `whether or not the role has admin access`)
.action(rolesCreate);
program.command('count <collection>').description('Count the amount of items in a given collection').action(count);
program
.command('bootstrap')
.description('Initialize or update the database')
.option('--skipAdminInit', 'Skips the creation of the default Admin Role and User')
.action(bootstrap);
await emitAsyncSafe('cli.init.after', { program });
return program;
}

9
api/src/cli/run.ts Normal file
View File

@@ -0,0 +1,9 @@
import { createCli } from './index';

/**
 * CLI entrypoint: build the commander program and run it against process.argv.
 * Any failure — while constructing the CLI or while executing a command — is
 * printed to stderr and the process exits with a non-zero status code.
 */
(async () => {
	try {
		const program = await createCli();
		await program.parseAsync(process.argv);
	} catch (err) {
		// eslint-disable-next-line no-console
		console.error(err);
		process.exit(1);
	}
})();

View File

@@ -99,7 +99,7 @@ router.post(
res.locals.payload = {
data: record || null,
};
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -138,7 +138,7 @@ router.patch(
res.locals.payload = {
data: record || null,
};
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -177,7 +177,7 @@ router.post(
try {
await service.requestPasswordReset(req.body.email, req.body.reset_url || null);
return next();
} catch (err) {
} catch (err: any) {
if (err instanceof InvalidPayloadException) {
throw err;
} else {
@@ -320,7 +320,7 @@ router.get(
authResponse = await authenticationService.authenticate({
email,
});
} catch (error) {
} catch (error: any) {
emitStatus('fail');
logger.warn(error);

View File

@@ -88,7 +88,7 @@ router.patch(
try {
const collection = await collectionsService.readOne(req.params.collection);
res.locals.payload = { data: collection || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -101,7 +101,7 @@ router.post(
try {
const createdField = await service.readOne(req.params.collection, field.field);
res.locals.payload = { data: createdField || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -138,7 +138,7 @@ router.patch(
results.push(updatedField);
res.locals.payload = { data: results || null };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -193,7 +193,7 @@ router.patch(
try {
const updatedField = await service.readOne(req.params.collection, req.params.field);
res.locals.payload = { data: updatedField || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -55,10 +55,6 @@ const multipartHandler = asyncHandler(async (req, res, next) => {
payload.title = formatTitle(path.parse(filename).name);
}
if (req.accountability?.user) {
payload.uploaded_by = req.accountability.user;
}
const payloadWithRequiredFields: Partial<File> & {
filename_download: string;
type: string;
@@ -77,7 +73,7 @@ const multipartHandler = asyncHandler(async (req, res, next) => {
const primaryKey = await service.uploadOne(fileStream, payloadWithRequiredFields, existingPrimaryKey);
savedFiles.push(primaryKey);
tryDone();
} catch (error) {
} catch (error: any) {
busboy.emit('error', error);
}
});
@@ -131,7 +127,7 @@ router.post(
data: record,
};
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -168,7 +164,7 @@ router.post(
try {
const record = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: record || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -246,7 +242,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -273,7 +269,7 @@ router.patch(
try {
const record = await service.readOne(req.params.pk, req.sanitizedQuery);
res.locals.payload = { data: record || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -37,7 +37,7 @@ router.post(
const record = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: record };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -114,7 +114,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -140,7 +140,7 @@ router.patch(
try {
const record = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: record || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -42,7 +42,7 @@ router.post(
const result = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: result || null };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -97,10 +97,6 @@ router.get(
asyncHandler(async (req, res, next) => {
if (req.params.collection.startsWith('directus_')) throw new ForbiddenException();
if (req.singleton) {
throw new RouteNotFoundException(req.path);
}
const service = new ItemsService(req.collection, {
accountability: req.accountability,
schema: req.schema,
@@ -111,6 +107,7 @@ router.get(
res.locals.payload = {
data: result || null,
};
return next();
}),
respond
@@ -147,7 +144,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -180,7 +177,7 @@ router.patch(
try {
const result = await service.readOne(updatedPrimaryKey, req.sanitizedQuery);
res.locals.payload = { data: result || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -20,7 +20,7 @@ const notFound: RequestHandler = async (req, res, next) => {
return next();
}
next(new RouteNotFoundException(req.path));
} catch (err) {
} catch (err: any) {
next(err);
}
};

View File

@@ -37,7 +37,7 @@ router.post(
const item = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: item };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -116,7 +116,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -142,7 +142,7 @@ router.patch(
try {
const item = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: item || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -37,7 +37,7 @@ router.post(
const record = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: record };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -115,7 +115,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -141,7 +141,7 @@ router.patch(
try {
const record = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: record };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -90,7 +90,7 @@ router.post(
try {
const createdRelation = await service.readOne(req.body.collection, req.body.field);
res.locals.payload = { data: createdRelation || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -135,7 +135,7 @@ router.patch(
try {
const updatedField = await service.readOne(req.params.collection, req.params.field);
res.locals.payload = { data: updatedField || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -37,7 +37,7 @@ router.post(
const item = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: item };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -106,7 +106,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -132,7 +132,7 @@ router.patch(
try {
const item = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: item || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -35,7 +35,7 @@ router.patch(
try {
const record = await service.readSingleton(req.sanitizedQuery);
res.locals.payload = { data: record || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -38,7 +38,7 @@ router.post(
const item = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: item };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -86,7 +86,7 @@ router.get(
try {
const item = await service.readOne(req.accountability.user, req.sanitizedQuery);
res.locals.payload = { data: item || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
res.locals.payload = { data: { id: req.accountability.user } };
return next();
@@ -177,7 +177,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -203,7 +203,7 @@ router.patch(
try {
const item = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: item || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -9,6 +9,7 @@ import { RevisionsService, UtilsService, ImportService } from '../services';
import asyncHandler from '../utils/async-handler';
import Busboy from 'busboy';
import { flushCaches } from '../cache';
import { generateHash } from '../utils/generate-hash';
const router = Router();
@@ -31,7 +32,7 @@ router.post(
throw new InvalidPayloadException(`"string" is required`);
}
const hash = await argon2.hash(req.body.string);
const hash = await generateHash(req.body.string);
return res.json({ data: hash });
})
@@ -103,7 +104,7 @@ router.post(
busboy.on('file', async (fieldname, fileStream, filename, encoding, mimetype) => {
try {
await service.import(req.params.collection, mimetype, fileStream);
} catch (err) {
} catch (err: any) {
return next(err);
}

View File

@@ -37,7 +37,7 @@ router.post(
const item = await service.readOne(savedKeys[0], req.sanitizedQuery);
res.locals.payload = { data: item };
}
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -106,7 +106,7 @@ router.patch(
try {
const result = await service.readMany(keys, req.sanitizedQuery);
res.locals.payload = { data: result };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}
@@ -132,7 +132,7 @@ router.patch(
try {
const item = await service.readOne(primaryKey, req.sanitizedQuery);
res.locals.payload = { data: item || null };
} catch (error) {
} catch (error: any) {
if (error instanceof ForbiddenException) {
return next();
}

View File

@@ -14,6 +14,7 @@ export function getGeometryHelper(): KnexSpatial {
mariadb: KnexSpatial_MySQL,
sqlite3: KnexSpatial,
pg: KnexSpatial_PG,
postgres: KnexSpatial_PG,
redshift: KnexSpatial_Redshift,
mssql: KnexSpatial_MSSQL,
oracledb: KnexSpatial_Oracle,

View File

@@ -8,6 +8,7 @@ import { validateEnv } from '../utils/validate-env';
import fse from 'fs-extra';
import path from 'path';
import { merge } from 'lodash';
import { promisify } from 'util';
let database: Knex | null = null;
let inspector: ReturnType<typeof SchemaInspector> | null = null;
@@ -22,6 +23,7 @@ export default function getDatabase(): Knex {
'DB_SEARCH_PATH',
'DB_CONNECTION_STRING',
'DB_POOL',
'DB_EXCLUDE_TABLES',
]);
const poolConfig = getConfigFromEnv('DB_POOL');
@@ -54,7 +56,12 @@ export default function getDatabase(): Knex {
connection: env.DB_CONNECTION_STRING || connectionConfig,
log: {
warn: (msg) => {
// Ignore warnings about returning not being supported in some DBs
if (msg.startsWith('.returning()')) return;
// Ignore warning about MySQL not supporting TRX for DDL
if (msg.startsWith('Transaction was implicitly committed, do not mix transactions and DDL with MySQL')) return;
return logger.warn(msg);
},
error: (msg) => logger.error(msg),
@@ -66,8 +73,14 @@ export default function getDatabase(): Knex {
if (env.DB_CLIENT === 'sqlite3') {
knexConfig.useNullAsDefault = true;
poolConfig.afterCreate = (conn: any, cb: any) => {
conn.run('PRAGMA foreign_keys = ON', cb);
poolConfig.afterCreate = async (conn: any, callback: any) => {
logger.trace('Enabling SQLite Foreign Keys support...');
const run = promisify(conn.run.bind(conn));
await run('PRAGMA foreign_keys = ON');
callback(null, conn);
};
}
@@ -111,7 +124,7 @@ export async function hasDatabaseConnection(database?: Knex): Promise<boolean> {
database = database ?? getDatabase();
try {
if (env.DB_CLIENT === 'oracledb') {
if (getDatabaseClient(database) === 'oracle') {
await database.raw('select 1 from DUAL');
} else {
await database.raw('SELECT 1');
@@ -123,28 +136,48 @@ export async function hasDatabaseConnection(database?: Knex): Promise<boolean> {
}
}
export async function validateDBConnection(database?: Knex): Promise<void> {
export async function validateDatabaseConnection(database?: Knex): Promise<void> {
database = database ?? getDatabase();
try {
if (env.DB_CLIENT === 'oracledb') {
if (getDatabaseClient(database) === 'oracle') {
await database.raw('select 1 from DUAL');
} else {
await database.raw('SELECT 1');
}
} catch (error) {
} catch (error: any) {
logger.error(`Can't connect to the database.`);
logger.error(error);
process.exit(1);
}
}
/**
 * Figure out which database vendor the given Knex instance is connected to.
 *
 * Detection relies on the constructor name of the underlying Knex client
 * object; both `Client_Oracledb` and `Client_Oracle` resolve to 'oracle'.
 *
 * @param database - Knex instance to inspect. Falls back to the shared connection.
 * @returns One of the supported vendor identifiers.
 * @throws Error when the client constructor name isn't recognized.
 */
export function getDatabaseClient(database?: Knex): 'mysql' | 'postgres' | 'sqlite' | 'oracle' | 'mssql' {
	const db = database ?? getDatabase();
	const clientName = db.client.constructor.name;

	if (clientName === 'Client_MySQL') return 'mysql';
	if (clientName === 'Client_PG') return 'postgres';
	if (clientName === 'Client_SQLite3') return 'sqlite';
	if (clientName === 'Client_Oracledb' || clientName === 'Client_Oracle') return 'oracle';
	if (clientName === 'Client_MSSQL') return 'mssql';

	throw new Error(`Couldn't extract database client`);
}
/**
 * Check whether Directus has been installed into the connected database,
 * using the presence of the `directus_collections` table as the indicator.
 *
 * @returns true when the `directus_collections` table exists.
 */
export async function isInstalled(): Promise<boolean> {
const inspector = getSchemaInspector();
// The existence of a directus_collections table alone isn't a "proper" check to see if everything
// is installed correctly of course, but it's safe enough to assume that this collection only
// exists when Directus is properly installed.
return await inspector.hasTable('directus_collections');
}
@@ -173,9 +206,45 @@ export async function validateMigrations(): Promise<boolean> {
);
return requiredVersions.every((version) => completedVersions.includes(version));
} catch (error) {
} catch (error: any) {
logger.error(`Database migrations cannot be found`);
logger.error(error);
throw process.exit(1);
}
}
/**
* These database extensions should be optional, so we don't throw or return any problem states when they don't
*/
export async function validateDatabaseExtensions(): Promise<void> {
const database = getDatabase();
const databaseClient = getDatabaseClient(database);
if (databaseClient === 'postgres') {
let available = false;
let installed = false;
const exists = await database.raw(`SELECT name FROM pg_available_extensions WHERE name = 'postgis';`);
if (exists.rows.length > 0) {
available = true;
}
if (available) {
try {
await database.raw(`SELECT PostGIS_version();`);
installed = true;
} catch {
installed = false;
}
}
if (available === false) {
logger.warn(`PostGIS isn't installed. Geometry type support will be limited.`);
} else if (available === true && installed === false) {
logger.warn(
`PostGIS is installed, but hasn't been activated on this database. Geometry type support will be limited.`
);
}
}
}

View File

@@ -68,7 +68,7 @@ export async function up(knex: Knex): Promise<void> {
await knex(constraint.many_collection)
.update({ [constraint.many_field]: null })
.whereIn(currentPrimaryKeyField, ids);
} catch (err) {
} catch (err: any) {
logger.error(
`${constraint.many_collection}.${constraint.many_field} contains illegal foreign keys which couldn't be set to NULL. Please fix these references and rerun this migration to complete the upgrade.`
);
@@ -111,7 +111,7 @@ export async function up(knex: Knex): Promise<void> {
builder.onDelete('SET NULL');
}
});
} catch (err) {
} catch (err: any) {
logger.warn(
`Couldn't add foreign key constraint for ${constraint.many_collection}.${constraint.many_field}<->${constraint.one_collection}`
);
@@ -140,7 +140,7 @@ export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(relation.many_collection, (table) => {
table.dropForeign([relation.many_field]);
});
} catch (err) {
} catch (err: any) {
logger.warn(
`Couldn't drop foreign key constraint for ${relation.many_collection}.${relation.many_field}<->${relation.one_collection}`
);

View File

@@ -99,7 +99,7 @@ export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(update.table, (table) => {
table.dropForeign([constraint.column], existingForeignKey?.constraint_name || undefined);
});
} catch (err) {
} catch (err: any) {
logger.warn(`Couldn't drop foreign key ${update.table}.${constraint.column}->${constraint.references}`);
logger.warn(err);
}
@@ -114,7 +114,7 @@ export async function up(knex: Knex): Promise<void> {
// Knex uses a default convention for index names: `table_column_type`
table.dropIndex([constraint.column], `${update.table}_${constraint.column}_foreign`);
});
} catch (err) {
} catch (err: any) {
logger.warn(
`Couldn't clean up index for foreign key ${update.table}.${constraint.column}->${constraint.references}`
);
@@ -126,7 +126,7 @@ export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(update.table, (table) => {
table.foreign(constraint.column).references(constraint.references).onDelete(constraint.on_delete);
});
} catch (err) {
} catch (err: any) {
logger.warn(`Couldn't add foreign key to ${update.table}.${constraint.column}->${constraint.references}`);
logger.warn(err);
}
@@ -141,7 +141,7 @@ export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(update.table, (table) => {
table.dropForeign([constraint.column]);
});
} catch (err) {
} catch (err: any) {
logger.warn(`Couldn't drop foreign key ${update.table}.${constraint.column}->${constraint.references}`);
logger.warn(err);
}
@@ -156,7 +156,7 @@ export async function down(knex: Knex): Promise<void> {
// Knex uses a default convention for index names: `table_column_type`
table.dropIndex([constraint.column], `${update.table}_${constraint.column}_foreign`);
});
} catch (err) {
} catch (err: any) {
logger.warn(
`Couldn't clean up index for foreign key ${update.table}.${constraint.column}->${constraint.references}`
);
@@ -168,7 +168,7 @@ export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(update.table, (table) => {
table.foreign(constraint.column).references(constraint.references);
});
} catch (err) {
} catch (err: any) {
logger.warn(`Couldn't add foreign key to ${update.table}.${constraint.column}->${constraint.references}`);
logger.warn(err);
}

View File

@@ -14,7 +14,7 @@ export async function up(knex: Knex): Promise<void> {
if (options.icon) newOptions.headerIcon = options.icon;
if (options.color) newOptions.headerColor = options.color;
} catch (err) {
} catch (err: any) {
logger.warn(`Couldn't convert previous options from field ${dividerGroup.collection}.${dividerGroup.field}`);
logger.warn(err);
}
@@ -27,7 +27,7 @@ export async function up(knex: Knex): Promise<void> {
options: JSON.stringify(newOptions),
})
.where('id', '=', dividerGroup.id);
} catch (err) {
} catch (err: any) {
logger.warn(`Couldn't update ${dividerGroup.collection}.${dividerGroup.field} to new group interface`);
logger.warn(err);
}

View File

@@ -0,0 +1,13 @@
import { Knex } from 'knex';
/**
 * Migration (up): drop the `limit` column from `directus_permissions`.
 *
 * @param knex - Knex connection used to run the schema change.
 */
export async function up(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_permissions', (table) => table.dropColumn('limit'));
}
/**
 * Migration (down): re-create the `limit` column on `directus_permissions`
 * as an unsigned integer.
 *
 * @param knex - Knex connection used to run the schema change.
 */
export async function down(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_permissions', (table) => table.integer('limit').unsigned());
}

View File

@@ -0,0 +1,13 @@
import { Knex } from 'knex';
/**
 * Migration (up): make `directus_webhooks.collections` required (NOT NULL).
 *
 * @param knex - Knex connection used to run the schema change.
 */
export async function up(knex: Knex): Promise<void> {
	await knex.schema.alterTable('directus_webhooks', (table) => {
		const collections = table.text('collections');
		collections.notNullable();
		collections.alter();
	});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_webhooks', (table) => {
table.text('collections').alter();
});
}

View File

@@ -1,10 +1,9 @@
/* eslint-disable no-console */
import formatTitle from '@directus/format-title';
import fse from 'fs-extra';
import { Knex } from 'knex';
import path from 'path';
import env from '../../env';
import logger from '../../logger';
import { Migration } from '../../types';
export default async function run(database: Knex, direction: 'up' | 'down' | 'latest'): Promise<void> {
@@ -62,7 +61,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
const { up } = require(nextVersion.file);
console.log(`Applying ${nextVersion.name}...`);
logger.info(`Applying ${nextVersion.name}...`);
await up(database);
await database.insert({ version: nextVersion.version, name: nextVersion.name }).into('directus_migrations');
@@ -83,7 +82,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
const { down } = require(migration.file);
console.log(`Undoing ${migration.name}...`);
logger.info(`Undoing ${migration.name}...`);
await down(database);
await database('directus_migrations').delete().where({ version: migration.version });
@@ -94,7 +93,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
if (migration.completed === false) {
const { up } = require(migration.file);
console.log(`Applying ${migration.name}...`);
logger.info(`Applying ${migration.name}...`);
await up(database);
await database.insert({ version: migration.version, name: migration.name }).into('directus_migrations');

View File

@@ -8,7 +8,6 @@ const defaults: Partial<Permission> = {
validation: null,
presets: null,
fields: ['*'],
limit: null,
system: true,
};

View File

@@ -12,48 +12,48 @@ defaults:
data:
- collection: directus_activity
note: Accountability logs for all events
note: $t:directus_collection.directus_activity
- collection: directus_collections
icon: list_alt
note: Additional collection configuration and metadata
note: $t:directus_collection.directus_collections
- collection: directus_fields
icon: input
note: Additional field configuration and metadata
note: $t:directus_collection.directus_fields
- collection: directus_files
icon: folder
note: Metadata for all managed file assets
note: $t:directus_collection.directus_files
display_template: '{{ $thumbnail }} {{ title }}'
- collection: directus_folders
note: Provides virtual directories for files
note: $t:directus_collection.directus_folders
display_template: '{{ name }}'
- collection: directus_migrations
note: What version of the database you're using
note: $t:directus_collection.directus_migrations
- collection: directus_permissions
icon: admin_panel_settings
note: Access permissions for each role
note: $t:directus_collection.directus_permissions
- collection: directus_presets
icon: bookmark_border
note: Presets for collection defaults and bookmarks
note: $t:directus_collection.directus_presets
accountability: null
- collection: directus_relations
icon: merge_type
note: Relationship configuration and metadata
note: $t:directus_collection.directus_relations
- collection: directus_revisions
note: Data snapshots for all activity
note: $t:directus_collection.directus_revisions
- collection: directus_roles
icon: supervised_user_circle
note: Permission groups for system users
note: $t:directus_collection.directus_roles
- collection: directus_sessions
note: User session information
note: $t:directus_collection.directus_sessions
- collection: directus_settings
singleton: true
note: Project configuration options
note: $t:directus_collection.directus_settings
- collection: directus_users
archive_field: status
archive_value: archived
unarchive_value: draft
icon: people_alt
note: System users for the platform
note: $t:directus_collection.directus_users
display_template: '{{ first_name }} {{ last_name }}'
- collection: directus_webhooks
note: Configuration for event-based HTTP requests
note: $t:directus_collection.directus_webhooks

View File

@@ -13,19 +13,19 @@ fields:
defaultForeground: 'var(--foreground-normal)'
defaultBackground: 'var(--background-normal-alt)'
choices:
- text: Create
- text: $t:field_options.directus_activity.create
value: create
foreground: 'var(--primary)'
background: 'var(--primary-25)'
- text: Update
- text: $t:field_options.directus_activity.update
value: update
foreground: 'var(--blue)'
background: 'var(--blue-25)'
- text: Delete
- text: $t:field_options.directus_activity.delete
value: delete
foreground: 'var(--danger)'
background: 'var(--danger-25)'
- text: Login
- text: $t:field_options.directus_activity.login
value: authenticate
foreground: 'var(--purple)'
background: 'var(--purple-25)'

View File

@@ -8,7 +8,7 @@ fields:
interface: presentation-divider
options:
icon: box
title: Collection Setup
title: $t:field_options.directus_collections.collection_setup
width: full
- field: collection
@@ -32,7 +32,7 @@ fields:
- field: color
interface: select-color
options:
placeholder: Choose a color...
placeholder: $t:field_options.directus_collections.note_placeholder
width: half
- field: display_template
@@ -45,7 +45,7 @@ fields:
special: boolean
interface: boolean
options:
label: Hide within the App
label: $t:field_options.directus_collections.hidden_label
width: half
- field: singleton
@@ -102,7 +102,7 @@ fields:
interface: presentation-divider
options:
icon: archive
title: Archive
title: $t:field_options.directus_collections.archive_divider
width: full
- field: archive_field
@@ -110,14 +110,14 @@ fields:
options:
collectionField: collection
allowNone: true
placeholder: Choose a field...
placeholder: $t:field_options.directus_collections.archive_field
width: half
- field: archive_app_filter
interface: boolean
special: boolean
options:
label: Enable App Archive Filter
label: $t:field_options.directus_collections.archive_app_filter
width: half
- field: archive_value
@@ -125,7 +125,7 @@ fields:
options:
font: monospace
iconRight: archive
placeholder: Value set when archiving...
placeholder: $t:field_options.directus_collections.archive_value
width: half
- field: unarchive_value
@@ -133,7 +133,7 @@ fields:
options:
font: monospace
iconRight: unarchive
placeholder: Value set when unarchiving...
placeholder: $t:field_options.directus_collections.unarchive_value
width: half
- field: sort_divider
@@ -143,14 +143,14 @@ fields:
interface: presentation-divider
options:
icon: sort
title: Sort
title: $t:field_options.directus_collections.divider
width: full
- field: sort_field
interface: system-field
options:
collectionField: collection
placeholder: Choose a field...
placeholder: $t:field_options.directus_collections.sort_field
typeAllowList:
- float
- decimal
@@ -165,7 +165,7 @@ fields:
interface: presentation-divider
options:
icon: admin_panel_settings
title: Accountability
title: $t:field_options.directus_collections.accountability_divider
width: full
- field: accountability

View File

@@ -10,14 +10,14 @@ fields:
interface: input
options:
iconRight: title
placeholder: A unique title...
placeholder: $t:field_options.directus_files.title
width: full
- field: description
interface: input-multiline
width: full
options:
placeholder: An optional description...
placeholder: $t:field_options.directus_files.description
- field: tags
interface: tags
@@ -35,7 +35,7 @@ fields:
interface: input
options:
iconRight: place
placeholder: An optional location...
placeholder: $t:field_options.directus_files.location
width: half
- field: storage
@@ -49,7 +49,7 @@ fields:
interface: presentation-divider
options:
icon: insert_drive_file
title: File Naming
title: $t:field_options.directus_files.storage_divider
special:
- alias
- no-data
@@ -59,7 +59,7 @@ fields:
interface: input
options:
iconRight: publish
placeholder: Name on disk storage...
placeholder: $t:field_options.directus_files.filename_disk
readonly: true
width: half
@@ -67,7 +67,7 @@ fields:
interface: input
options:
iconRight: get_app
placeholder: Name when downloading...
placeholder: $t:field_options.directus_files.filename_download
width: half
- field: metadata
@@ -106,6 +106,7 @@ fields:
display: user
width: half
hidden: true
special: user-created
- field: uploaded_on
display: datetime

View File

@@ -15,9 +15,6 @@ fields:
- field: role
width: half
- field: limit
width: half
- field: collection
width: half

View File

@@ -9,7 +9,7 @@ fields:
- field: name
interface: input
options:
placeholder: The unique name for this role...
placeholder: $t:field_options.directus_roles.name
width: half
- field: icon
@@ -20,7 +20,7 @@ fields:
- field: description
interface: input
options:
placeholder: A description of this role...
placeholder: $t:field_options.directus_roles.description
width: full
- field: app_access
@@ -36,7 +36,7 @@ fields:
- field: ip_access
interface: tags
options:
placeholder: Add allowed IP addresses, leave empty to allow all...
placeholder: $t:field_options.directus_roles.ip_access
special: csv
width: full
@@ -60,13 +60,13 @@ fields:
template: '{{ name }}'
addLabel: Add New Module...
fields:
- name: Icon
- name: $t:field_options.directus_roles.fields.icon_name
field: icon
type: string
meta:
interface: select-icon
width: half
- name: Name
- name: $t:field_options.directus_roles.fields.name_name
field: name
type: string
meta:
@@ -74,8 +74,8 @@ fields:
width: half
options:
iconRight: title
placeholder: Enter a title...
- name: Link
placeholder:
- name: $t:field_options.directus_roles.fields.link_name
field: link
type: string
meta:
@@ -83,7 +83,7 @@ fields:
width: full
options:
iconRight: link
placeholder: Relative or absolute URL...
placeholder: $t:field_options.directus_roles.fields.link_placeholder
special: json
width: full
@@ -91,9 +91,9 @@ fields:
interface: list
options:
template: '{{ group_name }}'
addLabel: Add New Group...
addLabel: $t:field_options.directus_roles.collection_list.group_name_addLabel
fields:
- name: Group Name
- name: $t:field_options.directus_roles.collection_list.fields.group_name
field: group_name
type: string
meta:
@@ -101,10 +101,10 @@ fields:
interface: input
options:
iconRight: title
placeholder: Label this group...
placeholder: $t:field_options.directus_roles.collection_list.fields.group_placeholder
schema:
is_nullable: false
- name: Type
- name: $t:field_options.directus_roles.collection_list.fields.type_name
field: accordion
type: string
schema:
@@ -115,21 +115,21 @@ fields:
options:
choices:
- value: always_open
text: Always Open
text: $t:field_options.directus_roles.collection_list.fields.choices_always
- value: start_open
text: Start Open
text: $t:field_options.directus_roles.collection_list.fields.choices_start_open
- value: start_collapsed
text: Start Collapsed
- name: Collections
text: $t:field_options.directus_roles.collection_list.fields.choices_start_collapsed
- name: $t:field_options.directus_roles.collections_name
field: collections
type: JSON
meta:
interface: list
options:
addLabel: Add New Collection...
addLabel: $t:field_options.directus_roles.collections_addLabel
template: '{{ collection }}'
fields:
- name: Collection
- name: $t:field_options.directus_roles.collections_name
field: collection
type: string
meta:

View File

@@ -8,7 +8,7 @@ fields:
interface: input
options:
iconRight: title
placeholder: My project...
placeholder: $t:field_options.directus_settings.project_name_placeholder
translations:
language: en-US
translations: Name
@@ -26,7 +26,7 @@ fields:
- field: project_color
interface: select-color
note: Login & Logo Background
note: $t:field_options.directus_settings.project_logo_note
translations:
language: en-US
translations: Brand Color
@@ -67,7 +67,7 @@ fields:
- field: public_note
interface: input-multiline
options:
placeholder: A short, public message that supports markdown formatting...
placeholder: $t:field_options.directus_settings.public_note_placeholder
width: full
- field: security_divider
@@ -85,11 +85,11 @@ fields:
options:
choices:
- value: null
text: None  Not Recommended
text: $t:field_options.directus_settings.auth_password_policy.none_text
- value: '/^.{8,}$/'
text: Weak Minimum 8 Characters
text: $t:field_options.directus_settings.auth_password_policy.weak_text
- value: "/(?=^.{8,}$)(?=.*\\d)(?=.*[a-z])(?=.*[A-Z])(?=.*[!@#$%^&*()_+}{';'?>.<,])(?!.*\\s).*$/"
text: Strong Upper / Lowercase / Numbers / Special
text: $t:field_options.directus_settings.auth_password_policy.strong_text
allowOther: true
width: half
@@ -135,13 +135,13 @@ fields:
options:
choices:
- value: contain
text: Contain (preserve aspect ratio)
text: $t:field_options.directus_settings.storage_asset_presets.fit.contain_text
- value: cover
text: Cover (forces exact size)
text: $t:field_options.directus_settings.storage_asset_presets.fit.cover_text
- value: inside
text: Fit inside
text: $t:field_options.directus_settings.storage_asset_presets.fit.fit_text
- value: outside
text: Fit outside
text: $t:field_options.directus_settings.storage_asset_presets.fit.outside_text
width: half
- field: width
name: $t:width
@@ -181,7 +181,7 @@ fields:
interface: boolean
width: half
options:
label: Don't upscale images
label: $t:no_upscale
- field: format
name: Format
type: string
@@ -203,15 +203,14 @@ fields:
text: Tiff
width: half
- field: transforms
name: Additional Transformations
name: $t:field_options.directus_settings.additional_transforms
type: json
schema:
is_nullable: false
default_value: []
meta:
note:
The Sharp method name and its arguments. See https://sharp.pixelplumbing.com/api-constructor for more
information.
note: $t:field_options.directus_settings.transforms_note
interface: json
options:
template: >
@@ -279,8 +278,8 @@ fields:
interface: input
options:
icon: key
title: Mapbox Access Token
placeholder: pk.eyJ1Ijo.....
title: $t:field_options.directus_settings.mapbox_key
placeholder: $t:field_options.directus_settings.mapbox_placeholder
iconLeft: vpn_key
font: monospace
width: half
@@ -306,11 +305,11 @@ fields:
options:
choices:
- value: raster
text: Raster
text: $t:field_options.directus_settings.basemaps_raster
- value: tile
text: Raster TileJSON
text: $t:field_options.directus_settings.basemaps_tile
- value: style
text: Mapbox Style
text: $t:field_options.directus_settings.basemaps_style
- field: url
name: $t:url
schema:
@@ -319,3 +318,17 @@ fields:
interface: text-input
options:
placeholder: http://{a-c}.tile.openstreetmap.org/{z}/{x}/{y}.png
- field: tileSize
name: $t:tile_size
schema:
is_nullable: true
meta:
interface: input
options:
placeholder: '512'
conditions:
- name: typeNeqRaster
rule:
type:
_neq: 'raster'
hidden: true

View File

@@ -18,7 +18,7 @@ const emitter = new EventEmitter2({
export async function emitAsyncSafe(name: string, ...args: any[]): Promise<any> {
try {
return await emitter.emitAsync(name, ...args);
} catch (err) {
} catch (err: any) {
logger.warn(`An error was thrown while executing hook "${name}"`);
logger.warn(err);
}

View File

@@ -19,6 +19,8 @@ const defaults: Record<string, any> = {
PUBLIC_URL: '/',
MAX_PAYLOAD_SIZE: '100kb',
DB_EXCLUDE_TABLES: 'spatial_ref_sys',
STORAGE_LOCATIONS: 'local',
STORAGE_LOCAL_DRIVER: 'local',
STORAGE_LOCAL_ROOT: './uploads',

View File

@@ -1,4 +1,6 @@
import getDatabase from '../../database';
import { compact, last } from 'lodash';
import { getDatabaseClient } from '../../database';
import emitter from '../../emitter';
import { extractError as mssql } from './dialects/mssql';
import { extractError as mysql } from './dialects/mysql';
import { extractError as oracle } from './dialects/oracle';
@@ -16,22 +18,29 @@ import { SQLError } from './dialects/types';
* - Value Too Long
*/
export async function translateDatabaseError(error: SQLError): Promise<any> {
const database = getDatabase();
const client = getDatabaseClient();
let defaultError: any;
switch (database.client.constructor.name) {
case 'Client_MySQL':
return mysql(error);
case 'Client_PG':
return postgres(error);
case 'Client_SQLite3':
return sqlite(error);
case 'Client_Oracledb':
case 'Client_Oracle':
return oracle(error);
case 'Client_MSSQL':
return await mssql(error);
default:
return error;
switch (client) {
case 'mysql':
defaultError = mysql(error);
break;
case 'postgres':
defaultError = postgres(error);
break;
case 'sqlite':
defaultError = sqlite(error);
break;
case 'oracle':
defaultError = oracle(error);
break;
case 'mssql':
defaultError = await mssql(error);
break;
}
const hookResult = await emitter.emitAsync('database.error', defaultError, { client });
const hookError = Array.isArray(hookResult) ? last(compact(hookResult)) : hookResult;
return hookError || defaultError;
}

View File

@@ -21,7 +21,7 @@ import emitter from './emitter';
import env from './env';
import * as exceptions from './exceptions';
import logger from './logger';
import { HookRegisterFunction, EndpointRegisterFunction } from './types';
import { HookConfig, EndpointConfig } from './types';
import fse from 'fs-extra';
import { getSchema } from './utils/get-schema';
@@ -33,15 +33,18 @@ import { rollup } from 'rollup';
// @ts-expect-error
import virtual from '@rollup/plugin-virtual';
import alias from '@rollup/plugin-alias';
import { Url } from './utils/url';
import getModuleDefault from './utils/get-module-default';
let extensions: Extension[] = [];
let extensionBundles: Partial<Record<AppExtensionType, string>> = {};
const registeredHooks: string[] = [];
export async function initializeExtensions(): Promise<void> {
try {
await ensureExtensionDirs(env.EXTENSIONS_PATH, env.SERVE_APP ? EXTENSION_TYPES : API_EXTENSION_TYPES);
extensions = await getExtensions();
} catch (err) {
} catch (err: any) {
logger.warn(`Couldn't load extensions`);
logger.warn(err);
}
@@ -121,14 +124,15 @@ async function generateExtensionBundles() {
async function getSharedDepsMapping(deps: string[]) {
const appDir = await fse.readdir(path.join(resolvePackage('@directus/app'), 'dist'));
const adminUrl = env.PUBLIC_URL.endsWith('/') ? env.PUBLIC_URL + 'admin' : env.PUBLIC_URL + '/admin';
const depsMapping: Record<string, string> = {};
for (const dep of deps) {
const depName = appDir.find((file) => dep.replace(/\//g, '_') === file.substring(0, file.indexOf('.')));
if (depName) {
depsMapping[dep] = `${adminUrl}/${depName}`;
const depUrl = new Url(env.PUBLIC_URL).addPath('admin', depName);
depsMapping[dep] = depUrl.toString({ rootRelative: true });
} else {
logger.warn(`Couldn't find shared extension dependency "${dep}"`);
}
@@ -141,7 +145,7 @@ function registerHooks(hooks: Extension[]) {
for (const hook of hooks) {
try {
registerHook(hook);
} catch (error) {
} catch (error: any) {
logger.warn(`Couldn't register hook "${hook.name}"`);
logger.warn(error);
}
@@ -149,16 +153,18 @@ function registerHooks(hooks: Extension[]) {
function registerHook(hook: Extension) {
const hookPath = path.resolve(hook.path, hook.entrypoint || '');
const hookInstance: HookRegisterFunction | { default?: HookRegisterFunction } = require(hookPath);
const hookInstance: HookConfig | { default: HookConfig } = require(hookPath);
let register: HookRegisterFunction = hookInstance as HookRegisterFunction;
if (typeof hookInstance !== 'function') {
if (hookInstance.default) {
register = hookInstance.default;
}
// Make sure hooks are only registered once
if (registeredHooks.includes(hookPath)) {
return;
} else {
registeredHooks.push(hookPath);
}
const events = register({ services, exceptions, env, database: getDatabase(), getSchema });
const register = getModuleDefault(hookInstance);
const events = register({ services, exceptions, env, database: getDatabase(), logger, getSchema });
for (const [event, handler] of Object.entries(events)) {
if (event.startsWith('cron(')) {
@@ -180,7 +186,7 @@ function registerEndpoints(endpoints: Extension[], router: Router) {
for (const endpoint of endpoints) {
try {
registerEndpoint(endpoint);
} catch (error) {
} catch (error: any) {
logger.warn(`Couldn't register endpoint "${endpoint.name}"`);
logger.warn(error);
}
@@ -188,18 +194,16 @@ function registerEndpoints(endpoints: Extension[], router: Router) {
function registerEndpoint(endpoint: Extension) {
const endpointPath = path.resolve(endpoint.path, endpoint.entrypoint || '');
const endpointInstance: EndpointRegisterFunction | { default?: EndpointRegisterFunction } = require(endpointPath);
const endpointInstance: EndpointConfig | { default: EndpointConfig } = require(endpointPath);
let register: EndpointRegisterFunction = endpointInstance as EndpointRegisterFunction;
if (typeof endpointInstance !== 'function') {
if (endpointInstance.default) {
register = endpointInstance.default;
}
}
const mod = getModuleDefault(endpointInstance);
const register = typeof mod === 'function' ? mod : mod.handler;
const pathName = typeof mod === 'function' ? endpoint.name : mod.id;
const scopedRouter = express.Router();
router.use(`/${endpoint.name}/`, scopedRouter);
router.use(`/${pathName}`, scopedRouter);
register(scopedRouter, { services, exceptions, env, database: getDatabase(), getSchema });
register(scopedRouter, { services, exceptions, env, database: getDatabase(), logger, getSchema });
}
}

View File

@@ -43,7 +43,7 @@ export default function getMailer(): Transporter {
api_key: env.EMAIL_MAILGUN_API_KEY,
domain: env.EMAIL_MAILGUN_DOMAIN,
},
host: env.EMAIL_MAILGUN_HOST || 'https://api.mailgun.net',
host: env.EMAIL_MAILGUN_HOST || 'api.mailgun.net',
}) as any
);
} else {

View File

@@ -4,7 +4,7 @@ import getDatabase from '../database';
import env from '../env';
import { InvalidCredentialsException } from '../exceptions';
import asyncHandler from '../utils/async-handler';
import isJWT from '../utils/is-jwt';
import isDirectusJWT from '../utils/is-directus-jwt';
/**
* Verify the passed JWT and assign the user ID and role to `req`
@@ -23,12 +23,12 @@ const authenticate: RequestHandler = asyncHandler(async (req, res, next) => {
const database = getDatabase();
if (isJWT(req.token)) {
if (isDirectusJWT(req.token)) {
let payload: { id: string };
try {
payload = jwt.verify(req.token, env.SECRET as string) as { id: string };
} catch (err) {
payload = jwt.verify(req.token, env.SECRET as string, { issuer: 'directus' }) as { id: string };
} catch (err: any) {
if (err instanceof TokenExpiredError) {
throw new InvalidCredentialsException('Token expired.');
} else if (err instanceof JsonWebTokenError) {

View File

@@ -0,0 +1,76 @@
// Unit tests for the response-cache middleware: a cache hit must be served
// without invoking the route handler, and caching must be skipped when
// disabled, when the client opts out, or for non-GET methods.
import express from 'express';
import request from 'supertest';
import checkCacheMiddleware from './cache';

// Mock out the cache store and environment so no real cache backend is used.
// NOTE: jest.mock calls are hoisted above the imports by jest — keep them here.
jest.mock('../cache');
jest.mock('../env', () => ({
	CACHE_ENABLED: true,
	CACHE_NAMESPACE: 'test',
	CACHE_STORE: 'memory',
	CACHE_TTL: '5s',
	CACHE_CONTROL_S_MAXAGE: true,
}));

// Grab the mocked instances so individual tests can program/override them.
const { cache } = jest.requireMock('../cache');
const env = jest.requireMock('../env');

// Fallback route handler; only reached when the middleware does not respond
// from cache. Its call count is asserted to prove whether the cache was used.
const handler = jest.fn((req, res) => res.json({ data: 'Uncached value' }));

// Build a fresh app per test: middleware first, then the catch-all route.
const setup = () => express().use(checkCacheMiddleware).all('/items/test', handler);

// Reset mock call counts / one-shot resolved values between tests.
beforeEach(jest.clearAllMocks);

describe('cache middleware', () => {
	test('should return the cached response for a request', async () => {
		// The middleware reads two keys: first the payload, then its expiry time.
		cache.get.mockResolvedValueOnce({ data: 'Cached value' });
		cache.get.mockResolvedValueOnce(new Date().getTime() + 1000 * 60);
		const res = await request(setup()).get('/items/test').send();
		expect(res.body.data).toBe('Cached value');
		expect(res.headers['vary']).toBe('Origin, Cache-Control');
		// CACHE_CONTROL_S_MAXAGE is enabled above, so s-maxage must be present.
		expect(res.headers['cache-control']).toMatch(/public, max-age=\d+, s-maxage=\d+/);
		expect(handler).not.toHaveBeenCalled();
	});
	test('should call the handler when there is no cached value', async () => {
		// Cache miss: get resolves undefined, so the request falls through.
		cache.get.mockResolvedValueOnce(undefined);
		const res = await request(setup()).get('/items/test').send();
		expect(res.body.data).toBe('Uncached value');
		expect(cache.get).toHaveBeenCalledTimes(1);
		expect(handler).toHaveBeenCalledTimes(1);
	});
	// NOTE(review): "then" in this test title is likely a typo for "when".
	test('should not cache requests then the cache is disabled', async () => {
		// Temporarily disable the cache on the mocked env; restored below so
		// later tests still run with caching enabled.
		env.CACHE_ENABLED = false;
		const res = await request(setup()).get('/items/test').send();
		expect(res.body.data).toBe('Uncached value');
		expect(cache.get).not.toHaveBeenCalled();
		expect(handler).toHaveBeenCalledTimes(1);
		env.CACHE_ENABLED = true;
	});
	test('should not use cache when the "Cache-Control" header is set to "no-store"', async () => {
		// Client opt-out: the middleware must not even attempt a cache read.
		const res = await request(setup()).get('/items/test').set('Cache-Control', 'no-store').send();
		expect(res.body.data).toBe('Uncached value');
		expect(cache.get).not.toHaveBeenCalled();
		expect(handler).toHaveBeenCalledTimes(1);
	});
	test('should only cache get requests', async () => {
		// Mutating methods must bypass the cache entirely.
		const app = setup();
		await request(app).post('/items/test').send();
		await request(app).put('/items/test').send();
		await request(app).patch('/items/test').send();
		await request(app).delete('/items/test').send();
		expect(cache.get).not.toHaveBeenCalled();
		expect(handler).toHaveBeenCalledTimes(4);
	});
});

View File

@@ -23,7 +23,7 @@ const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next)
try {
cachedData = await cache.get(key);
} catch (err) {
} catch (err: any) {
logger.warn(err, `[cache] Couldn't read key ${key}. ${err.message}`);
return next();
}
@@ -33,7 +33,7 @@ const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next)
try {
cacheExpiryDate = (await cache.get(`${key}__expires_at`)) as number | null;
} catch (err) {
} catch (err: any) {
logger.warn(err, `[cache] Couldn't read key ${`${key}__expires_at`}. ${err.message}`);
return next();
}

View File

@@ -17,7 +17,7 @@ export const parseGraphQL: RequestHandler = asyncHandler(async (req, res, next)
if (req.method === 'GET') {
query = (req.query.query as string | undefined) || null;
if (req.params.variables) {
if (req.query.variables) {
try {
variables = JSON.parse(req.query.variables as string);
} catch {
@@ -40,7 +40,7 @@ export const parseGraphQL: RequestHandler = asyncHandler(async (req, res, next)
try {
document = parse(new Source(query));
} catch (err) {
} catch (err: any) {
throw new InvalidPayloadException(`GraphQL schema validation error.`, {
graphqlErrors: [err],
});

View File

@@ -18,7 +18,7 @@ if (env.RATE_LIMITER_ENABLED === true) {
checkRateLimit = asyncHandler(async (req, res, next) => {
try {
await rateLimiter.consume(req.ip, 1);
} catch (rateLimiterRes) {
} catch (rateLimiterRes: any) {
if (rateLimiterRes instanceof Error) throw rateLimiterRes;
res.set('Retry-After', String(rateLimiterRes.msBeforeNext / 1000));

View File

@@ -25,7 +25,7 @@ export const respond: RequestHandler = asyncHandler(async (req, res) => {
try {
await cache.set(key, res.locals.payload, ms(env.CACHE_TTL as string));
await cache.set(`${key}__expires_at`, Date.now() + ms(env.CACHE_TTL as string));
} catch (err) {
} catch (err: any) {
logger.warn(err, `[cache] Couldn't set key ${key}. ${err}`);
}

View File

@@ -65,13 +65,13 @@ export class AuthenticationService {
const { email, password, ip, userAgent, otp } = options;
let user = await this.knex
const user = await this.knex
.select('id', 'password', 'role', 'tfa_secret', 'status')
.from('directus_users')
.whereRaw('LOWER(??) = ?', ['email', email.toLowerCase()])
.first();
const updatedUser = await emitter.emitAsync('auth.login.before', options, {
const updatedOptions = await emitter.emitAsync('auth.login.before', options, {
event: 'auth.login.before',
action: 'login',
schema: this.schema,
@@ -82,8 +82,8 @@ export class AuthenticationService {
database: this.knex,
});
if (updatedUser) {
user = updatedUser.length > 0 ? updatedUser.reduce((val, acc) => merge(acc, val)) : user;
if (updatedOptions) {
options = updatedOptions.length > 0 ? updatedOptions.reduce((acc, val) => merge(acc, val), {}) : options;
}
const emitStatus = (status: 'fail' | 'success') => {
@@ -121,7 +121,7 @@ export class AuthenticationService {
try {
await loginAttemptsLimiter.consume(user.id);
} catch (err) {
} catch {
await this.knex('directus_users').update({ status: 'suspended' }).where({ id: user.id });
user.status = 'suspended';
@@ -171,6 +171,7 @@ export class AuthenticationService {
*/
const accessToken = jwt.sign(payload, env.SECRET as string, {
expiresIn: env.ACCESS_TOKEN_TTL,
issuer: 'directus',
});
const refreshToken = nanoid(64);
@@ -237,6 +238,7 @@ export class AuthenticationService {
const accessToken = jwt.sign({ id: record.id }, env.SECRET as string, {
expiresIn: env.ACCESS_TOKEN_TTL,
issuer: 'directus',
});
const newRefreshToken = nanoid(64);

View File

@@ -186,15 +186,6 @@ export class AuthorizationService {
}
if (query.filter._and.length === 0) delete query.filter._and;
if (permissions.limit && query.limit && query.limit > permissions.limit) {
throw new ForbiddenException();
}
// Default to the permissions limit if limit hasn't been set
if (permissions.limit && !query.limit) {
query.limit = permissions.limit;
}
}
}
}
@@ -215,7 +206,6 @@ export class AuthorizationService {
action,
permissions: {},
validation: {},
limit: null,
fields: ['*'],
presets: {},
};
@@ -316,7 +306,7 @@ export class AuthorizationService {
};
if (Array.isArray(pk)) {
const result = await itemsService.readMany(pk, query, { permissionsAction: action });
const result = await itemsService.readMany(pk, { ...query, limit: pk.length }, { permissionsAction: action });
if (!result) throw new ForbiddenException();
if (result.length !== pk.length) throw new ForbiddenException();
} else {

View File

@@ -414,15 +414,6 @@ export class CollectionsService {
if (relation.related_collection === collectionKey) {
await fieldsService.deleteField(relation.collection, relation.field);
}
const isM2O = relation.collection === collectionKey;
// Delete any fields that have a relationship to/from the current collection
if (isM2O && relation.related_collection && relation.meta?.one_field) {
await fieldsService.deleteField(relation.related_collection!, relation.meta.one_field);
} else {
await fieldsService.deleteField(relation.collection, relation.field);
}
}
const m2aRelationsThatIncludeThisCollection = this.schema.relations.filter((relation) => {

View File

@@ -277,7 +277,7 @@ export class FieldsService {
if (!field.schema) return;
this.addColumnToTable(table, field, existingColumn);
});
} catch (err) {
} catch (err: any) {
throw await translateDatabaseError(err);
}
}

View File

@@ -63,7 +63,7 @@ export class FilesService extends ItemsService {
try {
await storage.disk(data.storage).put(payload.filename_disk, stream, payload.type);
} catch (err) {
} catch (err: any) {
logger.warn(`Couldn't save file ${payload.filename_disk}`);
logger.warn(err);
throw new ServiceUnavailableException(`Couldn't save file ${payload.filename_disk}`, { service: 'files' });
@@ -88,7 +88,7 @@ export class FilesService extends ItemsService {
try {
payload.metadata = await exifr.parse(buffer.content, {
icc: true,
icc: false,
iptc: true,
ifd1: true,
interop: true,
@@ -105,7 +105,7 @@ export class FilesService extends ItemsService {
if (payload.metadata?.iptc?.Keywords) {
payload.tags = payload.metadata.iptc.Keywords;
}
} catch (err) {
} catch (err: any) {
logger.warn(`Couldn't extract metadata from file`);
logger.warn(err);
}
@@ -156,7 +156,7 @@ export class FilesService extends ItemsService {
fileResponse = await axios.get<NodeJS.ReadableStream>(importURL, {
responseType: 'stream',
});
} catch (err) {
} catch (err: any) {
logger.warn(`Couldn't fetch file from url "${importURL}"`);
logger.warn(err);
throw new ServiceUnavailableException(`Couldn't fetch file from url "${importURL}"`, {

View File

@@ -74,6 +74,7 @@ import { SpecificationService } from './specifications';
import { UsersService } from './users';
import { UtilsService } from './utils';
import { WebhooksService } from './webhooks';
import { generateHash } from '../utils/generate-hash';
const GraphQLVoid = new GraphQLScalarType({
name: 'Void',
@@ -157,7 +158,7 @@ export class GraphQLService {
variableValues: variables,
operationName,
});
} catch (err) {
} catch (err: any) {
throw new InvalidPayloadException('GraphQL execution error.', { graphqlErrors: [err.message] });
}
@@ -1169,8 +1170,8 @@ export class GraphQLService {
return { ids: keys };
}
}
} catch (err) {
this.formatError(err);
} catch (err: any) {
return this.formatError(err);
}
}
@@ -1206,7 +1207,7 @@ export class GraphQLService {
}
return true;
} catch (err) {
} catch (err: any) {
throw this.formatError(err);
}
}
@@ -1766,7 +1767,7 @@ export class GraphQLService {
try {
await service.requestPasswordReset(args.email, args.reset_url || null);
} catch (err) {
} catch (err: any) {
if (err instanceof InvalidPayloadException) {
throw err;
}
@@ -1864,7 +1865,7 @@ export class GraphQLService {
string: GraphQLNonNull(GraphQLString),
},
resolve: async (_, args) => {
return await argon2.hash(args.string);
return await generateHash(args.string);
},
},
utils_hash_verify: {

View File

@@ -104,8 +104,16 @@ export class ImportService {
.pipe(csv())
.on('data', (value: Record<string, string>) => {
const obj = transform(value, (result: Record<string, string>, value, key) => {
if (value.length === 0) delete result[key];
else set(result, key, value);
if (value.length === 0) {
delete result[key];
} else {
try {
const parsedJson = JSON.parse(value);
set(result, key, parsedJson);
} catch {
set(result, key, value);
}
}
});
saveQueue.push(obj);

View File

@@ -135,7 +135,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
try {
const result = await trx.insert(payloadWithoutAliases).into(this.collection).returning(primaryKeyField);
primaryKey = primaryKey ?? result[0];
} catch (err) {
} catch (err: any) {
throw await translateDatabaseError(err);
}
@@ -442,7 +442,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
if (Object.keys(payloadWithTypeCasting).length > 0) {
try {
await trx(this.collection).update(payloadWithTypeCasting).whereIn(primaryKeyField, keys);
} catch (err) {
} catch (err: any) {
throw await translateDatabaseError(err);
}
}

View File

@@ -11,6 +11,7 @@ import { Accountability } from '@directus/shared/types';
import getMailer from '../../mailer';
import { Transporter, SendMailOptions } from 'nodemailer';
import prettier from 'prettier';
import { Url } from '../../utils/url';
const liquidEngine = new Liquid({
root: [path.resolve(env.EXTENSIONS_PATH, 'templates'), path.resolve(__dirname, 'templates')],
@@ -100,16 +101,15 @@ export class MailService {
};
function getProjectLogoURL(logoID?: string) {
let projectLogoURL = env.PUBLIC_URL;
if (projectLogoURL.endsWith('/') === false) {
projectLogoURL += '/';
}
const projectLogoUrl = new Url(env.PUBLIC_URL);
if (logoID) {
projectLogoURL += `assets/${logoID}`;
projectLogoUrl.addPath('assets', logoID);
} else {
projectLogoURL += `admin/img/directus-white.png`;
projectLogoUrl.addPath('admin', 'img', 'directus-white.png');
}
return projectLogoURL;
return projectLogoUrl.toString();
}
}
}

View File

@@ -1,4 +1,3 @@
import argon2 from 'argon2';
import { format, parseISO } from 'date-fns';
import Joi from 'joi';
import { Knex } from 'knex';
@@ -14,6 +13,7 @@ import { unflatten } from 'flat';
import { isNativeGeometry } from '../utils/geometry';
import { getGeometryHelper } from '../database/helpers/geometry';
import { parse as wktToGeoJSON } from 'wellknown';
import { generateHash } from '../utils/generate-hash';
type Action = 'create' | 'read' | 'update';
@@ -49,9 +49,8 @@ export class PayloadService {
public transformers: Transformers = {
async hash({ action, value }) {
if (!value) return;
if (action === 'create' || action === 'update') {
return await argon2.hash(String(value));
return await generateHash(String(value));
}
return value;

View File

@@ -209,7 +209,7 @@ export class ServerService {
try {
await cache!.set(`health-${checkID}`, true, 5);
await cache!.delete(`health-${checkID}`);
} catch (err) {
} catch (err: any) {
checks['cache:responseTime'][0].status = 'error';
checks['cache:responseTime'][0].output = err;
} finally {
@@ -249,7 +249,7 @@ export class ServerService {
try {
await rateLimiter.consume(`health-${checkID}`, 1);
await rateLimiter.delete(`health-${checkID}`);
} catch (err) {
} catch (err: any) {
checks['rateLimiter:responseTime'][0].status = 'error';
checks['rateLimiter:responseTime'][0].output = err;
} finally {
@@ -289,7 +289,7 @@ export class ServerService {
await disk.put(`health-${checkID}`, 'check');
await disk.get(`health-${checkID}`);
await disk.delete(`health-${checkID}`);
} catch (err) {
} catch (err: any) {
checks[`storage:${location}:responseTime`][0].status = 'error';
checks[`storage:${location}:responseTime`][0].output = err;
} finally {
@@ -323,7 +323,7 @@ export class ServerService {
try {
await mailer.verify();
} catch (err) {
} catch (err: any) {
checks['email:connection'][0].status = 'error';
checks['email:connection'][0].output = err;
}

View File

@@ -1,4 +1,3 @@
import argon2 from 'argon2';
import jwt from 'jsonwebtoken';
import { Knex } from 'knex';
import { clone, cloneDeep } from 'lodash';
@@ -17,7 +16,9 @@ import { AbstractServiceOptions, Item, PrimaryKey, Query, SchemaOverview } from
import { Accountability } from '@directus/shared/types';
import isUrlAllowed from '../utils/is-url-allowed';
import { toArray } from '@directus/shared/utils';
import { Url } from '../utils/url';
import { AuthenticationService } from './authentication';
import { generateHash } from '../utils/generate-hash';
import { ItemsService, MutationOptions } from './items';
import { MailService } from './mail';
import { SettingsService } from './settings';
@@ -304,10 +305,10 @@ export class UsersService extends ItemsService {
await service.createOne({ email, role, status: 'invited' });
const payload = { email, scope: 'invite' };
const token = jwt.sign(payload, env.SECRET as string, { expiresIn: '7d' });
const inviteURL = url ?? env.PUBLIC_URL + '/admin/accept-invite';
const acceptURL = inviteURL + '?token=' + token;
const subjectLine = subject ? subject : "You've been invited";
const token = jwt.sign(payload, env.SECRET as string, { expiresIn: '7d', issuer: 'directus' });
const subjectLine = subject ?? "You've been invited";
const inviteURL = url ? new Url(url) : new Url(env.PUBLIC_URL).addPath('admin', 'accept-invite');
inviteURL.setQuery('token', token);
await mailService.send({
to: email,
@@ -315,7 +316,7 @@ export class UsersService extends ItemsService {
template: {
name: 'user-invitation',
data: {
url: acceptURL,
url: inviteURL.toString(),
email,
},
},
@@ -325,7 +326,7 @@ export class UsersService extends ItemsService {
}
async acceptInvite(token: string, password: string): Promise<void> {
const { email, scope } = jwt.verify(token, env.SECRET as string) as {
const { email, scope } = jwt.verify(token, env.SECRET as string, { issuer: 'directus' }) as {
email: string;
scope: string;
};
@@ -338,7 +339,7 @@ export class UsersService extends ItemsService {
throw new InvalidPayloadException(`Email address ${email} hasn't been invited.`);
}
const passwordHashed = await argon2.hash(password);
const passwordHashed = generateHash(password);
await this.knex('directus_users').update({ password: passwordHashed, status: 'active' }).where({ id: user.id });
@@ -364,7 +365,7 @@ export class UsersService extends ItemsService {
});
const payload = { email, scope: 'password-reset' };
const token = jwt.sign(payload, env.SECRET as string, { expiresIn: '1d' });
const token = jwt.sign(payload, env.SECRET as string, { expiresIn: '1d', issuer: 'directus' });
if (url && isUrlAllowed(url, env.PASSWORD_RESET_URL_ALLOW_LIST) === false) {
throw new InvalidPayloadException(`Url "${url}" can't be used to reset passwords.`);
@@ -389,7 +390,7 @@ export class UsersService extends ItemsService {
}
async resetPassword(token: string, password: string): Promise<void> {
const { email, scope } = jwt.verify(token, env.SECRET as string) as {
const { email, scope } = jwt.verify(token, env.SECRET as string, { issuer: 'directus' }) as {
email: string;
scope: string;
};
@@ -402,7 +403,7 @@ export class UsersService extends ItemsService {
throw new ForbiddenException();
}
const passwordHashed = await argon2.hash(password);
const passwordHashed = await generateHash(password);
await this.knex('directus_users').update({ password: passwordHashed, status: 'active' }).where({ id: user.id });

View File

@@ -30,7 +30,7 @@ export default async function start(): Promise<void> {
// No need to log/warn here. The update message is only an informative nice-to-have
});
logger.info(`Server started at port ${port}`);
logger.info(`Server started at http://localhost:${port}`);
emitAsyncSafe('server.start');
})
.once('error', (err: any) => {

View File

@@ -1,6 +1,7 @@
import { ListenerFn } from 'eventemitter2';
import { Router } from 'express';
import { Knex } from 'knex';
import { Logger } from 'pino';
import env from '../env';
import * as exceptions from '../exceptions';
import * as services from '../services';
@@ -11,8 +12,18 @@ export type ExtensionContext = {
exceptions: typeof exceptions;
database: Knex;
env: typeof env;
logger: Logger;
getSchema: typeof getSchema;
};
export type HookRegisterFunction = (context: ExtensionContext) => Record<string, ListenerFn>;
export type EndpointRegisterFunction = (router: Router, context: ExtensionContext) => void;
type HookHandlerFunction = (context: ExtensionContext) => Record<string, ListenerFn>;
export type HookConfig = HookHandlerFunction;
type EndpointHandlerFunction = (router: Router, context: ExtensionContext) => void;
interface EndpointAdvancedConfig {
id: string;
handler: EndpointHandlerFunction;
}
export type EndpointConfig = EndpointHandlerFunction | EndpointAdvancedConfig;

View File

@@ -9,7 +9,6 @@ export type Permission = {
action: PermissionsAction;
permissions: Record<string, any>;
validation: Filter | null;
limit: number | null;
presets: Record<string, any> | null;
fields: string[] | null;
system?: true;

View File

@@ -322,15 +322,13 @@ export function applyFilter(
if (operator === '_empty' || (operator === '_nempty' && compareValue === false)) {
dbQuery[logical].andWhere((query) => {
query.whereNull(selectionRaw);
query.orWhere(selectionRaw, '=', '');
query.where(key, '=', '');
});
}
if (operator === '_nempty' || (operator === '_empty' && compareValue === false)) {
dbQuery[logical].andWhere((query) => {
query.whereNotNull(selectionRaw);
query.orWhere(selectionRaw, '!=', '');
query.where(key, '!=', '');
});
}
@@ -344,8 +342,6 @@ export function applyFilter(
// reported as [undefined].
// We need to remove any undefined values, as they are useless
compareValue = compareValue.filter((val) => val !== undefined);
// And ignore the result filter if there are no values in it
if (compareValue.length === 0) return;
}
if (operator === '_eq') {

View File

@@ -0,0 +1,10 @@
import argon2 from 'argon2';
import { getConfigFromEnv } from './get-config-from-env';
/**
 * Hash a string with argon2, honoring any `HASH_*` configuration options set
 * in the environment.
 *
 * @param stringToHash - Plaintext value to hash (e.g. a password).
 * @returns Promise resolving to the argon2 hash string.
 */
export function generateHash(stringToHash: string): Promise<string> {
	// Disallow the HASH_RAW option, see https://github.com/directus/directus/discussions/7670#discussioncomment-1255805
	const argon2HashConfigOptions = getConfigFromEnv('HASH_', 'HASH_RAW');

	// associatedData, if specified, must be passed as a Buffer to argon2.hash,
	// see https://github.com/ranisalt/node-argon2/wiki/Options#associateddata
	if ('associatedData' in argon2HashConfigOptions) {
		argon2HashConfigOptions.associatedData = Buffer.from(argon2HashConfigOptions.associatedData);
	}

	return argon2.hash(stringToHash, argon2HashConfigOptions);
}

View File

@@ -0,0 +1,60 @@
// Unit tests for getCacheKey: known request shapes must hash to their pinned
// cache keys, and distinct requests must never collide.
import { Request } from 'express';
import { getCacheKey } from './get-cache-key';

const restUrl = 'http://localhost/items/example';
const graphQlUrl = 'http://localhost/graphql';
const accountability = { user: '00000000-0000-0000-0000-000000000000' };

// Each fixture pairs a partial request object with the exact key it must
// produce. The pinned hashes guard against accidental key-format changes,
// which would silently invalidate every existing cache entry.
const requests = [
	{
		name: 'as unauthenticated request',
		params: { originalUrl: restUrl },
		key: '17da8272c9a0ec6eea38a37d6d78bddeb7c79045',
	},
	{
		name: 'as authenticated request',
		params: { originalUrl: restUrl, accountability },
		key: '99a6394222a3d7d149ac1662fc2fff506932db58',
	},
	{
		name: 'a request with a fields query',
		params: { originalUrl: restUrl, sanitizedQuery: { fields: ['id', 'name'] } },
		key: 'aa6e2d8a78de4dfb4af6eaa230d1cd9b7d31ed19',
	},
	{
		name: 'a request with a filter query',
		params: { originalUrl: restUrl, sanitizedQuery: { filter: { name: { _eq: 'test' } } } },
		key: 'd7eb8970f0429e1cf85e12eb5bb8669f618b09d3',
	},
	{
		name: 'a GraphQL query request',
		params: { originalUrl: graphQlUrl, query: { query: 'query { test { id } }' } },
		key: '201731b75c627c60554512d819b6935b54c73814',
	},
];

// Flatten the fixtures into [name, params, key] tuples for test.each.
const cases = requests.map((fixture) => [fixture.name, fixture.params, fixture.key]);

describe('get cache key', () => {
	test.each(cases)('should create a cache key for %s', (_, params, key) => {
		expect(getCacheKey(params as unknown as Request)).toEqual(key);
	});

	test('should create a unique key for each request', () => {
		// A Set collapses duplicates, so a size mismatch means a collision.
		const keys = requests.map(({ key }) => key);
		const hasDuplicate = new Set(keys).size !== keys.length;
		expect(hasDuplicate).toBeFalsy();
	});

	test('should create a unique key for GraphQL requests with different variables', () => {
		const query = 'query Test ($name: String) { test (filter: { name: { _eq: $name } }) { id } }';
		const operationName = 'test';
		// Same query/operation, different variables — keys must still differ.
		const buildRequest = (variables: string): any => ({
			originalUrl: graphQlUrl,
			query: { query, operationName, variables },
		});
		const firstKey = getCacheKey(buildRequest(JSON.stringify({ name: 'test 1' })));
		const secondKey = getCacheKey(buildRequest(JSON.stringify({ name: 'test 2' })));
		expect(firstKey).not.toEqual(secondKey);
	});
});

View File

@@ -1,14 +1,16 @@
import { Request } from 'express';
import url from 'url';
import hash from 'object-hash';
import { pick } from 'lodash';
export function getCacheKey(req: Request): string {
const path = url.parse(req.originalUrl).pathname;
const isGraphQl = path?.includes('/graphql');
const info = {
user: req.accountability?.user || null,
path,
query: path?.includes('/graphql') ? req.query.query : req.sanitizedQuery,
query: isGraphQl ? pick(req.query, ['query', 'variables']) : req.sanitizedQuery,
};
const key = hash(info);

View File

@@ -145,8 +145,12 @@ export default function getLocalType(
return { type: 'text' };
}
/** Handle Boolean as TINYINT*/
if (column.data_type.toLowerCase() === 'tinyint(1)' || column.data_type.toLowerCase() === 'tinyint(0)') {
/** Handle Boolean as TINYINT and edgecase MySQL where it still is just tinyint */
if (
(database.client.constructor.name === 'Client_MySQL' && column.data_type.toLowerCase() === 'tinyint') ||
column.data_type.toLowerCase() === 'tinyint(1)' ||
column.data_type.toLowerCase() === 'tinyint(0)'
) {
return { type: 'boolean' };
}

View File

@@ -0,0 +1,6 @@
/**
 * Unwrap a module's export: CommonJS-transpiled ES modules wrap the export in
 * a `default` property, while plain modules export the value directly.
 *
 * @param mod - Either the export itself or a `{ default: … }` wrapper.
 * @returns The unwrapped export.
 */
export default function getModuleDefault<T>(mod: T | { default: T }): T {
	// No wrapper present — the value itself is the export.
	if (!('default' in mod)) {
		return mod;
	}
	return mod.default;
}

View File

@@ -32,7 +32,7 @@ export async function getSchema(options?: {
try {
cachedSchema = (await schemaCache.get('schema')) as SchemaOverview;
} catch (err) {
} catch (err: any) {
logger.warn(err, `[schema-cache] Couldn't retrieve cache. ${err}`);
}
@@ -47,7 +47,7 @@ export async function getSchema(options?: {
result,
typeof env.CACHE_SCHEMA === 'string' ? ms(env.CACHE_SCHEMA) : undefined
);
} catch (err) {
} catch (err: any) {
logger.warn(err, `[schema-cache] Couldn't save cache. ${err}`);
}
}
@@ -116,6 +116,11 @@ async function getDatabaseSchema(
];
for (const [collection, info] of Object.entries(schemaOverview)) {
if (toArray(env.DB_EXCLUDE_TABLES).includes(collection)) {
logger.trace(`Collection "${collection}" is configured to be excluded and will be ignored`);
continue;
}
if (!info.primary) {
logger.warn(`Collection "${collection}" doesn't have a primary key column and will be ignored`);
continue;

View File

@@ -2,9 +2,10 @@ import atob from 'atob';
import logger from '../logger';
/**
* Check if a given string conforms to the structure of a JWT.
* Check if a given string conforms to the structure of a JWT
* and whether it is issued by Directus.
*/
export default function isJWT(string: string): boolean {
export default function isDirectusJWT(string: string): boolean {
const parts = string.split('.');
// JWTs have the structure header.payload.signature
@@ -15,7 +16,7 @@ export default function isJWT(string: string): boolean {
atob(parts[0]);
atob(parts[1]);
atob(parts[2]);
} catch (err) {
} catch (err: any) {
logger.error(err);
return false;
}
@@ -23,7 +24,8 @@ export default function isJWT(string: string): boolean {
// Check if the header and payload are valid JSON
try {
JSON.parse(atob(parts[0]));
JSON.parse(atob(parts[1]));
const payload = JSON.parse(atob(parts[1]));
if (payload.iss !== 'directus') return false;
} catch {
return false;
}

View File

@@ -25,7 +25,6 @@ function mergePerm(currentPerm: Permission, newPerm: Permission) {
let validation = currentPerm.validation;
let fields = currentPerm.fields;
let presets = currentPerm.presets;
let limit = currentPerm.limit;
if (newPerm.permissions) {
if (currentPerm.permissions && Object.keys(currentPerm.permissions)[0] === '_or') {
@@ -73,16 +72,11 @@ function mergePerm(currentPerm: Permission, newPerm: Permission) {
presets = merge({}, presets, newPerm.presets);
}
if (newPerm.limit && newPerm.limit > (currentPerm.limit || 0)) {
limit = newPerm.limit;
}
return {
...currentPerm,
permissions,
validation,
fields,
presets,
limit,
};
}

View File

@@ -13,7 +13,7 @@ export async function track(event: string): Promise<void> {
try {
await axios.post('https://telemetry.directus.io/', info);
} catch (err) {
} catch (err: any) {
if (env.NODE_ENV === 'development') {
logger.error(err);
}

View File

@@ -16,14 +16,22 @@ export function resolvePreset(input: TransformationParams | TransformationPreset
);
}
function extractOptions<T extends Record<string, any>>(keys: (keyof T)[], numberKeys: (keyof T)[] = []) {
function extractOptions<T extends Record<string, any>>(
keys: (keyof T)[],
numberKeys: (keyof T)[] = [],
booleanKeys: (keyof T)[] = []
) {
return function (input: TransformationParams | TransformationPreset): T {
return Object.entries(input).reduce(
(config, [key, value]) =>
keys.includes(key as any) && isNil(value) === false
? {
...config,
[key]: numberKeys.includes(key as any) ? +value : value,
[key]: numberKeys.includes(key as any)
? +value
: booleanKeys.includes(key as any)
? Boolean(value)
: value,
}
: config,
{} as T
@@ -53,7 +61,8 @@ function extractResize(input: TransformationParams | TransformationPreset): Tran
'resize',
extractOptions<TransformationPresetResize>(
['width', 'height', 'fit', 'withoutEnlargement'],
['width', 'height']
['width', 'height'],
['withoutEnlargement']
)(input),
];
}

78
api/src/utils/url.ts Normal file
View File

@@ -0,0 +1,78 @@
import { URL } from 'url';
/**
 * Lightweight mutable URL builder. Supports absolute
 * (`https://example.com/x`), protocol-relative (`//example.com/x`),
 * root-relative (`/x`) and path-relative (`./x`) input.
 */
export class Url {
	protocol: string | null;
	host: string | null;
	port: string | null;
	path: string[];
	query: Record<string, string>;
	hash: string | null;

	constructor(url: string) {
		// A base is required so relative input can be parsed; the dummy
		// "http://localhost" parts are discarded again below for relative URLs.
		const parsedUrl = new URL(url, 'http://localhost');

		const isProtocolRelative = /^\/\//.test(url);
		const isRootRelative = /^\/$|^\/[^/]/.test(url);
		const isPathRelative = /^\./.test(url);

		this.protocol =
			!isProtocolRelative && !isRootRelative && !isPathRelative
				? parsedUrl.protocol.substring(0, parsedUrl.protocol.length - 1) // drop trailing ":"
				: null;
		// Use `hostname` (host WITHOUT the port). `parsedUrl.host` includes the
		// port, which is tracked separately in `this.port` and appended again in
		// `toString()` — using `host` here would duplicate it (e.g.
		// "http://x:8080:8080/…").
		this.host = !isRootRelative && !isPathRelative ? parsedUrl.hostname : null;
		this.port = parsedUrl.port !== '' ? parsedUrl.port : null;
		this.path = parsedUrl.pathname.split('/').filter((p) => p !== '');
		this.query = Object.fromEntries(parsedUrl.searchParams.entries());
		this.hash = parsedUrl.hash !== '' ? parsedUrl.hash.substring(1) : null;
	}

	/** Whether both a protocol and a host are present. */
	public isAbsolute(): boolean {
		return this.protocol !== null && this.host !== null;
	}

	/** Whether the URL has a host but no protocol (e.g. `//example.com/x`). */
	public isProtocolRelative(): boolean {
		return this.protocol === null && this.host !== null;
	}

	/** Whether the URL has neither protocol nor host (e.g. `/x` or `./x`). */
	public isRootRelative(): boolean {
		return this.protocol === null && this.host === null;
	}

	/**
	 * Append path segments. Each argument may itself contain `/` separators;
	 * `.` segments are ignored and `..` pops the last existing segment.
	 *
	 * @returns `this` for chaining.
	 */
	public addPath(...paths: string[]): Url {
		const pathToAdd = paths.flatMap((p) => p.split('/')).filter((p) => p !== '');
		for (const pathSegment of pathToAdd) {
			if (pathSegment === '..') {
				this.path.pop();
			} else if (pathSegment !== '.') {
				this.path.push(pathSegment);
			}
		}
		return this;
	}

	/**
	 * Set (or overwrite) a single query parameter.
	 *
	 * @returns `this` for chaining.
	 */
	public setQuery(key: string, value: string): Url {
		this.query[key] = value;
		return this;
	}

	/**
	 * Serialize back to a string. With `rootRelative: true` the
	 * protocol/host/port prefix is omitted.
	 *
	 * NOTE(review): query keys/values are not percent-encoded here — callers
	 * are expected to pass URL-safe values; confirm before feeding user input.
	 */
	public toString({ rootRelative } = { rootRelative: false }): string {
		const protocol = this.protocol !== null ? `${this.protocol}:` : '';
		const host = this.host ?? '';
		const port = this.port !== null ? `:${this.port}` : '';
		// Protocol-relative URLs keep the leading "//" even with a null protocol.
		const origin = `${this.host !== null ? `${protocol}//` : ''}${host}${port}`;
		const path = `/${this.path.join('/')}`;
		const query =
			Object.keys(this.query).length !== 0
				? `?${Object.entries(this.query)
						.map(([k, v]) => `${k}=${v}`)
						.join('&')}`
				: '';
		const hash = this.hash !== null ? `#${this.hash}` : '';
		return `${!rootRelative ? origin : ''}${path}${query}${hash}`;
	}
}

View File

@@ -61,7 +61,7 @@ function createHandler(webhook: Webhook): ListenerFn {
method: webhook.method,
data: webhook.data ? webhookPayload : null,
});
} catch (error) {
} catch (error: any) {
logger.warn(`Webhook "${webhook.name}" (id: ${webhook.id}) failed`);
logger.warn(error);
}

View File

@@ -1,6 +1,6 @@
{
"name": "@directus/app",
"version": "9.0.0-rc.90",
"version": "9.0.0-rc.92",
"private": false,
"description": "Directus is an Open-Source Headless CMS & API for Managing Custom Databases",
"author": "Rijk van Zanten <rijkvanzanten@me.com>",
@@ -27,10 +27,10 @@
},
"gitHead": "24621f3934dc77eb23441331040ed13c676ceffd",
"devDependencies": {
"@directus/docs": "9.0.0-rc.90",
"@directus/extension-sdk": "9.0.0-rc.90",
"@directus/format-title": "9.0.0-rc.90",
"@directus/shared": "9.0.0-rc.90",
"@directus/docs": "9.0.0-rc.92",
"@directus/extensions-sdk": "9.0.0-rc.92",
"@directus/format-title": "9.0.0-rc.92",
"@directus/shared": "9.0.0-rc.92",
"@fullcalendar/core": "5.9.0",
"@fullcalendar/daygrid": "5.9.0",
"@fullcalendar/interaction": "5.9.0",
@@ -38,7 +38,7 @@
"@fullcalendar/timegrid": "5.9.0",
"@mapbox/mapbox-gl-draw": "1.3.0",
"@mapbox/mapbox-gl-draw-static-mode": "1.0.1",
"@mapbox/mapbox-gl-geocoder": "4.7.2",
"@mapbox/mapbox-gl-geocoder": "4.7.3",
"@popperjs/core": "2.9.3",
"@rollup/plugin-yaml": "3.1.0",
"@sindresorhus/slugify": "2.1.0",
@@ -48,56 +48,58 @@
"@types/bytes": "3.1.1",
"@types/codemirror": "5.60.2",
"@types/color": "3.0.2",
"@types/diacritics": "1.3.1",
"@types/diff": "5.0.1",
"@types/dompurify": "2.2.3",
"@types/geojson": "7946.0.8",
"@types/lodash": "4.14.172",
"@types/mapbox__mapbox-gl-draw": "1.2.3",
"@types/mapbox__mapbox-gl-geocoder": "4.7.1",
"@types/markdown-it": "12.2.0",
"@types/marked": "2.0.4",
"@types/mime-types": "2.1.0",
"@types/markdown-it": "12.2.1",
"@types/marked": "2.0.5",
"@types/mime-types": "2.1.1",
"@types/ms": "0.7.31",
"@types/qrcode": "1.4.1",
"@types/wellknown": "0.5.1",
"@vitejs/plugin-vue": "1.4.0",
"@vitejs/plugin-vue": "1.6.2",
"@vue/cli-plugin-babel": "4.5.13",
"@vue/cli-plugin-router": "4.5.13",
"@vue/cli-plugin-typescript": "4.5.13",
"@vue/cli-plugin-vuex": "4.5.13",
"@vue/cli-service": "4.5.13",
"@vue/compiler-sfc": "3.2.2",
"axios": "0.21.1",
"@vue/compiler-sfc": "3.2.11",
"axios": "0.21.4",
"base-64": "1.0.0",
"codemirror": "5.62.2",
"codemirror": "5.62.3",
"copyfiles": "2.4.1",
"cropperjs": "1.5.12",
"date-fns": "2.23.0",
"dompurify": "2.3.0",
"diacritics": "1.3.0",
"dompurify": "2.3.1",
"escape-string-regexp": "5.0.0",
"front-matter": "4.0.2",
"html-entities": "2.3.2",
"jsonlint-mod": "1.7.6",
"maplibre-gl": "1.15.2",
"marked": "2.1.3",
"marked": "3.0.0",
"micromustache": "8.0.3",
"mime": "2.5.2",
"mitt": "3.0.0",
"nanoid": "3.1.25",
"p-queue": "7.1.0",
"pinia": "2.0.0-rc.4",
"prettier": "2.3.2",
"pinia": "2.0.0-rc.9",
"prettier": "2.4.0",
"pretty-ms": "7.0.1",
"qrcode": "1.4.4",
"rimraf": "3.0.2",
"sass": "1.37.5",
"tinymce": "5.8.2",
"typescript": "4.3.5",
"vite": "2.4.4",
"vue": "3.2.2",
"sass": "1.39.2",
"tinymce": "5.9.2",
"typescript": "4.4.3",
"vite": "2.5.7",
"vue": "3.2.11",
"vue-i18n": "9.1.7",
"vue-router": "4.0.11",
"vuedraggable": "4.0.3",
"vuedraggable": "4.1.0",
"wellknown": "0.5.0"
}
}

View File

@@ -95,13 +95,13 @@ api.interceptors.response.use(onResponse, onError);
export default api;
function getToken() {
export function getToken(): string | null {
return api.defaults.headers?.['Authorization']?.split(' ')[1] || null;
}
export function addTokenToURL(url: string, token?: string): string {
token = token || getToken();
if (!token) return url;
const accessToken = token || getToken();
if (!accessToken) return url;
return addQueryToPath(url, { access_token: token });
return addQueryToPath(url, { access_token: accessToken });
}

View File

@@ -22,26 +22,22 @@
<script lang="ts">
import { useI18n } from 'vue-i18n';
import { defineComponent, toRefs, watch, computed, provide, onMounted, onUnmounted } from 'vue';
import * as stores from '@/stores';
import api, { addTokenToURL } from '@/api';
import axios from 'axios';
import { defineComponent, toRefs, watch, computed, onMounted, onUnmounted } from 'vue';
import { useAppStore, useUserStore, useServerStore } from '@/stores';
import { startIdleTracking, stopIdleTracking } from './idle';
import useSystem from '@/composables/use-system';
import useWindowSize from '@/composables/use-window-size';
import setFavicon from '@/utils/set-favicon';
export default defineComponent({
setup() {
const { t } = useI18n();
const { useAppStore, useUserStore, useServerStore } = stores;
const appStore = useAppStore();
const userStore = useUserStore();
const serverStore = useServerStore();
const { hydrating, sidebarOpen } = toRefs(appStore);
const { hydrating } = toRefs(appStore);
const brandStyle = computed(() => {
return {
@@ -57,23 +53,6 @@ export default defineComponent({
setFavicon(serverStore.info?.project?.project_color || '#00C897', hasCustomLogo);
});
const { width } = useWindowSize();
watch(
width,
(newWidth, oldWidth) => {
if (newWidth === null || newWidth === 0) return;
if (newWidth === oldWidth) return;
if (newWidth >= 1424) {
if (sidebarOpen.value === false) sidebarOpen.value = true;
} else {
if (sidebarOpen.value === true) sidebarOpen.value = false;
}
},
{ immediate: true }
);
watch(
() => userStore.currentUser,
(newUser) => {
@@ -106,15 +85,7 @@ export default defineComponent({
const error = computed(() => appStore.error);
/**
* This allows custom extensions to use the apps internals
*/
provide('system', {
...stores,
api,
axios,
addTokenToURL,
});
useSystem();
return { t, hydrating, brandStyle, error, customCSS };
},

View File

@@ -92,7 +92,7 @@ export async function refresh({ navigate }: LogoutOptions = { navigate: true }):
appStore.authenticated = true;
return accessToken;
} catch (error) {
} catch (error: any) {
await logout({ navigate, reason: LogoutReason.SESSION_EXPIRED });
}
}

View File

@@ -27,6 +27,7 @@ import VFieldSelect from './v-field-select';
import VFieldTemplate from './v-field-template';
import VForm from './v-form';
import VHover from './v-hover/';
import VHighlight from './v-highlight.vue';
import VIcon from './v-icon/';
import VInfo from './v-info/';
import VInput from './v-input/';
@@ -72,6 +73,7 @@ export function registerComponents(app: App): void {
app.component('VFieldSelect', VFieldSelect);
app.component('VForm', VForm);
app.component('VHover', VHover);
app.component('VHighlight', VHighlight);
app.component('VIcon', VIcon);
app.component('VInfo', VInfo);
app.component('VInput', VInput);

View File

@@ -65,7 +65,7 @@ body {
}
.large {
--v-avatar-size: 64px;
--v-avatar-size: 60px;
}
.x-large {

Some files were not shown because too many files have changed in this diff Show More