Merge branch 'main' into aggregation

rijkvanzanten
2021-08-06 16:14:29 -04:00
676 changed files with 15648 additions and 8806 deletions

View File

@@ -103,6 +103,7 @@ ACCESS_TOKEN_TTL="15m"
REFRESH_TOKEN_TTL="7d"
REFRESH_TOKEN_COOKIE_SECURE="false"
REFRESH_TOKEN_COOKIE_SAME_SITE="lax"
REFRESH_TOKEN_COOKIE_NAME="directus_refresh_token"
CORS_ENABLED="true"
CORS_ORIGIN="true"

View File

@@ -1,6 +1,6 @@
{
"name": "directus",
"version": "9.0.0-rc.83",
"version": "9.0.0-rc.88",
"license": "GPL-3.0-only",
"homepage": "https://github.com/directus/directus#readme",
"description": "Directus is a real-time API and App dashboard for managing SQL database content.",
@@ -55,9 +55,8 @@
"prebuild": "npm run cleanup",
"build": "tsc --build && copyfiles \"src/**/*.*\" -e \"src/**/*.ts\" -u 1 dist",
"cleanup": "rimraf dist",
"dev": "cross-env DIRECTUS_DEV=true NODE_ENV=development ts-node-dev --files --transpile-only --respawn --watch \".env\" --inspect --exit-child -- src/start.ts",
"cli": "cross-env DIRECTUS_DEV=true NODE_ENV=development ts-node --script-mode --transpile-only src/cli/index.ts",
"prepublishOnly": "npm run build"
"dev": "cross-env NODE_ENV=development SERVE_APP=false ts-node-dev --files --transpile-only --respawn --watch \".env\" --inspect --exit-child -- src/start.ts",
"cli": "cross-env NODE_ENV=development SERVE_APP=false ts-node --script-mode --transpile-only src/cli/index.ts"
},
"engines": {
"node": ">=12.20.0"
@@ -69,15 +68,15 @@
"example.env"
],
"dependencies": {
"@directus/app": "9.0.0-rc.83",
"@directus/drive": "9.0.0-rc.83",
"@directus/drive-azure": "9.0.0-rc.83",
"@directus/drive-gcs": "9.0.0-rc.83",
"@directus/drive-s3": "9.0.0-rc.83",
"@directus/format-title": "9.0.0-rc.83",
"@directus/schema": "9.0.0-rc.83",
"@directus/shared": "9.0.0-rc.83",
"@directus/specs": "9.0.0-rc.83",
"@directus/app": "9.0.0-rc.88",
"@directus/drive": "9.0.0-rc.88",
"@directus/drive-azure": "9.0.0-rc.88",
"@directus/drive-gcs": "9.0.0-rc.88",
"@directus/drive-s3": "9.0.0-rc.88",
"@directus/format-title": "9.0.0-rc.88",
"@directus/schema": "9.0.0-rc.88",
"@directus/shared": "9.0.0-rc.88",
"@directus/specs": "9.0.0-rc.88",
"@godaddy/terminus": "^4.9.0",
"@rollup/plugin-alias": "^3.1.2",
"@rollup/plugin-virtual": "^2.0.3",
@@ -99,14 +98,13 @@
"dotenv": "^10.0.0",
"eventemitter2": "^6.4.3",
"execa": "^5.1.1",
"exif-reader": "^1.0.3",
"exifr": "^7.1.2",
"express": "^4.17.1",
"express-session": "^1.17.2",
"fs-extra": "^10.0.0",
"grant": "^5.4.14",
"graphql": "^15.5.0",
"graphql-compose": "^9.0.1",
"icc": "^2.0.0",
"inquirer": "^8.1.1",
"joi": "^17.3.0",
"js-yaml": "^4.1.0",
@@ -115,7 +113,7 @@
"jsonwebtoken": "^8.5.1",
"keyv": "^4.0.3",
"knex": "^0.95.6",
"knex-schema-inspector": "^1.5.7",
"knex-schema-inspector": "1.5.13",
"liquidjs": "^9.25.0",
"lodash": "^4.17.21",
"macos-release": "^2.4.1",
@@ -125,12 +123,13 @@
"node-cron": "^3.0.0",
"node-machine-id": "^1.1.12",
"nodemailer": "^6.6.1",
"object-hash": "^2.2.0",
"openapi3-ts": "^2.0.0",
"ora": "^5.4.0",
"otplib": "^12.0.1",
"pino": "^6.11.3",
"pino": "6.13.0",
"pino-colada": "^2.1.0",
"pino-http": "^5.5.0",
"pino-http": "5.6.0",
"prettier": "^2.3.1",
"qs": "^6.9.4",
"rate-limiter-flexible": "^2.2.2",
@@ -138,6 +137,7 @@
"rollup": "^2.52.1",
"sharp": "^0.28.3",
"stream-json": "^1.7.1",
"update-check": "^1.5.4",
"uuid": "^8.3.2",
"uuid-validate": "0.0.3"
},
@@ -151,43 +151,43 @@
"memcached": "^2.2.2",
"mysql": "^2.18.1",
"nodemailer-mailgun-transport": "^2.1.3",
"oracledb": "^5.0.0",
"pg": "^8.6.0",
"sqlite3": "^5.0.2",
"tedious": "^11.0.8"
},
"gitHead": "24621f3934dc77eb23441331040ed13c676ceffd",
"devDependencies": {
"@types/async": "3.2.6",
"@types/async": "3.2.7",
"@types/atob": "2.1.2",
"@types/body-parser": "1.19.0",
"@types/busboy": "0.2.3",
"@types/body-parser": "1.19.1",
"@types/busboy": "0.2.4",
"@types/cookie-parser": "1.4.2",
"@types/cors": "2.8.10",
"@types/cors": "2.8.12",
"@types/destroy": "1.0.0",
"@types/express": "4.17.12",
"@types/express": "4.17.13",
"@types/express-pino-logger": "4.0.2",
"@types/express-session": "1.17.3",
"@types/fs-extra": "9.0.11",
"@types/inquirer": "7.3.2",
"@types/js-yaml": "4.0.1",
"@types/json2csv": "5.0.2",
"@types/jsonwebtoken": "8.5.2",
"@types/keyv": "3.1.1",
"@types/lodash": "4.14.170",
"@types/express-session": "1.17.4",
"@types/fs-extra": "9.0.12",
"@types/inquirer": "7.3.3",
"@types/js-yaml": "4.0.2",
"@types/json2csv": "5.0.3",
"@types/jsonwebtoken": "8.5.4",
"@types/keyv": "3.1.2",
"@types/lodash": "4.14.172",
"@types/mime-types": "2.1.0",
"@types/ms": "0.7.31",
"@types/node": "15.12.2",
"@types/node-cron": "2.0.3",
"@types/nodemailer": "6.4.2",
"@types/qs": "6.9.6",
"@types/sharp": "0.28.3",
"@types/stream-json": "1.7.0",
"@types/uuid": "8.3.0",
"@types/node-cron": "2.0.4",
"@types/nodemailer": "6.4.4",
"@types/object-hash": "2.1.1",
"@types/qs": "6.9.7",
"@types/sharp": "0.28.5",
"@types/stream-json": "1.7.1",
"@types/uuid": "8.3.1",
"@types/uuid-validate": "0.0.1",
"copyfiles": "2.4.1",
"cross-env": "7.0.3",
"ts-node-dev": "1.1.7",
"typescript": "4.3.4"
"ts-node-dev": "1.1.8",
"typescript": "4.3.5"
}
}

View File

@@ -24,7 +24,7 @@ import settingsRouter from './controllers/settings';
import usersRouter from './controllers/users';
import utilsRouter from './controllers/utils';
import webhooksRouter from './controllers/webhooks';
import { isInstalled, validateDBConnection } from './database';
import { isInstalled, validateDBConnection, validateMigrations } from './database';
import { emitAsyncSafe } from './emitter';
import env from './env';
import { InvalidPayloadException } from './exceptions';
@@ -47,6 +47,12 @@ import { session } from './middleware/session';
export default async function createApp(): Promise<express.Application> {
validateEnv(['KEY', 'SECRET']);
try {
new URL(env.PUBLIC_URL);
} catch {
logger.warn('PUBLIC_URL is not a valid URL');
}
await validateDBConnection();
if ((await isInstalled()) === false) {
@@ -54,6 +60,10 @@ export default async function createApp(): Promise<express.Application> {
process.exit(1);
}
if ((await validateMigrations()) === false) {
logger.warn(`Database migrations have not all been run`);
}
await initializeExtensions();
registerExtensionHooks();
@@ -99,7 +109,15 @@ export default async function createApp(): Promise<express.Application> {
app.use(cors);
}
if (!('DIRECTUS_DEV' in process.env)) {
app.get('/', (req, res, next) => {
if (env.ROOT_REDIRECT) {
res.redirect(env.ROOT_REDIRECT);
} else {
next();
}
});
if (env.SERVE_APP) {
const adminPath = require.resolve('@directus/app/dist/index.html');
const publicUrl = env.PUBLIC_URL.endsWith('/') ? env.PUBLIC_URL : env.PUBLIC_URL + '/';
@@ -107,14 +125,6 @@ export default async function createApp(): Promise<express.Application> {
let html = fse.readFileSync(adminPath, 'utf-8');
html = html.replace(/<meta charset="utf-8" \/>/, `<meta charset="utf-8" />\n\t\t<base href="${publicUrl}admin/">`);
app.get('/', (req, res, next) => {
if (env.ROOT_REDIRECT) {
res.redirect(env.ROOT_REDIRECT);
} else {
next();
}
});
app.get('/admin', (req, res) => res.send(html));
app.use('/admin', express.static(path.join(adminPath, '..')));
app.use('/admin/*', (req, res) => {
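Taken together, these hunks replace the old DIRECTUS_DEV switch with explicit flags; a hedged sketch of a typical configuration (values illustrative):

PUBLIC_URL="https://cms.example.com"   # now sanity-checked: an unparseable URL only logs a warning
SERVE_APP="true"                       # serve @directus/app under /admin, with <base href> derived from PUBLIC_URL
ROOT_REDIRECT="./admin"                # where GET / redirects; a falsy value falls through to the next handler

Note the root redirect now registers unconditionally, so it keeps working even when SERVE_APP is false.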

View File

@@ -12,12 +12,12 @@ export function getCache(): { cache: Keyv | null; schemaCache: Keyv | null } {
if (env.CACHE_ENABLED === true && cache === null) {
validateEnv(['CACHE_NAMESPACE', 'CACHE_TTL', 'CACHE_STORE']);
cache = getKeyvInstance(ms(env.CACHE_TTL as string));
cache.on('error', (err) => logger.error(err));
cache.on('error', (err) => logger.warn(err, `[cache] ${err}`));
}
if (env.CACHE_SCHEMA !== false && schemaCache === null) {
schemaCache = getKeyvInstance(typeof env.CACHE_SCHEMA === 'string' ? ms(env.CACHE_SCHEMA) : undefined);
schemaCache.on('error', (err) => logger.error(err));
schemaCache.on('error', (err) => logger.warn(err, `[cache] ${err}`));
}
return { cache, schemaCache };
@@ -43,7 +43,11 @@ function getConfig(store: 'memory' | 'redis' | 'memcache' = 'memory', ttl: numbe
if (store === 'redis') {
const KeyvRedis = require('@keyv/redis');
config.store = new KeyvRedis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'));
config.store = new KeyvRedis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'), {
commandTimeout: 500,
retryStrategy: false,
});
}
if (store === 'memcache') {
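The two added client options make an unreachable Redis fail fast instead of stalling every request behind cache reads; with the handlers above demoted from error to warn, an outage degrades to cache misses. A minimal standalone sketch under the same assumptions (keyv with @keyv/redis, as used here):

import Keyv from 'keyv';
const KeyvRedis = require('@keyv/redis');

const store = new KeyvRedis('redis://localhost:6379', {
  commandTimeout: 500, // give up on a Redis command after 500 ms
  retryStrategy: false, // don't queue reconnect attempts; surface the error immediately
});
const cache = new Keyv({ store, namespace: 'directus' });
cache.on('error', (err) => console.warn(`[cache] ${err}`)); // warn instead of crash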

View File

@@ -1,3 +1,4 @@
import { Knex } from 'knex';
import { nanoid } from 'nanoid';
import runMigrations from '../../../database/migrations/run';
import installDatabase from '../../../database/seeds/run';
@@ -5,19 +6,16 @@ import env from '../../../env';
import logger from '../../../logger';
import { getSchema } from '../../../utils/get-schema';
import { RolesService, UsersService, SettingsService } from '../../../services';
import getDatabase, { isInstalled, hasDatabaseConnection } from '../../../database';
import getDatabase, { isInstalled, validateDBConnection, hasDatabaseConnection } from '../../../database';
import { SchemaOverview } from '../../../types';
export default async function bootstrap({ skipAdminInit }: { skipAdminInit?: boolean }): Promise<void> {
logger.info('Initializing bootstrap...');
if ((await isDatabaseAvailable()) === false) {
logger.error(`Can't connect to the database`);
process.exit(1);
}
const database = getDatabase();
await waitForDatabase(database);
if ((await isInstalled()) === false) {
logger.info('Installing Directus system tables...');
@@ -48,19 +46,20 @@ export default async function bootstrap({ skipAdminInit }: { skipAdminInit?: boo
process.exit(0);
}
async function isDatabaseAvailable() {
async function waitForDatabase(database: Knex) {
const tries = 5;
const secondsBetweenTries = 5;
for (let i = 0; i < tries; i++) {
if (await hasDatabaseConnection()) {
if (await hasDatabaseConnection(database)) {
return true;
}
await new Promise((resolve) => setTimeout(resolve, secondsBetweenTries * 1000));
}
return false;
// This will throw and exit the process if the database is not available
await validateDBConnection(database);
}
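Net effect: bootstrap polls for connectivity up to five times at five-second intervals (about 25 seconds total) and then hands off to validateDBConnection, which logs the failure and exits the process, rather than duplicating that error handling locally.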
async function createDefaultAdmin(schema: SchemaOverview) {

View File

@@ -50,6 +50,13 @@ const password = (): Record<string, string> => ({
mask: '*',
});
const encrypt = (): Record<string, string | boolean> => ({
type: 'confirm',
name: 'options__encrypt',
message: 'Encrypt Connection:',
default: false,
});
const ssl = (): Record<string, string | boolean> => ({
type: 'confirm',
name: 'ssl',
@@ -62,5 +69,5 @@ export const databaseQuestions = {
mysql: [host, port, database, user, password],
pg: [host, port, database, user, password, ssl],
oracledb: [host, port, database, user, password],
mssql: [host, port, database, user, password],
mssql: [host, port, database, user, password, encrypt],
};

View File

@@ -9,6 +9,7 @@ export type Credentials = {
user?: string;
password?: string;
ssl?: boolean;
options__encrypt?: boolean;
};
export default function createDBConnection(
client: 'sqlite3' | 'mysql' | 'pg' | 'oracledb' | 'mssql',
@@ -23,26 +24,26 @@ export default function createDBConnection(
filename: filename as string,
};
} else {
if (client !== 'pg') {
const { host, port, database, user, password } = credentials as Credentials;
const { host, port, database, user, password } = credentials as Credentials;
connection = {
host: host,
port: Number(port),
database: database,
user: user,
password: password,
};
} else {
const { host, port, database, user, password, ssl } = credentials as Credentials;
connection = {
host: host,
port: Number(port),
database: database,
user: user,
password: password,
};
connection = {
host: host,
port: Number(port),
database: database,
user: user,
password: password,
ssl: ssl,
if (client === 'pg') {
const { ssl } = credentials as Credentials;
connection['ssl'] = ssl;
}
if (client === 'mssql') {
const { options__encrypt } = credentials as Credentials;
(connection as Knex.MsSqlConnectionConfig)['options'] = {
encrypt: options__encrypt,
};
}
}
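A sketch of what the refactor yields for an MSSQL target, using the Credentials type from the hunk above (credential values illustrative): one flattened base object, then vendor-specific extras merged in afterwards.

const credentials: Credentials = {
  host: 'localhost',
  port: 1433,
  database: 'directus',
  user: 'sa',
  password: 'secret', // illustrative
  options__encrypt: true, // the answer to the new "Encrypt Connection:" prompt
};

// createDBConnection('mssql', credentials).connection is then equivalent to:
const connection = {
  host: 'localhost',
  port: 1433,
  database: 'directus',
  user: 'sa',
  password: 'secret',
  options: { encrypt: true }, // typed via Knex.MsSqlConnectionConfig
};

Previously pg needed a near-duplicate branch just to add ssl; now ssl and options are the only per-vendor additions.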

View File

@@ -38,6 +38,7 @@ ACCESS_TOKEN_TTL="15m"
REFRESH_TOKEN_TTL="7d"
REFRESH_TOKEN_COOKIE_SECURE=false
REFRESH_TOKEN_COOKIE_SAME_SITE="lax"
REFRESH_TOKEN_COOKIE_NAME="directus_refresh_token"
####################################################################################################
## SSO (OAuth) Providers

View File

@@ -1,42 +1,42 @@
import { Transformation } from './types';
import { TransformationParams } from './types';
export const SYSTEM_ASSET_ALLOW_LIST: Transformation[] = [
export const SYSTEM_ASSET_ALLOW_LIST: TransformationParams[] = [
{
key: 'system-small-cover',
width: 64,
height: 64,
fit: 'cover',
transforms: [['resize', { width: 64, height: 64, fit: 'cover' }]],
},
{
key: 'system-small-contain',
width: 64,
fit: 'contain',
transforms: [['resize', { width: 64, fit: 'contain' }]],
},
{
key: 'system-medium-cover',
width: 300,
height: 300,
fit: 'cover',
transforms: [['resize', { width: 300, height: 300, fit: 'cover' }]],
},
{
key: 'system-medium-contain',
width: 300,
fit: 'contain',
transforms: [['resize', { width: 300, fit: 'contain' }]],
},
{
key: 'system-large-cover',
width: 800,
height: 600,
fit: 'cover',
transforms: [['resize', { width: 800, height: 800, fit: 'cover' }]],
},
{
key: 'system-large-contain',
width: 800,
fit: 'contain',
transforms: [['resize', { width: 800, fit: 'contain' }]],
},
];
export const ASSET_TRANSFORM_QUERY_KEYS = ['key', 'width', 'height', 'fit', 'withoutEnlargement', 'quality'];
export const ASSET_TRANSFORM_QUERY_KEYS = [
'key',
'transforms',
'width',
'height',
'format',
'fit',
'quality',
'withoutEnlargement',
];
export const FILTER_VARIABLES = ['$NOW', '$CURRENT_USER', '$CURRENT_ROLE'];
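The presets thus move from fixed width/height/fit fields to raw Sharp pipelines: each entry is a [methodName, arguments] tuple applied in order. The same shape is accepted ad hoc through the new transforms query key; a hedged request example (file id hypothetical):

GET /assets/<file-id>?transforms=[["resize",{"width":64,"height":64,"fit":"cover"}]]

which expresses the same operation as the legacy ?width=64&height=64&fit=cover keys that remain on the allow list.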

View File

@@ -10,7 +10,7 @@ import { ForbiddenException, InvalidQueryException, RangeNotSatisfiableException
import useCollection from '../middleware/use-collection';
import { AssetsService, PayloadService } from '../services';
import storage from '../storage';
import { Transformation } from '../types/assets';
import { TransformationParams, TransformationMethods, TransformationPreset } from '../types/assets';
import asyncHandler from '../utils/async-handler';
const router = Router();
@@ -68,26 +68,63 @@ router.get(
if ('key' in transformation && Object.keys(transformation).length > 1) {
throw new InvalidQueryException(`You can't combine the "key" query parameter with any other transformation.`);
}
if ('quality' in transformation && (Number(transformation.quality) < 1 || Number(transformation.quality) > 100)) {
throw new InvalidQueryException(`"quality" Parameter has to between 1 to 100`);
if ('transforms' in transformation) {
let transforms: unknown;
// Try parse the JSON array
try {
transforms = JSON.parse(transformation['transforms'] as string);
} catch {
throw new InvalidQueryException(`"transforms" Parameter needs to be a JSON array of allowed transformations.`);
}
// Check if it is actually an array.
if (!Array.isArray(transforms)) {
throw new InvalidQueryException(`"transforms" Parameter needs to be a JSON array of allowed transformations.`);
}
// Check against ASSETS_TRANSFORM_MAX_OPERATIONS
if (transforms.length > Number(env.ASSETS_TRANSFORM_MAX_OPERATIONS)) {
throw new InvalidQueryException(
`"transforms" Parameter is only allowed ${env.ASSETS_TRANSFORM_MAX_OPERATIONS} transformations.`
);
}
// Check the transformations are valid
transforms.forEach((transform) => {
const name = transform[0];
if (!TransformationMethods.includes(name)) {
throw new InvalidQueryException(`"transforms" Parameter does not allow "${name}" as a transformation.`);
}
});
transformation.transforms = transforms;
}
const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key);
const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key!);
const allKeys: string[] = [
...systemKeys,
...(assetSettings.storage_asset_presets || []).map((transformation: Transformation) => transformation.key),
...(assetSettings.storage_asset_presets || []).map((transformation: TransformationParams) => transformation.key),
];
// For use in the next request handler
res.locals.shortcuts = [...SYSTEM_ASSET_ALLOW_LIST, ...(assetSettings.storage_asset_presets || [])];
res.locals.transformation = transformation;
if (Object.keys(transformation).length === 0) {
if (
Object.keys(transformation).length === 0 ||
('transforms' in transformation && transformation.transforms!.length === 0)
) {
return next();
}
if (assetSettings.storage_asset_transform === 'all') {
if (transformation.key && allKeys.includes(transformation.key as string) === false)
if (transformation.key && allKeys.includes(transformation.key as string) === false) {
throw new InvalidQueryException(`Key "${transformation.key}" isn't configured.`);
}
return next();
} else if (assetSettings.storage_asset_transform === 'presets') {
if (allKeys.includes(transformation.key as string)) return next();
@@ -107,9 +144,9 @@ router.get(
schema: req.schema,
});
const transformation: Transformation = res.locals.transformation.key
? res.locals.shortcuts.find(
(transformation: Transformation) => transformation.key === res.locals.transformation.key
const transformation: TransformationParams | TransformationPreset = res.locals.transformation.key
? (res.locals.shortcuts as TransformationPreset[]).find(
(transformation) => transformation.key === res.locals.transformation.key
)
: res.locals.transformation;
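Putting the new validation together, a sketch of which requests survive it (assuming the default ASSETS_TRANSFORM_MAX_OPERATIONS of 5 added further down):

?transforms=[["blur",45],["grayscale"]] // OK: JSON array, within the operation limit, methods on the allow list
?transforms={"blur":45} // 400: parses as JSON, but isn't an array
?transforms=[["notAMethod"]] // 400: not in TransformationMethods
?key=system-small-cover&width=100 // 400: "key" can't be combined with other transformations

An empty transforms array now short-circuits to next() exactly like a request with no transformation at all.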

View File

@@ -11,7 +11,8 @@ import { respond } from '../middleware/respond';
import { AuthenticationService, UsersService } from '../services';
import asyncHandler from '../utils/async-handler';
import getEmailFromProfile from '../utils/get-email-from-profile';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
import logger from '../logger';
const router = Router();
@@ -59,7 +60,7 @@ router.post(
}
if (mode === 'cookie') {
res.cookie('directus_refresh_token', refreshToken, {
res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
@@ -88,7 +89,7 @@ router.post(
schema: req.schema,
});
const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;
const currentRefreshToken = req.body.refresh_token || req.cookies[env.REFRESH_TOKEN_COOKIE_NAME];
if (!currentRefreshToken) {
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
@@ -107,7 +108,7 @@ router.post(
}
if (mode === 'cookie') {
res.cookie('directus_refresh_token', refreshToken, {
res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
@@ -136,7 +137,7 @@ router.post(
schema: req.schema,
});
const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;
const currentRefreshToken = req.body.refresh_token || req.cookies[env.REFRESH_TOKEN_COOKIE_NAME];
if (!currentRefreshToken) {
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
@@ -144,8 +145,8 @@ router.post(
await authenticationService.logout(currentRefreshToken);
if (req.cookies.directus_refresh_token) {
res.clearCookie('directus_refresh_token', {
if (req.cookies[env.REFRESH_TOKEN_COOKIE_NAME]) {
res.clearCookie(env.REFRESH_TOKEN_COOKIE_NAME, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
secure: env.REFRESH_TOKEN_COOKIE_SECURE ?? false,
@@ -161,7 +162,7 @@ router.post(
router.post(
'/password/request',
asyncHandler(async (req, res, next) => {
if (!req.body.email) {
if (typeof req.body.email !== 'string') {
throw new InvalidPayloadException(`"email" field is required.`);
}
@@ -180,6 +181,7 @@ router.post(
if (err instanceof InvalidPayloadException) {
throw err;
} else {
logger.warn(err, `[email] ${err}`);
return next();
}
}
@@ -190,11 +192,11 @@ router.post(
router.post(
'/password/reset',
asyncHandler(async (req, res, next) => {
if (!req.body.token) {
if (typeof req.body.token !== 'string') {
throw new InvalidPayloadException(`"token" field is required.`);
}
if (!req.body.password) {
if (typeof req.body.password !== 'string') {
throw new InvalidPayloadException(`"password" field is required.`);
}
@@ -320,6 +322,9 @@ router.get(
});
} catch (error) {
emitStatus('fail');
logger.warn(error);
if (redirect) {
let reason = 'UNKNOWN_EXCEPTION';
@@ -340,7 +345,7 @@ router.get(
emitStatus('success');
if (redirect) {
res.cookie('directus_refresh_token', refreshToken, {
res.cookie(env.REFRESH_TOKEN_COOKIE_NAME, refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
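With the cookie name configurable, deployments can namespace or brand it; a hedged example (value illustrative):

REFRESH_TOKEN_COOKIE_NAME="myapp_refresh_token"

Every place that previously hard-coded directus_refresh_token (set on login and refresh, read on refresh and logout, cleared on logout, plus the pino redaction path further down) now follows the variable, so a rename can't silently miss one of them.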

View File

@@ -3,18 +3,17 @@ import asyncHandler from '../utils/async-handler';
import { RouteNotFoundException } from '../exceptions';
import { listExtensions, getAppExtensionSource } from '../extensions';
import { respond } from '../middleware/respond';
import { depluralize } from '@directus/shared/utils';
import { AppExtensionType, Plural } from '@directus/shared/types';
import { APP_EXTENSION_TYPES } from '@directus/shared/constants';
import { depluralize, isAppExtension } from '@directus/shared/utils';
import { Plural } from '@directus/shared/types';
const router = Router();
router.get(
'/:type',
asyncHandler(async (req, res, next) => {
const type = depluralize(req.params.type as Plural<AppExtensionType>);
const type = depluralize(req.params.type as Plural<string>);
if (APP_EXTENSION_TYPES.includes(type) === false) {
if (!isAppExtension(type)) {
throw new RouteNotFoundException(req.path);
}
@@ -32,9 +31,9 @@ router.get(
router.get(
'/:type/index.js',
asyncHandler(async (req, res) => {
const type = depluralize(req.params.type as Plural<AppExtensionType>);
const type = depluralize(req.params.type as Plural<string>);
if (APP_EXTENSION_TYPES.includes(type) === false) {
if (!isAppExtension(type)) {
throw new RouteNotFoundException(req.path);
}

View File

@@ -6,7 +6,8 @@ import validateCollection from '../middleware/collection-exists';
import { respond } from '../middleware/respond';
import useCollection from '../middleware/use-collection';
import { FieldsService } from '../services/fields';
import { Field, types } from '../types';
import { Field, Type } from '@directus/shared/types';
import { TYPES } from '@directus/shared/constants';
import asyncHandler from '../utils/async-handler';
const router = Router();
@@ -65,7 +66,7 @@ const newFieldSchema = Joi.object({
collection: Joi.string().optional(),
field: Joi.string().required(),
type: Joi.string()
.valid(...types, ...ALIAS_TYPES)
.valid(...TYPES, ...ALIAS_TYPES)
.allow(null)
.optional(),
schema: Joi.object({
@@ -93,7 +94,7 @@ router.post(
throw new InvalidPayloadException(error.message);
}
const field: Partial<Field> & { field: string; type: typeof types[number] | null } = req.body;
const field: Partial<Field> & { field: string; type: Type | null } = req.body;
await service.createField(req.params.collection, field);
@@ -152,7 +153,7 @@ router.patch(
const updateSchema = Joi.object({
type: Joi.string()
.valid(...types, ...ALIAS_TYPES)
.valid(...TYPES, ...ALIAS_TYPES)
.allow(null),
schema: Joi.object({
default_value: Joi.any(),
@@ -183,7 +184,7 @@ router.patch(
throw new InvalidPayloadException(`You need to provide "type" when providing "schema".`);
}
const fieldData: Partial<Field> & { field: string; type: typeof types[number] } = req.body;
const fieldData: Partial<Field> & { field: string; type: Type } = req.body;
if (!fieldData.field) fieldData.field = req.params.field;

View File

@@ -11,7 +11,7 @@ import { validateBatch } from '../middleware/validate-batch';
import { FilesService, MetaService } from '../services';
import { File, PrimaryKey } from '../types';
import asyncHandler from '../utils/async-handler';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
const router = express.Router();

View File

@@ -2,12 +2,13 @@ import argon2 from 'argon2';
import { Router } from 'express';
import Joi from 'joi';
import { nanoid } from 'nanoid';
import { InvalidPayloadException, InvalidQueryException } from '../exceptions';
import { ForbiddenException, InvalidPayloadException, InvalidQueryException } from '../exceptions';
import collectionExists from '../middleware/collection-exists';
import { respond } from '../middleware/respond';
import { RevisionsService, UtilsService, ImportService } from '../services';
import asyncHandler from '../utils/async-handler';
import Busboy from 'busboy';
import { getCache } from '../cache';
const router = Router();
@@ -115,4 +116,20 @@ router.post(
})
);
router.post(
'/cache/clear',
asyncHandler(async (req, res) => {
if (req.accountability?.admin !== true) {
throw new ForbiddenException();
}
const { cache, schemaCache } = getCache();
await cache?.clear();
await schemaCache?.clear();
res.status(200).end();
})
);
export default router;
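A usage sketch for the new admin-only endpoint (assuming the router stays mounted at /utils, as the controller imports in app.ts suggest; token illustrative):

curl -X POST http://localhost:8055/utils/cache/clear \
  -H "Authorization: Bearer <admin-token>"

It flushes both the data cache and the schema cache, answers 200 with an empty body, and rejects non-admin tokens with a ForbiddenException (HTTP 403).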

View File

@@ -5,6 +5,9 @@ import env from '../env';
import logger from '../logger';
import { getConfigFromEnv } from '../utils/get-config-from-env';
import { validateEnv } from '../utils/validate-env';
import fse from 'fs-extra';
import path from 'path';
import { merge } from 'lodash';
let database: Knex | null = null;
let inspector: ReturnType<typeof SchemaInspector> | null = null;
@@ -65,6 +68,13 @@ export default function getDatabase(): Knex {
};
}
if (env.DB_CLIENT === 'mssql') {
// This brings MS SQL in line with the other DB vendors. We shouldn't do any automatic
// timezone conversion on the database level, especially not when other database vendors don't
// act the same
merge(knexConfig, { connection: { options: { useUTC: false } } });
}
database = knex(knexConfig);
const times: Record<string, number> = {};
@@ -94,8 +104,8 @@ export function getSchemaInspector(): ReturnType<typeof SchemaInspector> {
return inspector;
}
export async function hasDatabaseConnection(): Promise<boolean> {
const database = getDatabase();
export async function hasDatabaseConnection(database?: Knex): Promise<boolean> {
database = database ?? getDatabase();
try {
if (env.DB_CLIENT === 'oracledb') {
@@ -103,15 +113,22 @@ export async function hasDatabaseConnection(): Promise<boolean> {
} else {
await database.raw('SELECT 1');
}
return true;
} catch {
return false;
}
}
export async function validateDBConnection(): Promise<void> {
export async function validateDBConnection(database?: Knex): Promise<void> {
database = database ?? getDatabase();
try {
await hasDatabaseConnection();
if (env.DB_CLIENT === 'oracledb') {
await database.raw('select 1 from DUAL');
} else {
await database.raw('SELECT 1');
}
} catch (error) {
logger.error(`Can't connect to the database.`);
logger.error(error);
@@ -127,3 +144,35 @@ export async function isInstalled(): Promise<boolean> {
// exists when using the installer CLI.
return await inspector.hasTable('directus_collections');
}
export async function validateMigrations(): Promise<boolean> {
const database = getDatabase();
try {
let migrationFiles = await fse.readdir(path.join(__dirname, 'migrations'));
const customMigrationsPath = path.resolve(env.EXTENSIONS_PATH, 'migrations');
let customMigrationFiles =
((await fse.pathExists(customMigrationsPath)) && (await fse.readdir(customMigrationsPath))) || [];
migrationFiles = migrationFiles.filter(
(file: string) => file.startsWith('run') === false && file.endsWith('.d.ts') === false
);
customMigrationFiles = customMigrationFiles.filter((file: string) => file.endsWith('.js'));
migrationFiles.push(...customMigrationFiles);
const requiredVersions = migrationFiles.map((filePath) => filePath.split('-')[0]);
const completedVersions = (await database.select('version').from('directus_migrations')).map(
({ version }) => version
);
return requiredVersions.every((version) => completedVersions.includes(version));
} catch (error) {
logger.error(`Database migrations cannot be found`);
logger.error(error);
throw process.exit(1);
}
}
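The check reduces to comparing filename prefixes against recorded rows; a short sketch (migration name hypothetical):

// On disk, core migrations plus <EXTENSIONS_PATH>/migrations/*.js:
'20210801A-add-example.js'.split('-')[0] // → version '20210801A'
// In the database:
// SELECT version FROM directus_migrations → ['20210801A', ...]
// validateMigrations() is true only if every file's version appears in the table;
// at startup (see app.ts above) a false result logs a warning instead of aborting.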

View File

@@ -1,7 +1,6 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
import env from '../../env';
async function oracleAlterUrl(knex: Knex, type: string): Promise<void> {
await knex.raw('ALTER TABLE "directus_webhooks" ADD "url__temp" ?', [knex.raw(type)]);
@@ -23,7 +22,7 @@ export async function up(knex: Knex): Promise<void> {
}
export async function down(knex: Knex): Promise<void> {
if (env.DB_CLIENT === 'oracledb') {
if (knex.client instanceof Client_Oracledb) {
await oracleAlterUrl(knex, 'VARCHAR2(255)');
return;
}

View File

@@ -1,7 +1,6 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
import env from '../../env';
async function oracleAlterCollections(knex: Knex, type: string): Promise<void> {
await knex.raw('ALTER TABLE "directus_webhooks" ADD "collections__temp" ?', [knex.raw(type)]);
@@ -23,7 +22,7 @@ export async function up(knex: Knex): Promise<void> {
}
export async function down(knex: Knex): Promise<void> {
if (env.DB_CLIENT === 'oracledb') {
if (knex.client instanceof Client_Oracledb) {
await oracleAlterCollections(knex, 'VARCHAR2(255)');
return;
}

View File

@@ -1,12 +1,22 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
export async function up(knex: Knex): Promise<void> {
if (knex.client instanceof Client_Oracledb) {
return;
}
await knex.schema.alterTable('directus_files', (table) => {
table.bigInteger('filesize').nullable().defaultTo(null).alter();
});
}
export async function down(knex: Knex): Promise<void> {
if (knex.client instanceof Client_Oracledb) {
return;
}
await knex.schema.alterTable('directus_files', (table) => {
table.integer('filesize').nullable().defaultTo(null).alter();
});

View File

@@ -0,0 +1,13 @@
import { Knex } from 'knex';
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_fields', (table) => {
table.json('conditions');
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_fields', (table) => {
table.dropColumn('conditions');
});
}

View File

@@ -0,0 +1,22 @@
import { Knex } from 'knex';
import { getDefaultIndexName } from '../../utils/get-default-index-name';
const indexName = getDefaultIndexName('foreign', 'directus_settings', 'storage_default_folder');
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_settings', (table) => {
table
.uuid('storage_default_folder')
.references('id')
.inTable('directus_folders')
.withKeyName(indexName)
.onDelete('SET NULL');
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_settings', (table) => {
table.dropForeign(['storage_default_folder'], indexName);
table.dropColumn('storage_default_folder');
});
}

View File

@@ -0,0 +1,49 @@
import { Knex } from 'knex';
import logger from '../../logger';
export async function up(knex: Knex): Promise<void> {
const dividerGroups = await knex.select('*').from('directus_fields').where('interface', '=', 'group-divider');
for (const dividerGroup of dividerGroups) {
const newOptions: { showHeader: true; headerIcon?: string; headerColor?: string } = { showHeader: true };
if (dividerGroup.options) {
try {
const options =
typeof dividerGroup.options === 'string' ? JSON.parse(dividerGroup.options) : dividerGroup.options;
if (options.icon) newOptions.headerIcon = options.icon;
if (options.color) newOptions.headerColor = options.color;
} catch (err) {
logger.warn(`Couldn't convert previous options from field ${dividerGroup.collection}.${dividerGroup.field}`);
logger.warn(err);
}
}
try {
await knex('directus_fields')
.update({
interface: 'group-standard',
options: JSON.stringify(newOptions),
})
.where('id', '=', dividerGroup.id);
} catch (err) {
logger.warn(`Couldn't update ${dividerGroup.collection}.${dividerGroup.field} to new group interface`);
logger.warn(err);
}
}
await knex('directus_fields')
.update({
interface: 'group-standard',
})
.where({ interface: 'group-raw' });
}
export async function down(knex: Knex): Promise<void> {
await knex('directus_fields')
.update({
interface: 'group-raw',
})
.where('interface', '=', 'group-standard');
}
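A worked example of the conversion (options hypothetical): a group-divider stored with

{ "icon": "payments", "color": "#2F80ED" }

becomes a group-standard field carrying

{ "showHeader": true, "headerIcon": "payments", "headerColor": "#2F80ED" }

while unparseable options degrade to plain { "showHeader": true } with a warning, so one bad row can't abort the migration. The later migration below then splits group-standard back into group-raw and group-detail based on that showHeader flag.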

View File

@@ -0,0 +1,13 @@
import { Knex } from 'knex';
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_fields', (table) => {
table.boolean('required').defaultTo(false);
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_fields', (table) => {
table.dropColumn('required');
});
}

View File

@@ -0,0 +1,35 @@
import { Knex } from 'knex';
export async function up(knex: Knex): Promise<void> {
const groups = await knex.select('*').from('directus_fields').where({ interface: 'group-standard' });
const raw = [];
const detail = [];
for (const group of groups) {
const options = typeof group.options === 'string' ? JSON.parse(group.options) : group.options || {};
if (options.showHeader === true) {
detail.push(group);
} else {
raw.push(group);
}
}
for (const field of raw) {
await knex('directus_fields').update({ interface: 'group-raw' }).where({ id: field.id });
}
for (const field of detail) {
await knex('directus_fields').update({ interface: 'group-detail' }).where({ id: field.id });
}
}
export async function down(knex: Knex): Promise<void> {
await knex('directus_fields')
.update({
interface: 'group-standard',
})
.where({ interface: 'group-detail' })
.orWhere({ interface: 'group-raw' });
}

View File

@@ -0,0 +1,94 @@
import { Knex } from 'knex';
// Change image metadata structure to match the output from 'exifr'
export async function up(knex: Knex): Promise<void> {
const files = await knex
.select<{ id: number; metadata: string }[]>('id', 'metadata')
.from('directus_files')
.whereNotNull('metadata');
for (const { id, metadata } of files) {
let prevMetadata;
try {
prevMetadata = JSON.parse(metadata);
} catch {
continue;
}
// Update only required if metadata has 'exif' data
if (prevMetadata.exif) {
// Get all data from 'exif' and rename the following keys:
// - 'image' to 'ifd0'
// - 'thumbnail to 'ifd1'
// - 'interoperability' to 'interop'
const newMetadata = prevMetadata.exif;
if (newMetadata.image) {
newMetadata.ifd0 = newMetadata.image;
delete newMetadata.image;
}
if (newMetadata.thumbnail) {
newMetadata.ifd1 = newMetadata.thumbnail;
delete newMetadata.thumbnail;
}
if (newMetadata.interoperability) {
newMetadata.interop = newMetadata.interoperability;
delete newMetadata.interoperability;
}
if (prevMetadata.icc) {
newMetadata.icc = prevMetadata.icc;
}
if (prevMetadata.iptc) {
newMetadata.iptc = prevMetadata.iptc;
}
await knex('directus_files')
.update({ metadata: JSON.stringify(newMetadata) })
.where({ id });
}
}
}
export async function down(knex: Knex): Promise<void> {
const files = await knex
.select<{ id: number; metadata: string }[]>('id', 'metadata')
.from('directus_files')
.whereNotNull('metadata')
.whereNot('metadata', '{}');
for (const { id, metadata } of files) {
const prevMetadata = JSON.parse(metadata);
// Update only required if metadata has keys other than 'icc' and 'iptc'
if (Object.keys(prevMetadata).filter((key) => key !== 'icc' && key !== 'iptc').length > 0) {
// Put all data under 'exif' and rename/move keys afterwards
const newMetadata: { exif: Record<string, unknown>; icc?: unknown; iptc?: unknown } = { exif: prevMetadata };
if (newMetadata.exif.ifd0) {
newMetadata.exif.image = newMetadata.exif.ifd0;
delete newMetadata.exif.ifd0;
}
if (newMetadata.exif.ifd1) {
newMetadata.exif.thumbnail = newMetadata.exif.ifd1;
delete newMetadata.exif.ifd1;
}
if (newMetadata.exif.interop) {
newMetadata.exif.interoperability = newMetadata.exif.interop;
delete newMetadata.exif.interop;
}
if (newMetadata.exif.icc) {
newMetadata.icc = newMetadata.exif.icc;
delete newMetadata.exif.icc;
}
if (newMetadata.exif.iptc) {
newMetadata.iptc = newMetadata.exif.iptc;
delete newMetadata.exif.iptc;
}
await knex('directus_files')
.update({ metadata: JSON.stringify(newMetadata) })
.where({ id });
}
}
}
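The up/down pair is a pure key relocation; a before/after sketch (nested values elided):

// exif-reader shape (before):
{ "exif": { "image": {}, "thumbnail": {}, "interoperability": {} }, "icc": {}, "iptc": {} }

// exifr shape (after): exif contents promoted to the top level, IFD names normalized:
{ "ifd0": {}, "ifd1": {}, "interop": {}, "icc": {}, "iptc": {} }

Rows whose metadata fails JSON.parse are skipped on the way up, so legacy garbage survives untouched.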

View File

@@ -5,12 +5,7 @@ import fse from 'fs-extra';
import { Knex } from 'knex';
import path from 'path';
import env from '../../env';
type Migration = {
version: string;
name: string;
timestamp: Date;
};
import { Migration } from '../../types';
export default async function run(database: Knex, direction: 'up' | 'down' | 'latest'): Promise<void> {
let migrationFiles = await fse.readdir(__dirname);

View File

@@ -7,7 +7,7 @@ import { applyFunctionToColumnName } from '../utils/apply-function-to-column-nam
import applyQuery from '../utils/apply-query';
import { getColumn } from '../utils/get-column';
import { stripFunction } from '../utils/strip-function';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
import getDatabase from './index';
type RunASTOptions = {

View File

@@ -3,13 +3,13 @@ import yaml from 'js-yaml';
import { Knex } from 'knex';
import { isObject } from 'lodash';
import path from 'path';
import { types } from '../../types';
import { Type } from '@directus/shared/types';
type TableSeed = {
table: string;
columns: {
[column: string]: {
type?: typeof types[number];
type?: Type;
primary?: boolean;
nullable?: boolean;
default?: any;
@@ -45,6 +45,8 @@ export default async function runSeed(database: Knex): Promise<void> {
for (const [columnName, columnInfo] of Object.entries(seedData.columns)) {
let column: Knex.ColumnBuilder;
if (columnInfo.type === 'alias' || columnInfo.type === 'unknown') return;
if (columnInfo.type === 'string') {
column = tableBuilder.string(columnName, columnInfo.length);
} else if (columnInfo.increments) {

View File

@@ -8,6 +8,7 @@ defaults:
note: null
translations: null
display_template: null
accountability: 'all'
data:
- collection: directus_activity

View File

@@ -73,3 +73,8 @@ fields:
- collection: directus_fields
field: note
width: half
- collection: directus_fields
field: conditions
hidden: true
special: json

View File

@@ -1,7 +1,7 @@
import fse from 'fs-extra';
import { merge } from 'lodash';
import path from 'path';
import { FieldMeta } from '../../../types';
import { FieldMeta } from '@directus/shared/types';
import { requireYAML } from '../../../utils/require-yaml';
const defaults = requireYAML(require.resolve('./_defaults.yaml'));

View File

@@ -124,7 +124,7 @@ fields:
options:
slug: true
onlyOnCreate: false
width: half
width: full
- field: fit
name: Fit
type: string
@@ -173,6 +173,7 @@ fields:
step: 1
width: half
- field: withoutEnlargement
name: Upscaling
type: boolean
schema:
default_value: false
@@ -181,6 +182,51 @@ fields:
width: half
options:
label: Don't upscale images
- field: format
name: Format
type: string
schema:
is_nullable: false
default_value: ''
meta:
interface: select-dropdown
options:
allowNone: true
choices:
- value: jpeg
text: JPEG
- value: png
text: PNG
- value: webp
text: WebP
- value: tiff
text: Tiff
width: half
- field: transforms
name: Additional Transformations
type: json
schema:
is_nullable: false
default_value: []
meta:
note:
The Sharp method name and its arguments. See https://sharp.pixelplumbing.com/api-constructor for more
information.
interface: json
options:
template: >
[
["blur", 45],
["grayscale"],
["extend", { "right": 500, "background": "rgb(255, 0, 0)" }]
]
placeholder: >
[
["blur", 45],
["grayscale"],
["extend", { "right": 500, "background": "rgb(255, 0, 0)" }]
]
width: full
template: '{{key}}'
special: json
width: full
@@ -197,6 +243,11 @@ fields:
text: Presets Only
width: half
- field: storage_default_folder
interface: system-folder
width: half
note: Default folder where new files are uploaded
- field: overrides_divider
interface: presentation-divider
options:

View File

@@ -8,7 +8,7 @@ import fs from 'fs';
import { clone, toNumber, toString } from 'lodash';
import path from 'path';
import { requireYAML } from './utils/require-yaml';
import { toArray } from './utils/to-array';
import { toArray } from '@directus/shared/utils';
const acceptedEnvTypes = ['string', 'number', 'regex', 'array'];
@@ -16,7 +16,7 @@ const defaults: Record<string, any> = {
CONFIG_PATH: path.resolve(process.cwd(), '.env'),
PORT: 8055,
PUBLIC_URL: 'http://localhost:8055',
PUBLIC_URL: '/',
MAX_PAYLOAD_SIZE: '100kb',
STORAGE_LOCATIONS: 'local',
@@ -34,6 +34,7 @@ const defaults: Record<string, any> = {
REFRESH_TOKEN_TTL: '7d',
REFRESH_TOKEN_COOKIE_SECURE: false,
REFRESH_TOKEN_COOKIE_SAME_SITE: 'lax',
REFRESH_TOKEN_COOKIE_NAME: 'directus_refresh_token',
ROOT_REDIRECT: './admin',
@@ -64,9 +65,12 @@ const defaults: Record<string, any> = {
TELEMETRY: true,
ASSETS_CACHE_TTL: '30m',
ASSETS_CACHE_TTL: '30d',
ASSETS_TRANSFORM_MAX_CONCURRENT: 1,
ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION: 6000,
ASSETS_TRANSFORM_MAX_OPERATIONS: 5,
SERVE_APP: true,
};
// Allows us to force certain environment variable into a type, instead of relying
@@ -170,6 +174,8 @@ function getEnvironmentValueByType(envVariableString: string) {
return new RegExp(envVariableValue);
case 'string':
return envVariableValue;
case 'json':
return tryJSON(envVariableValue);
}
}
@@ -181,14 +187,14 @@ function processValues(env: Record<string, any>) {
// and store it in the variable with the same name but without '_FILE' at the end
let newKey;
if (key.length > 5 && key.endsWith('_FILE')) {
newKey = key.slice(0, -5);
if (newKey in env) {
throw new Error(
`Duplicate environment variable encountered: you can't use "${newKey}" and "${key}" simultaneously.`
);
}
try {
value = fs.readFileSync(value, { encoding: 'utf8' });
newKey = key.slice(0, -5);
if (newKey in env) {
throw new Error(
`Duplicate environment variable encountered: you can't use "${key}" and "${newKey}" simultaneously.`
);
}
key = newKey;
} catch {
throw new Error(`Failed to read value from file "${value}", defined in environment variable "${key}".`);
@@ -214,6 +220,9 @@ function processValues(env: Record<string, any>) {
case 'array':
env[key] = toArray(value);
break;
case 'json':
env[key] = tryJSON(value);
break;
}
continue;
}
@@ -247,6 +256,14 @@ function processValues(env: Record<string, any>) {
continue;
}
if (String(value).includes(',')) {
env[key] = toArray(value);
}
// Try converting the value to a JS object. This allows JSON objects to be passed for nested
// config flags, or custom param names (that aren't camelCased)
env[key] = tryJSON(value);
// If '_FILE' variable hasn't been processed yet, store it as it is (string)
if (newKey) {
env[key] = value;
@@ -255,3 +272,11 @@ function processValues(env: Record<string, any>) {
return env;
}
function tryJSON(value: any) {
try {
return JSON.parse(value);
} catch {
return value;
}
}
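Two hedged examples of what the JSON support enables (variable names and values illustrative):

# Explicit, via the type-prefix convention (assumes 'json' also joins acceptedEnvTypes,
# which the new case in getEnvironmentValueByType implies):
DB_SSL="json:{\"rejectUnauthorized\": false}"

# Implicit: any plain value that parses as JSON becomes an object, so nested
# driver options no longer need one environment variable per field:
CACHE_REDIS_OPTIONS='{"commandTimeout": 500}'

tryJSON is deliberately forgiving: anything JSON.parse rejects is passed through as the original string.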

View File

@@ -1,13 +0,0 @@
export class BaseException extends Error {
status: number;
code: string;
extensions: Record<string, any>;
constructor(message: string, status: number, code: string, extensions?: Record<string, any>) {
super(message);
this.status = status;
this.code = code;
this.extensions = extensions || {};
}
}

View File

@@ -1,4 +1,4 @@
import { BaseException } from '../base';
import { BaseException } from '@directus/shared/exceptions';
type Exceptions = {
collection: string;

View File

@@ -46,7 +46,7 @@ async function uniqueViolation(error: MSSQLError) {
* information_schema when this happens
*/
const betweenQuotes = /'([^']+)'/;
const betweenQuotes = /'([^']+)'/g;
const betweenParens = /\(([^)]+)\)/g;
const quoteMatches = error.message.match(betweenQuotes);
@@ -54,21 +54,35 @@ async function uniqueViolation(error: MSSQLError) {
if (!quoteMatches || !parenMatches) return error;
const keyName = quoteMatches[1];
const keyName = quoteMatches[1]?.slice(1, -1);
const database = getDatabase();
let collection = quoteMatches[0]?.slice(1, -1);
let field: string | null = null;
const constraintUsage = await database
.select('*')
.from('INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE')
.where({
CONSTRAINT_NAME: keyName,
})
.first();
if (keyName) {
const database = getDatabase();
const collection = constraintUsage.TABLE_NAME;
const field = constraintUsage.COLUMN_NAME;
const invalid = parenMatches[parenMatches.length - 1].slice(1, -1);
const constraintUsage = await database
.select('sys.columns.name as field', database.raw('OBJECT_NAME(??) as collection', ['sys.columns.object_id']))
.from('sys.indexes')
.innerJoin('sys.index_columns', (join) => {
join
.on('sys.indexes.object_id', '=', 'sys.index_columns.object_id')
.andOn('sys.indexes.index_id', '=', 'sys.index_columns.index_id');
})
.innerJoin('sys.columns', (join) => {
join
.on('sys.index_columns.object_id', '=', 'sys.columns.object_id')
.andOn('sys.index_columns.column_id', '=', 'sys.columns.column_id');
})
.where('sys.indexes.name', '=', keyName)
.first();
collection = constraintUsage?.collection;
field = constraintUsage?.field;
}
const invalid = parenMatches[parenMatches.length - 1]?.slice(1, -1);
return new RecordNotUniqueException(field, {
collection,
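For orientation, MSSQL unique-violation messages read roughly like this (text hypothetical):

Violation of UNIQUE KEY constraint 'directus_users_email_unique'. Cannot insert duplicate key in object 'dbo.directus_users'. The duplicate key value is (admin@example.com).

The now-global betweenQuotes regex captures the quoted names, betweenParens captures the offending value, and the sys.indexes/sys.index_columns/sys.columns join resolves the index name to its table (collection) and column (field). Unlike the INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE lookup it replaces, this also covers plain unique indexes, the gap the truncated comment above alludes to.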

View File

@@ -1,4 +1,4 @@
import { BaseException } from '../base';
import { BaseException } from '@directus/shared/exceptions';
type Extensions = {
collection: string;

View File

@@ -1,4 +1,4 @@
import { BaseException } from '../base';
import { BaseException } from '@directus/shared/exceptions';
type Exceptions = {
collection: string;

View File

@@ -1,4 +1,4 @@
import { BaseException } from '../base';
import { BaseException } from '@directus/shared/exceptions';
type Extensions = {
collection: string;

View File

@@ -1,4 +1,4 @@
import { BaseException } from '../base';
import { BaseException } from '@directus/shared/exceptions';
type Exceptions = {
collection: string;

View File

@@ -1,4 +1,4 @@
import { BaseException } from '../base';
import { BaseException } from '@directus/shared/exceptions';
type Extensions = {
collection: string;

View File

@@ -1,107 +0,0 @@
import { ValidationErrorItem } from 'joi';
import { FilterOperator, ValidationOperator } from '../types';
import { BaseException } from './base';
type FailedValidationExtensions = {
field: string;
type: FilterOperator | ValidationOperator;
valid?: number | string | (number | string)[];
invalid?: number | string | (number | string)[];
substring?: string;
};
export class FailedValidationException extends BaseException {
constructor(error: ValidationErrorItem) {
const extensions: Partial<FailedValidationExtensions> = {
field: error.path[0] as string,
};
const joiType = error.type;
// eq | in | null | empty
if (joiType.endsWith('only')) {
if (error.context?.valids.length > 1) {
extensions.type = 'in';
extensions.valid = error.context?.valids;
} else {
const valid = error.context?.valids[0];
if (valid === null) {
extensions.type = 'null';
} else if (valid === '') {
extensions.type = 'empty';
} else {
extensions.type = 'eq';
extensions.valid = error.context?.valids[0];
}
}
}
// neq | nin | nnull | nempty
if (joiType.endsWith('invalid')) {
if (error.context?.invalids.length > 1) {
extensions.type = 'nin';
extensions.invalid = error.context?.invalids;
} else {
const invalid = error.context?.invalids[0];
if (invalid === null) {
extensions.type = 'nnull';
} else if (invalid === '') {
extensions.type = 'nempty';
} else {
extensions.type = 'neq';
extensions.invalid = invalid;
}
}
}
// gt
if (joiType.endsWith('greater')) {
extensions.type = 'gt';
extensions.valid = error.context?.limit;
}
// gte
if (joiType.endsWith('min')) {
extensions.type = 'gte';
extensions.valid = error.context?.limit;
}
// lt
if (joiType.endsWith('less')) {
extensions.type = 'lt';
extensions.valid = error.context?.limit;
}
// lte
if (joiType.endsWith('max')) {
extensions.type = 'lte';
extensions.valid = error.context?.limit;
}
// contains
if (joiType.endsWith('contains')) {
extensions.type = 'contains';
extensions.substring = error.context?.substring;
}
// ncontains
if (joiType.endsWith('ncontains')) {
extensions.type = 'ncontains';
extensions.substring = error.context?.substring;
}
// required
if (joiType.endsWith('required')) {
extensions.type = 'required';
}
if (joiType.endsWith('.pattern.base')) {
extensions.type = 'regex';
extensions.invalid = error.context?.value;
}
super(error.message, 400, 'FAILED_VALIDATION', extensions);
}
}

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class ForbiddenException extends BaseException {
constructor() {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class GraphQLValidationException extends BaseException {
constructor(extensions: Record<string, any>) {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
type Extensions = {
limit: number;

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class IllegalAssetTransformation extends BaseException {
constructor(message: string) {

View File

@@ -1,5 +1,3 @@
export * from './base';
export * from './failed-validation';
export * from './forbidden';
export * from './graphql-validation';
export * from './hit-rate-limit';

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class InvalidCredentialsException extends BaseException {
constructor(message = 'Invalid user credentials.') {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class InvalidIPException extends BaseException {
constructor(message = 'Invalid IP address.') {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class InvalidOTPException extends BaseException {
constructor(message = 'Invalid user OTP.') {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class InvalidPayloadException extends BaseException {
constructor(message: string, extensions?: Record<string, unknown>) {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class InvalidQueryException extends BaseException {
constructor(message: string) {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
type Extensions = {
allow: string[];

View File

@@ -1,5 +1,5 @@
import { Range } from '@directus/drive';
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class RangeNotSatisfiableException extends BaseException {
constructor(range: Range) {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class RouteNotFoundException extends BaseException {
constructor(path: string) {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
type Extensions = {
service: string;

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class UnprocessableEntityException extends BaseException {
constructor(message: string) {

View File

@@ -1,4 +1,4 @@
import { BaseException } from './base';
import { BaseException } from '@directus/shared/exceptions';
export class UserSuspendedException extends BaseException {
constructor(message = 'User suspended.') {

View File

@@ -7,8 +7,8 @@ import {
getLocalExtensions,
getPackageExtensions,
resolvePackage,
} from '@directus/shared/utils';
import { APP_EXTENSION_TYPES, SHARED_DEPS } from '@directus/shared/constants';
} from '@directus/shared/utils/node';
import { APP_EXTENSION_TYPES, APP_SHARED_DEPS } from '@directus/shared/constants';
import getDatabase from './database';
import emitter from './emitter';
import env from './env';
@@ -31,10 +31,15 @@ let extensions: Extension[] = [];
let extensionBundles: Partial<Record<AppExtensionType, string>> = {};
export async function initializeExtensions(): Promise<void> {
await ensureExtensionDirs(env.EXTENSIONS_PATH);
extensions = await getExtensions();
try {
await ensureExtensionDirs(env.EXTENSIONS_PATH);
extensions = await getExtensions();
} catch (err) {
logger.warn(`Couldn't load extensions`);
logger.warn(err);
}
if (!('DIRECTUS_DEV' in process.env)) {
if (env.SERVE_APP ?? env.NODE_ENV !== 'development') {
extensionBundles = await generateExtensionBundles();
}
@@ -74,7 +79,7 @@ async function getExtensions(): Promise<Extension[]> {
}
async function generateExtensionBundles() {
const sharedDepsMapping = await getSharedDepsMapping(SHARED_DEPS);
const sharedDepsMapping = await getSharedDepsMapping(APP_SHARED_DEPS);
const internalImports = Object.entries(sharedDepsMapping).map(([name, path]) => ({
find: name,
replacement: path,

View File

@@ -3,7 +3,7 @@
*/
import env from './env';
import { toArray } from './utils/to-array';
import { toArray } from '@directus/shared/utils';
import { getConfigFromEnv } from './utils/get-config-from-env';
const enabledProviders = toArray(env.OAUTH_PROVIDERS).map((provider) => provider.toLowerCase());

View File

@@ -7,7 +7,7 @@ import env from './env';
const pinoOptions: LoggerOptions = {
level: env.LOG_LEVEL || 'info',
redact: {
paths: ['req.headers.authorization', 'req.cookies.directus_refresh_token'],
paths: ['req.headers.authorization', `req.cookies.${env.REFRESH_TOKEN_COOKIE_NAME}`],
censor: '--redact--',
},
};
@@ -19,16 +19,20 @@ if (env.LOG_STYLE !== 'raw') {
const logger = pino(pinoOptions);
export const expressLogger = pinoHTTP({
logger,
serializers: {
req(request: Request) {
const output = stdSerializers.req(request);
output.url = redactQuery(output.url);
return output;
},
export const expressLogger = pinoHTTP(
{
logger,
},
}) as RequestHandler;
{
serializers: {
req(request: Request) {
const output = stdSerializers.req(request);
output.url = redactQuery(output.url);
return output;
},
},
}
) as RequestHandler;
export default logger;

View File

@@ -1,6 +1,7 @@
import nodemailer, { Transporter } from 'nodemailer';
import env from './env';
import logger from './logger';
import { getConfigFromEnv } from './utils/get-config-from-env';
let transporter: Transporter;
@@ -23,13 +24,16 @@ export default function getMailer(): Transporter {
};
}
const tls: Record<string, unknown> = getConfigFromEnv('EMAIL_SMTP_TLS_');
transporter = nodemailer.createTransport({
pool: env.EMAIL_SMTP_POOL,
host: env.EMAIL_SMTP_HOST,
port: env.EMAIL_SMTP_PORT,
secure: env.EMAIL_SMTP_SECURE,
ignoreTLS: env.EMAIL_SMTP_IGNORE_TLS,
auth: auth,
auth,
tls,
} as Record<string, unknown>);
} else if (env.EMAIL_TRANSPORT.toLowerCase() === 'mailgun') {
const mg = require('nodemailer-mailgun-transport');
@@ -39,6 +43,7 @@ export default function getMailer(): Transporter {
api_key: env.EMAIL_MAILGUN_API_KEY,
domain: env.EMAIL_MAILGUN_DOMAIN,
},
host: env.EMAIL_MAILGUN_HOST || 'https://api.mailgun.net',
}) as any
);
} else {

View File

@@ -4,6 +4,7 @@ import env from '../env';
import asyncHandler from '../utils/async-handler';
import { getCacheControlHeader } from '../utils/get-cache-headers';
import { getCacheKey } from '../utils/get-cache-key';
import logger from '../logger';
const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next) => {
const { cache } = getCache();
@@ -17,10 +18,26 @@ const checkCacheMiddleware: RequestHandler = asyncHandler(async (req, res, next)
}
const key = getCacheKey(req);
const cachedData = await cache.get(key);
let cachedData;
try {
cachedData = await cache.get(key);
} catch (err) {
logger.warn(err, `[cache] Couldn't read key ${key}. ${err.message}`);
return next();
}
if (cachedData) {
const cacheExpiryDate = (await cache.get(`${key}__expires_at`)) as number | null;
let cacheExpiryDate;
try {
cacheExpiryDate = (await cache.get(`${key}__expires_at`)) as number | null;
} catch (err) {
logger.warn(err, `[cache] Couldn't read key ${`${key}__expires_at`}. ${err.message}`);
return next();
}
const cacheTTL = cacheExpiryDate ? cacheExpiryDate - Date.now() : null;
res.setHeader('Cache-Control', getCacheControlHeader(req, cacheTTL));

View File

@@ -1,9 +1,10 @@
import { ErrorRequestHandler } from 'express';
import { emitAsyncSafe } from '../emitter';
import env from '../env';
import { BaseException, MethodNotAllowedException } from '../exceptions';
import { MethodNotAllowedException } from '../exceptions';
import { BaseException } from '@directus/shared/exceptions';
import logger from '../logger';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
// Note: keep all 4 parameters here. That's how Express recognizes it's the error handler, even if
// we don't use next

View File

@@ -8,6 +8,7 @@ import asyncHandler from '../utils/async-handler';
import { getCacheKey } from '../utils/get-cache-key';
import { parse as toXML } from 'js2xmlparser';
import { getCacheControlHeader } from '../utils/get-cache-headers';
import logger from '../logger';
export const respond: RequestHandler = asyncHandler(async (req, res) => {
const { cache } = getCache();
@@ -20,8 +21,14 @@ export const respond: RequestHandler = asyncHandler(async (req, res) => {
res.locals.cache !== false
) {
const key = getCacheKey(req);
await cache.set(key, res.locals.payload, ms(env.CACHE_TTL as string));
await cache.set(`${key}__expires_at`, Date.now() + ms(env.CACHE_TTL as string));
try {
await cache.set(key, res.locals.payload, ms(env.CACHE_TTL as string));
await cache.set(`${key}__expires_at`, Date.now() + ms(env.CACHE_TTL as string));
} catch (err) {
logger.warn(err, `[cache] Couldn't set key ${key}. ${err.message}`);
}
res.setHeader('Cache-Control', getCacheControlHeader(req, ms(env.CACHE_TTL as string)));
res.setHeader('Vary', 'Origin, Cache-Control');
} else {

View File

@@ -1,6 +1,7 @@
import { RequestHandler } from 'express';
import Joi from 'joi';
import { FailedValidationException, InvalidPayloadException } from '../exceptions';
import { InvalidPayloadException } from '../exceptions';
import { FailedValidationException } from '@directus/shared/exceptions';
import asyncHandler from '../utils/async-handler';
import { sanitizeQuery } from '../utils/sanitize-query';

View File

@@ -8,6 +8,7 @@ import url from 'url';
import createApp from './app';
import getDatabase from './database';
import { emitAsyncSafe } from './emitter';
import env from './env';
import logger from './logger';
export default async function createServer(): Promise<http.Server> {
@@ -86,9 +87,7 @@ export default async function createServer(): Promise<http.Server> {
async function beforeShutdown() {
emitAsyncSafe('server.stop.before', { server });
if ('DIRECTUS_DEV' in process.env) {
logger.info('Restarting...');
} else {
if (env.NODE_ENV !== 'development') {
logger.info('Shutting down...');
}
}
@@ -102,7 +101,7 @@ export default async function createServer(): Promise<http.Server> {
async function onShutdown() {
emitAsyncSafe('server.stop');
if (!('DIRECTUS_DEV' in process.env)) {
if (env.NODE_ENV !== 'development') {
logger.info('Directus shut down OK. Bye bye!');
}
}

View File

@@ -1,15 +1,18 @@
import { Range, StatResponse } from '@directus/drive';
import { Knex } from 'knex';
import path from 'path';
import sharp, { ResizeOptions } from 'sharp';
import getDatabase from '../database';
import { RangeNotSatisfiableException, IllegalAssetTransformation } from '../exceptions';
import storage from '../storage';
import { AbstractServiceOptions, Accountability, Transformation } from '../types';
import { AuthorizationService } from './authorization';
import { Semaphore } from 'async-mutex';
import { Knex } from 'knex';
import { contentType } from 'mime-types';
import ObjectHash from 'object-hash';
import path from 'path';
import sharp from 'sharp';
import getDatabase from '../database';
import env from '../env';
import { File } from '../types';
import { IllegalAssetTransformation, RangeNotSatisfiableException } from '../exceptions';
import storage from '../storage';
import { AbstractServiceOptions, File, Transformation, TransformationParams, TransformationPreset } from '../types';
import { Accountability } from '@directus/shared/types';
import { AuthorizationService } from './authorization';
import * as TransformationUtils from '../utils/transformations';
sharp.concurrency(1);
@@ -30,7 +33,7 @@ export class AssetsService {
async getAsset(
id: string,
transformation: Transformation,
transformation: TransformationParams | TransformationPreset,
range?: Range
): Promise<{ stream: NodeJS.ReadableStream; file: any; stat: StatResponse }> {
const publicSettings = await this.knex
@@ -53,18 +56,23 @@ export class AssetsService {
}
const type = file.type;
const transforms = TransformationUtils.resolvePreset(transformation, file);
// We can only transform JPEG, PNG, and WebP
if (type && Object.keys(transformation).length > 0 && ['image/jpeg', 'image/png', 'image/webp'].includes(type)) {
const resizeOptions = this.parseTransformation(transformation);
if (type && transforms.length > 0 && ['image/jpeg', 'image/png', 'image/webp', 'image/tiff'].includes(type)) {
const maybeNewFormat = TransformationUtils.maybeExtractFormat(transforms);
const assetFilename =
path.basename(file.filename_disk, path.extname(file.filename_disk)) +
this.getAssetSuffix(transformation) +
path.extname(file.filename_disk);
getAssetSuffix(transforms) +
(maybeNewFormat ? `.${maybeNewFormat}` : path.extname(file.filename_disk));
const { exists } = await storage.disk(file.storage).exists(assetFilename);
if (maybeNewFormat) {
file.type = contentType(assetFilename) || null;
}
if (exists) {
return {
stream: storage.disk(file.storage).getStream(assetFilename, range),
@@ -94,15 +102,9 @@ export class AssetsService {
const transformer = sharp({
limitInputPixels: Math.pow(env.ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION, 2),
sequentialRead: true,
})
.rotate()
.resize(resizeOptions);
}).rotate();
if (transformation.quality) {
transformer.toFormat(type.substring(6) as 'jpeg' | 'png' | 'webp', {
quality: Number(transformation.quality),
});
}
transforms.forEach(([method, ...args]) => (transformer[method] as any).apply(transformer, args));
await storage.disk(file.storage).put(assetFilename, readStream.pipe(transformer), type);
@@ -118,28 +120,9 @@ export class AssetsService {
return { stream: readStream, file, stat };
}
}
private parseTransformation(transformation: Transformation): ResizeOptions {
const resizeOptions: ResizeOptions = {};
if (transformation.width) resizeOptions.width = Number(transformation.width);
if (transformation.height) resizeOptions.height = Number(transformation.height);
if (transformation.fit) resizeOptions.fit = transformation.fit;
if (transformation.withoutEnlargement)
resizeOptions.withoutEnlargement = Boolean(transformation.withoutEnlargement);
return resizeOptions;
}
private getAssetSuffix(transformation: Transformation) {
if (Object.keys(transformation).length === 0) return '';
return (
'__' +
Object.entries(transformation)
.sort((a, b) => (a[0] > b[0] ? 1 : -1))
.map((e) => e.join('_'))
.join(',')
);
}
}
const getAssetSuffix = (transforms: Transformation[]) => {
if (transforms.length === 0) return '';
return `__${ObjectHash.sha1(transforms)}`;
};
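Replacing the sorted key=value suffix with an object-hash digest keeps generated asset filenames short and deterministic: identical transform lists always hash to the same suffix, so an existing rendition can be found on disk by name alone. Illustrative usage (argument values invented):

import ObjectHash from 'object-hash';
import { Transformation } from '../types';

const transforms: Transformation[] = [
	['resize', 300, 300, { fit: 'cover' }],
	['webp', { quality: 80 }],
];

// Equal inputs -> equal sha1 -> cache hit on the stored rendition.
const suffix = `__${ObjectHash.sha1(transforms)}`;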

View File

@@ -15,9 +15,12 @@ import {
} from '../exceptions';
import { createRateLimiter } from '../rate-limiter';
import { ActivityService } from '../services/activity';
import { AbstractServiceOptions, Accountability, Action, SchemaOverview, Session } from '../types';
import { AbstractServiceOptions, Action, SchemaOverview, Session } from '../types';
import { Accountability } from '@directus/shared/types';
import { SettingsService } from './settings';
import { merge } from 'lodash';
import { performance } from 'perf_hooks';
import { stall } from '../utils/stall';
type AuthenticateOptions = {
email: string;
@@ -52,6 +55,9 @@ export class AuthenticationService {
async authenticate(
options: AuthenticateOptions
): Promise<{ accessToken: any; refreshToken: any; expires: any; id?: any }> {
const STALL_TIME = 100;
const timeStart = performance.now();
const settingsService = new SettingsService({
knex: this.knex,
schema: this.schema,
@@ -97,8 +103,10 @@ export class AuthenticationService {
emitStatus('fail');
if (user?.status === 'suspended') {
await stall(STALL_TIME, timeStart);
throw new UserSuspendedException();
} else {
await stall(STALL_TIME, timeStart);
throw new InvalidCredentialsException();
}
}
@@ -125,17 +133,20 @@ export class AuthenticationService {
if (password !== undefined) {
if (!user.password) {
emitStatus('fail');
await stall(STALL_TIME, timeStart);
throw new InvalidCredentialsException();
}
if ((await argon2.verify(user.password, password)) === false) {
emitStatus('fail');
await stall(STALL_TIME, timeStart);
throw new InvalidCredentialsException();
}
}
if (user.tfa_secret && !otp) {
emitStatus('fail');
await stall(STALL_TIME, timeStart);
throw new InvalidOTPException(`"otp" is required`);
}
@@ -144,6 +155,7 @@ export class AuthenticationService {
if (otpValid === false) {
emitStatus('fail');
await stall(STALL_TIME, timeStart);
throw new InvalidOTPException(`"otp" is invalid`);
}
}
@@ -193,6 +205,8 @@ export class AuthenticationService {
await loginAttemptsLimiter.set(user.id, 0, 0);
}
await stall(STALL_TIME, timeStart);
return {
accessToken,
refreshToken,
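Every failure path (unknown user, suspended user, bad password, missing or invalid OTP) and the success path now pad out to the same minimum duration, so response timing no longer reveals which check failed. The helper is imported from '../utils/stall'; a plausible minimal implementation:

import { performance } from 'perf_hooks';

// Wait until at least `ms` milliseconds have passed since `start`
// (a performance.now() timestamp taken at the top of the request).
export async function stall(ms: number, start: number): Promise<void> {
	const elapsed = performance.now() - start;
	if (elapsed >= ms) return;
	await new Promise((resolve) => setTimeout(resolve, ms - elapsed));
}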

View File

@@ -1,13 +1,14 @@
import { Knex } from 'knex';
import { cloneDeep, flatten, merge, uniq, uniqWith } from 'lodash';
import { cloneDeep, merge, uniq, uniqWith, flatten, isNil } from 'lodash';
import getDatabase from '../database';
import { FailedValidationException, ForbiddenException } from '../exceptions';
import { ForbiddenException } from '../exceptions';
import { FailedValidationException } from '@directus/shared/exceptions';
import { validatePayload, parseFilter } from '@directus/shared/utils';
import { Accountability } from '@directus/shared/types';
import {
AbstractServiceOptions,
Accountability,
AST,
FieldNode,
Filter,
Item,
NestedCollectionNode,
Permission,
@@ -15,9 +16,8 @@ import {
PrimaryKey,
Query,
SchemaOverview,
Aggregate,
} from '../types';
import generateJoi from '../utils/generate-joi';
import { parseFilter } from '../utils/parse-filter';
import { ItemsService } from './items';
import { PayloadService } from './payload';
@@ -71,7 +71,13 @@ export class AuthorizationService {
if (ast.type === 'm2a') {
collections.push(...ast.names.map((name) => ({ collection: name, field: ast.fieldKey })));
/** @TODO add nestedNode */
for (const children of Object.values(ast.children)) {
for (const nestedNode of children) {
if (nestedNode.type !== 'field') {
collections.push(...getCollectionsFromAST(nestedNode));
}
}
}
} else {
collections.push({
collection: ast.name,
@@ -89,17 +95,23 @@ export class AuthorizationService {
}
function validateFields(ast: AST | NestedCollectionNode | FieldNode) {
if (ast.type !== 'field' && ast.type !== 'm2a') {
/** @TODO remove m2a check */
const collection = ast.name;
if (ast.type !== 'field') {
if (ast.type === 'm2a') {
for (const [collection, children] of Object.entries(ast.children)) {
checkFields(collection, children, ast.query?.[collection]?.aggregate);
}
} else {
checkFields(ast.name, ast.children, ast.query?.aggregate);
}
}
function checkFields(collection: string, children: (NestedCollectionNode | FieldNode)[], aggregate?: Aggregate) {
// We check the availability of the permissions in the step before this is run
const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!;
const allowedFields = permissions.fields || [];
if (ast.query.aggregate && allowedFields.includes('*') === false) {
for (const [_operation, aliasMap] of Object.entries(ast.query.aggregate)) {
if (aggregate && allowedFields.includes('*') === false) {
for (const [_operation, aliasMap] of Object.entries(aggregate)) {
if (!aliasMap) continue;
for (const [column, _alias] of Object.entries(aliasMap)) {
@@ -108,7 +120,7 @@ export class AuthorizationService {
}
}
for (const childNode of ast.children) {
for (const childNode of children) {
if (childNode.type !== 'field') {
validateFields(childNode);
continue;
@@ -129,43 +141,61 @@ export class AuthorizationService {
ast: AST | NestedCollectionNode | FieldNode,
accountability: Accountability | null
): AST | NestedCollectionNode | FieldNode {
if (ast.type !== 'field' && ast.type !== 'm2a') {
/** @TODO remove m2a check */
const collection = ast.name;
if (ast.type !== 'field') {
if (ast.type === 'm2a') {
const collections = Object.keys(ast.children);
for (const collection of collections) {
updateFilterQuery(collection, ast.query[collection]);
}
for (const [collection, children] of Object.entries(ast.children)) {
ast.children[collection] = children.map((child) => applyFilters(child, accountability)) as (
| NestedCollectionNode
| FieldNode
)[];
}
} else {
const collection = ast.name;
updateFilterQuery(collection, ast.query);
ast.children = ast.children.map((child) => applyFilters(child, accountability)) as (
| NestedCollectionNode
| FieldNode
)[];
}
}
return ast;
function updateFilterQuery(collection: string, query: Query) {
// We check the availability of the permissions in the step before this is run
const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!;
const parsedPermissions = parseFilter(permissions.permissions, accountability);
if (!ast.query.filter || Object.keys(ast.query.filter).length === 0) {
ast.query.filter = { _and: [] };
if (!query.filter || Object.keys(query.filter).length === 0) {
query.filter = { _and: [] };
} else {
ast.query.filter = { _and: [ast.query.filter] };
query.filter = { _and: [query.filter] };
}
if (parsedPermissions && Object.keys(parsedPermissions).length > 0) {
ast.query.filter._and.push(parsedPermissions);
query.filter._and.push(parsedPermissions);
}
if (ast.query.filter._and.length === 0) delete ast.query.filter._and;
if (query.filter._and.length === 0) delete query.filter._and;
if (permissions.limit && ast.query.limit && ast.query.limit > permissions.limit) {
if (permissions.limit && query.limit && query.limit > permissions.limit) {
throw new ForbiddenException();
}
// Default to the permissions limit if limit hasn't been set
if (permissions.limit && !ast.query.limit) {
ast.query.limit = permissions.limit;
if (permissions.limit && !query.limit) {
query.limit = permissions.limit;
}
ast.children = ast.children.map((child) => applyFilters(child, accountability)) as (
| NestedCollectionNode
| FieldNode
)[];
}
return ast;
}
}
@@ -173,8 +203,6 @@ export class AuthorizationService {
* Checks if the provided payload matches the configured permissions, and adds the presets to the payload.
*/
validatePayload(action: PermissionsAction, collection: string, data: Partial<Item>): Promise<Partial<Item>> {
const validationErrors: FailedValidationException[] = [];
const payload = cloneDeep(data);
let permission: Permission | undefined;
@@ -216,44 +244,57 @@ export class AuthorizationService {
const payloadWithPresets = merge({}, preset, payload);
const requiredColumns: string[] = [];
const hasValidationRules =
isNil(permission.validation) === false && Object.keys(permission.validation ?? {}).length > 0;
for (const [name, field] of Object.entries(this.schema.collections[collection].fields)) {
const requiredColumns: SchemaOverview['collections'][string]['fields'][string][] = [];
for (const field of Object.values(this.schema.collections[collection].fields)) {
const specials = field?.special ?? [];
const hasGenerateSpecial = ['uuid', 'date-created', 'role-created', 'user-created'].some((name) =>
specials.includes(name)
);
const isRequired = field.nullable === false && field.defaultValue === null && hasGenerateSpecial === false;
const notNullable = field.nullable === false && hasGenerateSpecial === false;
if (isRequired) {
requiredColumns.push(name);
if (notNullable) {
requiredColumns.push(field);
}
}
if (hasValidationRules === false && requiredColumns.length === 0) {
return payloadWithPresets;
}
if (requiredColumns.length > 0) {
permission.validation = {
_and: [permission.validation, {}],
};
permission.validation = hasValidationRules ? { _and: [permission.validation] } : { _and: [] };
if (action === 'create') {
for (const name of requiredColumns) {
permission.validation._and[1][name] = {
_submitted: true,
};
for (const field of requiredColumns) {
if (action === 'create' && field.defaultValue === null) {
permission.validation._and.push({
[field.field]: {
_submitted: true,
},
});
}
} else {
for (const name of requiredColumns) {
permission.validation._and[1][name] = {
permission.validation._and.push({
[field.field]: {
_nnull: true,
};
}
},
});
}
}
const validationErrors: FailedValidationException[] = [];
validationErrors.push(
...this.validateJoi(parseFilter(permission.validation || {}, this.accountability), payloadWithPresets)
...flatten(
validatePayload(parseFilter(permission.validation!, this.accountability), payloadWithPresets).map((error) =>
error.details.map((details) => new FailedValidationException(details))
)
)
);
if (validationErrors.length > 0) throw validationErrors;
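The required-column handling above boils down to a plain filter tree that the shared validatePayload utility evaluates. A worked example, assuming a non-nullable 'title' field with no default value and no author-defined rules:

// On create, the payload is validated against:
const validation = {
	_and: [
		{ title: { _submitted: true } }, // the key must be present in the payload
		{ title: { _nnull: true } }, // and its value must not be null
	],
};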
@@ -261,48 +302,6 @@ export class AuthorizationService {
return payloadWithPresets;
}
validateJoi(validation: Filter, payload: Partial<Item>): FailedValidationException[] {
if (!validation) return [];
const errors: FailedValidationException[] = [];
/**
* Note there can only be a single _and / _or per level
*/
if (Object.keys(validation)[0] === '_and') {
const subValidation = Object.values(validation)[0];
const nestedErrors = flatten<FailedValidationException>(
subValidation.map((subObj: Record<string, any>) => {
return this.validateJoi(subObj, payload);
})
).filter((err?: FailedValidationException) => err);
errors.push(...nestedErrors);
} else if (Object.keys(validation)[0] === '_or') {
const subValidation = Object.values(validation)[0];
const nestedErrors = flatten<FailedValidationException>(
subValidation.map((subObj: Record<string, any>) => this.validateJoi(subObj, payload))
);
const allErrored = subValidation.length === nestedErrors.length;
if (allErrored) {
errors.push(...nestedErrors);
}
} else {
const schema = generateJoi(validation);
const { error } = schema.validate(payload, { abortEarly: false });
if (error) {
errors.push(...error.details.map((details) => new FailedValidationException(details)));
}
}
return errors;
}
async checkAccess(action: PermissionsAction, collection: string, pk: PrimaryKey | PrimaryKey[]): Promise<void> {
if (this.accountability?.admin === true) return;

View File

@@ -10,14 +10,8 @@ import logger from '../logger';
import { FieldsService, RawField } from '../services/fields';
import { ItemsService, MutationOptions } from '../services/items';
import Keyv from 'keyv';
import {
AbstractServiceOptions,
Accountability,
Collection,
CollectionMeta,
FieldMeta,
SchemaOverview,
} from '../types';
import { AbstractServiceOptions, Collection, CollectionMeta, SchemaOverview } from '../types';
import { Accountability, FieldMeta } from '@directus/shared/types';
export type RawCollection = {
collection: string;
@@ -213,6 +207,11 @@ export class CollectionsService {
const collections: Collection[] = [];
/**
* The collections as known in the schema cache.
*/
const knownCollections = Object.keys(this.schema.collections);
for (const table of tablesInDatabase) {
const collection: Collection = {
collection: table.name,
@@ -220,7 +219,12 @@ export class CollectionsService {
schema: table,
};
collections.push(collection);
// By only returning collections that are known in the schema cache, we prevent weird
// situations where the collections endpoint returns different info from every other
// collection
if (knownCollections.includes(table.name)) {
collections.push(collection);
}
}
return collections;
@@ -272,6 +276,8 @@ export class CollectionsService {
const collections: Collection[] = [];
const knownCollections = Object.keys(this.schema.collections);
for (const table of tables) {
const collection: Collection = {
collection: table.name,
@@ -279,7 +285,12 @@ export class CollectionsService {
schema: table,
};
collections.push(collection);
// By only returning collections that are known in the schema cache, we prevent weird
// situations where the collections endpoint returns different info from every other
// collection
if (knownCollections.includes(table.name)) {
collections.push(collection);
}
}
return collections;

View File

@@ -11,16 +11,18 @@ import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { translateDatabaseError } from '../exceptions/database/translate';
import { ItemsService } from '../services/items';
import { PayloadService } from '../services/payload';
import { AbstractServiceOptions, Accountability, FieldMeta, SchemaOverview, types } from '../types';
import { Field } from '../types/field';
import { AbstractServiceOptions, SchemaOverview } from '../types';
import { Accountability } from '@directus/shared/types';
import { Field, FieldMeta, Type } from '@directus/shared/types';
import getDefaultValue from '../utils/get-default-value';
import getLocalType from '../utils/get-local-type';
import { toArray } from '../utils/to-array';
import { isEqual } from 'lodash';
import { toArray } from '@directus/shared/utils';
import { isEqual, isNil } from 'lodash';
import { RelationsService } from './relations';
import Keyv from 'keyv';
import { DeepPartial } from '@directus/shared/types';
export type RawField = DeepPartial<Field> & { field: string; type: typeof types[number] };
export type RawField = DeepPartial<Field> & { field: string; type: Type };
export class FieldsService {
knex: Knex;
@@ -213,15 +215,20 @@ export class FieldsService {
async createField(
collection: string,
field: Partial<Field> & { field: string; type: typeof types[number] | null },
field: Partial<Field> & { field: string; type: Type | null },
table?: Knex.CreateTableBuilder // allows collection creation to
): Promise<void> {
if (this.accountability && this.accountability.admin !== true) {
throw new ForbiddenException();
}
const exists =
field.field in this.schema.collections[collection].fields ||
isNil(await this.knex.select('id').from('directus_fields').where({ collection, field: field.field }).first()) ===
false;
// Check if field already exists, either as a column, or as a row in directus_fields
if (field.field in this.schema.collections[collection].fields) {
if (exists) {
throw new InvalidPayloadException(`Field "${field.field}" already exists in collection "${collection}"`);
}
@@ -313,7 +320,6 @@ export class FieldsService {
return field.field;
}
/** @todo save accountability */
async deleteField(collection: string, field: string): Promise<void> {
if (this.accountability && this.accountability.admin !== true) {
throw new ForbiddenException();
@@ -434,6 +440,9 @@ export class FieldsService {
public addColumnToTable(table: Knex.CreateTableBuilder, field: RawField | Field, alter: Column | null = null): void {
let column: Knex.ColumnBuilder;
// Don't attempt to add a DB column for alias / corrupt fields
if (field.type === 'alias' || field.type === 'unknown') return;
if (field.schema?.has_auto_increment) {
column = table.increments(field.field);
} else if (field.type === 'string') {
@@ -445,6 +454,10 @@ export class FieldsService {
column = table.string(field.field);
} else if (field.type === 'hash') {
column = table.string(field.field, 255);
} else if (field.type === 'dateTime') {
column = table.dateTime(field.field, { useTz: false });
} else if (field.type === 'timestamp') {
column = table.timestamp(field.field, { useTz: true });
} else {
column = table[field.type](field.field);
}
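The new dateTime/timestamp branches make timezone handling explicit instead of relying on knex defaults. A sketch of the two calls (resulting Postgres column types shown as an assumption; other dialects map differently):

import { Knex } from 'knex';

function addDateColumns(table: Knex.CreateTableBuilder) {
	table.dateTime('published_on', { useTz: false }); // e.g. `timestamp without time zone`
	table.timestamp('created_at', { useTz: true }); // e.g. `timestamp with time zone`
}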

View File

@@ -1,7 +1,6 @@
import formatTitle from '@directus/format-title';
import axios, { AxiosResponse } from 'axios';
import parseEXIF from 'exif-reader';
import { parse as parseICC } from 'icc';
import exifr from 'exifr';
import { clone } from 'lodash';
import { extension } from 'mime-types';
import path from 'path';
@@ -13,8 +12,7 @@ import { ForbiddenException, ServiceUnavailableException } from '../exceptions';
import logger from '../logger';
import storage from '../storage';
import { AbstractServiceOptions, File, PrimaryKey } from '../types';
import parseIPTC from '../utils/parse-iptc';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
import { ItemsService, MutationOptions } from './items';
export class FilesService extends ItemsService {
@@ -32,6 +30,14 @@ export class FilesService extends ItemsService {
): Promise<PrimaryKey> {
const payload = clone(data);
if ('folder' in payload === false) {
const settings = await this.knex.select('storage_default_folder').from('directus_settings').first();
if (settings?.storage_default_folder) {
payload.folder = settings.storage_default_folder;
}
}
if (primaryKey !== undefined) {
await this.updateOne(primaryKey, payload, { emitEvents: false });
@@ -46,9 +52,10 @@ export class FilesService extends ItemsService {
primaryKey = await this.createOne(payload, { emitEvents: false });
}
const fileExtension = path.extname(payload.filename_download) || (payload.type && extension(payload.type));
const fileExtension =
path.extname(payload.filename_download) || (payload.type && '.' + extension(payload.type)) || '';
payload.filename_disk = primaryKey + '.' + fileExtension;
payload.filename_disk = primaryKey + (fileExtension || '');
if (!payload.type) {
payload.type = 'application/octet-stream';
@@ -77,37 +84,30 @@ export class FilesService extends ItemsService {
payload.height = meta.height;
}
payload.filesize = meta.size;
payload.metadata = {};
if (meta.icc) {
try {
payload.metadata.icc = parseICC(meta.icc);
} catch (err) {
logger.warn(`Couldn't extract ICC information from file`);
logger.warn(err);
try {
payload.metadata = await exifr.parse(buffer.content, {
icc: true,
iptc: true,
ifd1: true,
interop: true,
translateValues: true,
reviveValues: true,
mergeOutput: false,
});
if (payload.metadata?.iptc?.Headline) {
payload.title = payload.metadata.iptc.Headline;
}
}
if (meta.exif) {
try {
payload.metadata.exif = parseEXIF(meta.exif);
} catch (err) {
logger.warn(`Couldn't extract EXIF information from file`);
logger.warn(err);
if (!payload.description && payload.metadata?.iptc?.Caption) {
payload.description = payload.metadata.iptc.Caption;
}
}
if (meta.iptc) {
try {
payload.metadata.iptc = parseIPTC(meta.iptc);
payload.title = payload.metadata.iptc.headline || payload.title;
payload.description = payload.description || payload.metadata.iptc.caption;
payload.tags = payload.metadata.iptc.keywords;
} catch (err) {
logger.warn(`Couldn't extract IPTC information from file`);
logger.warn(err);
if (payload.metadata?.iptc?.Keywords) {
payload.tags = payload.metadata.iptc.Keywords;
}
} catch (err) {
logger.warn(`Couldn't extract metadata from file`);
logger.warn(err);
}
}
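With mergeOutput: false, exifr keeps every metadata block under its own key, which is exactly what the iptc lookups above rely on. An illustrative result shape (values invented; available keys depend on the file):

const metadata = {
	ifd0: { Make: 'Canon', Model: 'EOS R5' },
	exif: { ISO: 100, FNumber: 2.8 },
	iptc: {
		Headline: 'Sunset over the bay', // -> payload.title
		Caption: 'Taken at golden hour', // -> payload.description
		Keywords: ['sunset', 'bay'], // -> payload.tags
	},
};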

View File

@@ -44,11 +44,14 @@ import {
import { Knex } from 'knex';
import { flatten, get, mapKeys, merge, set, uniq } from 'lodash';
import ms from 'ms';
import { getCache } from '../cache';
import getDatabase from '../database';
import env from '../env';
import { BaseException, GraphQLValidationException, InvalidPayloadException } from '../exceptions';
import { ForbiddenException, GraphQLValidationException, InvalidPayloadException } from '../exceptions';
import { BaseException } from '@directus/shared/exceptions';
import { listExtensions } from '../extensions';
import { AbstractServiceOptions, Accountability, Action, GraphQLParams, Item, Query, SchemaOverview } from '../types';
import { Accountability } from '@directus/shared/types';
import { AbstractServiceOptions, Action, GraphQLParams, Item, Query, SchemaOverview } from '../types';
import { getGraphQLType } from '../utils/get-graphql-type';
import { reduceSchema } from '../utils/reduce-schema';
import { sanitizeQuery } from '../utils/sanitize-query';
@@ -1103,7 +1106,7 @@ export class GraphQLService {
* Select the correct service for the given collection. This allows the individual services to run
* their custom checks (e.g. it allows UsersService to prevent updating the TFA secret from outside)
*/
getService(collection: string): RolesService {
getService(collection: string): ItemsService {
const opts = {
knex: this.knex,
accountability: this.accountability,
@@ -1376,7 +1379,7 @@ export class GraphQLService {
userAgent: req?.get('user-agent'),
});
if (args.mode === 'cookie') {
res?.cookie('directus_refresh_token', result.refreshToken, {
res?.cookie(env.REFRESH_TOKEN_COOKIE_NAME, result.refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
@@ -1407,13 +1410,13 @@ export class GraphQLService {
accountability: accountability,
schema: this.schema,
});
const currentRefreshToken = args.refresh_token || req?.cookies.directus_refresh_token;
const currentRefreshToken = args.refresh_token || req?.cookies[env.REFRESH_TOKEN_COOKIE_NAME];
if (!currentRefreshToken) {
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
}
const result = await authenticationService.refresh(currentRefreshToken);
if (args.mode === 'cookie') {
res?.cookie('directus_refresh_token', result.refreshToken, {
res?.cookie(env.REFRESH_TOKEN_COOKIE_NAME, result.refreshToken, {
httpOnly: true,
domain: env.REFRESH_TOKEN_COOKIE_DOMAIN,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
@@ -1443,7 +1446,7 @@ export class GraphQLService {
accountability: accountability,
schema: this.schema,
});
const currentRefreshToken = args.refresh_token || req?.cookies.directus_refresh_token;
const currentRefreshToken = args.refresh_token || req?.cookies[env.REFRESH_TOKEN_COOKIE_NAME];
if (!currentRefreshToken) {
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
}
@@ -1609,6 +1612,21 @@ export class GraphQLService {
return true;
},
},
utils_cache_clear: {
type: GraphQLVoid,
resolve: async () => {
if (this.accountability?.admin !== true) {
throw new ForbiddenException();
}
const { cache, schemaCache } = getCache();
await cache?.clear();
await schemaCache?.clear();
return;
},
},
users_invite_accept: {
type: GraphQLBoolean,
args: {

View File

@@ -1,6 +1,7 @@
import { Knex } from 'knex';
import getDatabase from '../database';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types';
import { AbstractServiceOptions, SchemaOverview } from '../types';
import { Accountability } from '@directus/shared/types';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import StreamArray from 'stream-json/streamers/StreamArray';
import { ItemsService } from './items';

View File

@@ -9,10 +9,10 @@ import env from '../env';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { translateDatabaseError } from '../exceptions/database/translate';
import logger from '../logger';
import { Accountability } from '@directus/shared/types';
import {
AbstractService,
AbstractServiceOptions,
Accountability,
Action,
Item as AnyItem,
PermissionsAction,
@@ -21,7 +21,7 @@ import {
SchemaOverview,
} from '../types';
import getASTFromQuery from '../utils/get-ast-from-query';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
import { AuthorizationService } from './authorization';
import { PayloadService } from './payload';
@@ -279,6 +279,17 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
throw new ForbiddenException();
}
emitAsyncSafe(`${this.eventScope}.read`, {
event: `${this.eventScope}.read`,
accountability: this.accountability,
collection: this.collection,
query,
action: 'read',
payload: records,
schema: this.schema,
database: getDatabase(),
});
return records as Item[];
}
@@ -306,17 +317,6 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
throw new ForbiddenException();
}
emitAsyncSafe(`${this.eventScope}.read`, {
event: `${this.eventScope}.read`,
accountability: this.accountability,
collection: this.collection,
item: key,
action: 'read',
payload: results,
schema: this.schema,
database: getDatabase(),
});
return results[0];
}
@@ -344,17 +344,6 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
const results = await this.readByQuery(queryWithKeys, opts);
emitAsyncSafe(`${this.eventScope}.read`, {
event: `${this.eventScope}.read`,
accountability: this.accountability,
collection: this.collection,
item: keys,
action: 'read',
payload: results,
schema: this.schema,
database: getDatabase(),
});
return results;
}

View File

@@ -6,7 +6,8 @@ import getDatabase from '../../database';
import env from '../../env';
import { InvalidPayloadException } from '../../exceptions';
import logger from '../../logger';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../../types';
import { AbstractServiceOptions, SchemaOverview } from '../../types';
import { Accountability } from '@directus/shared/types';
import getMailer from '../../mailer';
import { Transporter, SendMailOptions } from 'nodemailer';
import prettier from 'prettier';

View File

@@ -1,10 +1,11 @@
import { Knex } from 'knex';
import getDatabase from '../database';
import { ForbiddenException } from '../exceptions';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types';
import { AbstractServiceOptions, SchemaOverview } from '../types';
import { Accountability } from '@directus/shared/types';
import { Query } from '../types/query';
import { applyFilter, applySearch } from '../utils/apply-query';
import { parseFilter } from '../utils/parse-filter';
import { parseFilter } from '@directus/shared/utils';
export class MetaService {
knex: Knex;

View File

@@ -1,13 +1,14 @@
import argon2 from 'argon2';
import { format, formatISO, parse, parseISO } from 'date-fns';
import { format, parseISO } from 'date-fns';
import Joi from 'joi';
import { Knex } from 'knex';
import { clone, cloneDeep, isObject, isPlainObject, omit } from 'lodash';
import { v4 as uuidv4 } from 'uuid';
import getDatabase from '../database';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { AbstractServiceOptions, Accountability, Item, PrimaryKey, Query, SchemaOverview } from '../types';
import { toArray } from '../utils/to-array';
import { AbstractServiceOptions, Item, PrimaryKey, Query, SchemaOverview, Alterations } from '../types';
import { Accountability } from '@directus/shared/types';
import { toArray } from '@directus/shared/utils';
import { ItemsService } from './items';
type Action = 'create' | 'read' | 'update';
@@ -21,16 +22,6 @@ type Transformers = {
}) => Promise<any>;
};
type Alterations = {
create: {
[key: string]: any;
}[];
update: {
[key: string]: any;
}[];
delete: (number | string)[];
};
/**
* Process a given payload for a collection to ensure the special fields (hash, uuid, date etc) are
* handled correctly.
@@ -50,13 +41,6 @@ export class PayloadService {
return this;
}
/**
* @todo allow this to be extended
*
* @todo allow these extended special types to have "field dependencies"?
* f.e. the file-links transformer needs the id and filename_download to be fetched from the DB
* in order to work
*/
public transformers: Transformers = {
async hash({ action, value }) {
if (!value) return;
@@ -222,11 +206,15 @@ export class PayloadService {
['dateTime', 'date', 'timestamp'].includes(field.type)
);
if (dateColumns.length === 0) return payloads;
const timeColumns = fieldsInCollection.filter(([_name, field]) => {
return field.type === 'time';
});
if (dateColumns.length === 0 && timeColumns.length === 0) return payloads;
for (const [name, dateColumn] of dateColumns) {
for (const payload of payloads) {
let value = payload[name];
let value: number | string | Date = payload[name];
if (value === null || value === '0000-00-00') {
payload[name] = null;
@@ -236,32 +224,54 @@ export class PayloadService {
if (!value) continue;
if (action === 'read') {
if (typeof value === 'string') value = new Date(value);
if (typeof value === 'number' || typeof value === 'string') {
value = new Date(value);
}
if (dateColumn.type === 'timestamp') {
const newValue = formatISO(value);
const newValue = value.toISOString();
payload[name] = newValue;
}
if (dateColumn.type === 'dateTime') {
// Strip off the Z at the end of a non-timezone datetime value
const newValue = format(value, "yyyy-MM-dd'T'HH:mm:ss");
const year = String(value.getUTCFullYear());
const month = String(value.getUTCMonth() + 1).padStart(2, '0');
const date = String(value.getUTCDate()).padStart(2, '0');
const hours = String(value.getUTCHours()).padStart(2, '0');
const minutes = String(value.getUTCMinutes()).padStart(2, '0');
const seconds = String(value.getUTCSeconds()).padStart(2, '0');
const newValue = `${year}-${month}-${date}T${hours}:${minutes}:${seconds}`;
payload[name] = newValue;
}
if (dateColumn.type === 'date') {
const [year, month, day] = value.toISOString().substr(0, 10).split('-');
// Strip off the time / timezone information from a date-only value
const newValue = format(value, 'yyyy-MM-dd');
const newValue = `${year}-${month}-${day}`;
payload[name] = newValue;
}
} else {
if (value instanceof Date === false) {
if (value instanceof Date === false && typeof value === 'string') {
if (dateColumn.type === 'date') {
const newValue = parse(value, 'yyyy-MM-dd', new Date());
payload[name] = newValue;
const [date] = value.split('T');
const [year, month, day] = date.split('-');
payload[name] = new Date(Date.UTC(Number(year), Number(month) - 1, Number(day)));
}
if (dateColumn.type === 'timestamp' || dateColumn.type === 'dateTime') {
if (dateColumn.type === 'dateTime') {
const [date, time] = value.split('T');
const [year, month, day] = date.split('-');
const [hours, minutes, seconds] = time.substring(0, 8).split(':');
payload[name] = new Date(
Date.UTC(Number(year), Number(month) - 1, Number(day), Number(hours), Number(minutes), Number(seconds))
);
}
if (dateColumn.type === 'timestamp') {
const newValue = parseISO(value);
payload[name] = newValue;
}
@@ -270,6 +280,22 @@ export class PayloadService {
}
}
/**
* Some DB drivers (MS SQL, for example) return time values as Date objects. For consistency's sake,
* we'll abstract those back to hh:mm:ss
*/
for (const [name] of timeColumns) {
for (const payload of payloads) {
const value = payload[name];
if (!value) continue;
if (action === 'read') {
if (value instanceof Date) payload[name] = format(value, 'HH:mm:ss');
}
}
}
return payloads;
}
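The switch from date-fns to manual getUTC* formatting matters because format() renders in the server's local timezone; building the string from UTC components keeps 'dateTime' values timezone-less and stable across hosts. A worked example:

const value = new Date(Date.UTC(2021, 7, 6, 16, 14, 29)); // 2021-08-06T16:14:29Z (months are 0-based)
const pad = (n: number) => String(n).padStart(2, '0');

const dateTime =
	`${value.getUTCFullYear()}-${pad(value.getUTCMonth() + 1)}-${pad(value.getUTCDate())}` +
	`T${pad(value.getUTCHours())}:${pad(value.getUTCMinutes())}:${pad(value.getUTCSeconds())}`;
// -> '2021-08-06T16:14:29' regardless of the server's local timezone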
@@ -318,6 +344,9 @@ export class PayloadService {
const relatedPrimary = this.schema.collections[relatedCollection].primary;
const relatedRecord: Partial<Item> = payload[relation.field];
if (['string', 'number'].includes(typeof relatedRecord)) continue;
const hasPrimaryKey = relatedPrimary in relatedRecord;
let relatedPrimaryKey: PrimaryKey = relatedRecord[relatedPrimary];
@@ -455,7 +484,8 @@ export class PayloadService {
schema: this.schema,
});
const relatedRecords: Partial<Item>[] = [];
const recordsToUpsert: Partial<Item>[] = [];
const savedPrimaryKeys: PrimaryKey[] = [];
// Nested array of individual items
if (Array.isArray(payload[relation.meta!.one_field!])) {
@@ -465,30 +495,46 @@ export class PayloadService {
let record = cloneDeep(relatedRecord);
if (typeof relatedRecord === 'string' || typeof relatedRecord === 'number') {
const exists = !!(await this.knex
.select(relatedPrimaryKeyField)
const existingRecord = await this.knex
.select(relatedPrimaryKeyField, relation.field)
.from(relation.collection)
.where({ [relatedPrimaryKeyField]: record })
.first());
.first();
if (exists === false) {
if (!!existingRecord === false) {
throw new ForbiddenException();
}
// If the related item is already associated to the current item, and there's no
// other updates (which is indicated by the fact that this is just the PK), we can
// ignore updating this item. This makes sure we don't trigger any update logic
// for items that aren't actually being updated. NOTE: We use == here, as the
// primary key might be reported as a string instead of a number, coming from the
// http route, and/or as a bigInteger in the DB
if (
existingRecord[relation.field] == parent ||
existingRecord[relation.field] == payload[currentPrimaryKeyField]
) {
savedPrimaryKeys.push(existingRecord[relatedPrimaryKeyField]);
continue;
}
record = {
[relatedPrimaryKeyField]: relatedRecord,
};
}
relatedRecords.push({
recordsToUpsert.push({
...record,
[relation.field]: parent || payload[currentPrimaryKeyField],
});
}
const savedPrimaryKeys = await itemsService.upsertMany(relatedRecords, {
onRevisionCreate: (id) => revisions.push(id),
});
savedPrimaryKeys.push(
...(await itemsService.upsertMany(recordsToUpsert, {
onRevisionCreate: (id) => revisions.push(id),
}))
);
const query: Query = {
filter: {
@@ -540,7 +586,7 @@ export class PayloadService {
}
if (alterations.update) {
const primaryKeyField = this.schema.collections[this.collection].primary;
const primaryKeyField = this.schema.collections[relation.collection].primary;
for (const item of alterations.update) {
await itemsService.updateOne(

View File

@@ -1,8 +1,9 @@
import { Knex } from 'knex';
import { systemRelationRows } from '../database/system-data/relations';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { AbstractServiceOptions, SchemaOverview, Query, Relation, RelationMeta, Accountability } from '../types';
import { toArray } from '../utils/to-array';
import { AbstractServiceOptions, SchemaOverview, Query, Relation, RelationMeta } from '../types';
import { Accountability } from '@directus/shared/types';
import { toArray } from '@directus/shared/utils';
import { ItemsService, QueryOptions } from './items';
import { PermissionsService } from './permissions';
import SchemaInspector from '@directus/schema';

View File

@@ -1,6 +1,6 @@
import { UnprocessableEntityException } from '../exceptions';
import { AbstractServiceOptions, PrimaryKey } from '../types';
import { ItemsService } from './items';
import { ForbiddenException, UnprocessableEntityException } from '../exceptions';
import { AbstractServiceOptions, PrimaryKey, Query, Alterations, Item } from '../types';
import { ItemsService, MutationOptions } from './items';
import { PermissionsService } from './permissions';
import { PresetsService } from './presets';
import { UsersService } from './users';
@@ -10,21 +10,89 @@ export class RolesService extends ItemsService {
super('directus_roles', options);
}
private async checkForOtherAdminRoles(excludeKeys: PrimaryKey[]): Promise<void> {
// Make sure there's at least one admin role left after this deletion is done
const otherAdminRoles = await this.knex
.count('*', { as: 'count' })
.from('directus_roles')
.whereNotIn('id', excludeKeys)
.andWhere({ admin_access: true })
.first();
const otherAdminRolesCount = +(otherAdminRoles?.count || 0);
if (otherAdminRolesCount === 0) throw new UnprocessableEntityException(`You can't delete the last admin role.`);
}
private async checkForOtherAdminUsers(key: PrimaryKey, users: Alterations | Item[]): Promise<void> {
const role = await this.knex.select('admin_access').from('directus_roles').where('id', '=', key).first();
if (!role) throw new ForbiddenException();
// The users that will now be in this new non-admin role
let userKeys: PrimaryKey[] = [];
if (Array.isArray(users)) {
userKeys = users.map((user) => (typeof user === 'string' ? user : user.id)).filter((id) => id);
} else {
userKeys = users.update.map((user) => user.id).filter((id) => id);
}
const usersThatWereInRoleBefore = (await this.knex.select('id').from('directus_users').where('role', '=', key)).map(
(user) => user.id
);
const usersThatAreRemoved = usersThatWereInRoleBefore.filter((id) => userKeys.includes(id) === false);
const usersThatAreAdded = Array.isArray(users) ? users : users.create;
// If the role the users are moved to is an admin-role, and there's at least 1 (new) admin
// user, we don't have to check for other admin users
if ((role.admin_access === true || role.admin_access === 1) && usersThatAreAdded.length > 0) return;
const otherAdminUsers = await this.knex
.count('*', { as: 'count' })
.from('directus_users')
.whereNotIn('directus_users.id', [...userKeys, ...usersThatAreRemoved])
.andWhere({ 'directus_roles.admin_access': true })
.leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id')
.first();
const otherAdminUsersCount = +(otherAdminUsers?.count || 0);
if (otherAdminUsersCount === 0) {
throw new UnprocessableEntityException(`You can't remove the last admin user from the admin role.`);
}
return;
}
async updateOne(key: PrimaryKey, data: Record<string, any>, opts?: MutationOptions): Promise<PrimaryKey> {
if ('admin_access' in data && data.admin_access === false) {
await this.checkForOtherAdminRoles([key]);
}
if ('users' in data) {
await this.checkForOtherAdminUsers(key, data.users);
}
return super.updateOne(key, data, opts);
}
async updateMany(keys: PrimaryKey[], data: Record<string, any>, opts?: MutationOptions): Promise<PrimaryKey[]> {
if ('admin_access' in data && data.admin_access === false) {
await this.checkForOtherAdminRoles(keys);
}
return super.updateMany(keys, data, opts);
}
async deleteOne(key: PrimaryKey): Promise<PrimaryKey> {
await this.deleteMany([key]);
return key;
}
async deleteMany(keys: PrimaryKey[]): Promise<PrimaryKey[]> {
// Make sure there's at least one admin role left after this deletion is done
const otherAdminRoles = await this.knex
.count('*', { as: 'count' })
.from('directus_roles')
.whereNotIn('id', keys)
.andWhere({ admin_access: true })
.first();
const otherAdminRolesCount = +(otherAdminRoles?.count || 0);
if (otherAdminRolesCount === 0) throw new UnprocessableEntityException(`You can't delete the last admin role.`);
await this.checkForOtherAdminRoles(keys);
await this.knex.transaction(async (trx) => {
const itemsService = new ItemsService('directus_roles', {
@@ -77,6 +145,10 @@ export class RolesService extends ItemsService {
return keys;
}
deleteByQuery(query: Query, opts?: MutationOptions): Promise<PrimaryKey[]> {
return super.deleteByQuery(query, opts);
}
/**
* @deprecated Use `deleteOne` or `deleteMany` instead
*/
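Both guards run before the underlying mutation, so demoting or deleting the last admin role fails fast with an UnprocessableEntityException rather than leaving the instance without an administrator. Illustrative usage (IDs and variables invented):

const rolesService = new RolesService({ knex, schema });

// Throws if 'role-id' is the only role left with admin_access:
await rolesService.updateOne('role-id', { admin_access: false });

// Likewise for deleting the last admin role:
await rolesService.deleteMany(['role-id']);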

View File

@@ -12,8 +12,9 @@ import env from '../env';
import logger from '../logger';
import { rateLimiter } from '../middleware/rate-limiter';
import storage from '../storage';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types';
import { toArray } from '../utils/to-array';
import { AbstractServiceOptions, SchemaOverview } from '../types';
import { Accountability } from '@directus/shared/types';
import { toArray } from '@directus/shared/utils';
import getMailer from '../mailer';
import { SettingsService } from './settings';

View File

@@ -7,16 +7,8 @@ import { OpenAPIObject, OperationObject, PathItemObject, SchemaObject, TagObject
import { version } from '../../package.json';
import getDatabase from '../database';
import env from '../env';
import {
AbstractServiceOptions,
Accountability,
Collection,
Field,
Permission,
Relation,
SchemaOverview,
types,
} from '../types';
import { AbstractServiceOptions, Collection, Permission, Relation, SchemaOverview } from '../types';
import { Accountability, Field, Type } from '@directus/shared/types';
import { getRelationType } from '../utils/get-relation-type';
import { CollectionsService } from './collections';
import { FieldsService } from './fields';
@@ -459,20 +451,33 @@ class OASSpecsService implements SpecificationSubService {
}
private fieldTypes: Record<
typeof types[number],
Type,
{
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'integer' | 'null' | undefined;
format?: string;
items?: any;
}
> = {
alias: {
type: 'string',
},
bigInteger: {
type: 'integer',
format: 'int64',
},
binary: {
type: 'string',
format: 'binary',
},
boolean: {
type: 'boolean',
},
csv: {
type: 'array',
items: {
type: 'string',
},
},
date: {
type: 'string',
format: 'date',
@@ -488,6 +493,9 @@ class OASSpecsService implements SpecificationSubService {
type: 'number',
format: 'float',
},
hash: {
type: 'string',
},
integer: {
type: 'integer',
},
@@ -511,23 +519,13 @@ class OASSpecsService implements SpecificationSubService {
type: 'string',
format: 'timestamp',
},
binary: {
type: 'string',
format: 'binary',
unknown: {
type: undefined,
},
uuid: {
type: 'string',
format: 'uuid',
},
csv: {
type: 'array',
items: {
type: 'string',
},
},
hash: {
type: 'string',
},
};
}

View File

@@ -1,11 +1,11 @@
import argon2 from 'argon2';
import jwt from 'jsonwebtoken';
import { Knex } from 'knex';
import { clone } from 'lodash';
import { clone, cloneDeep } from 'lodash';
import getDatabase from '../database';
import env from '../env';
import { FailedValidationException } from '@directus/shared/exceptions';
import {
FailedValidationException,
ForbiddenException,
InvalidPayloadException,
UnprocessableEntityException,
@@ -13,13 +13,16 @@ import {
} from '../exceptions';
import { RecordNotUniqueException } from '../exceptions/database/record-not-unique';
import logger from '../logger';
import { AbstractServiceOptions, Accountability, Item, PrimaryKey, Query, SchemaOverview } from '../types';
import { AbstractServiceOptions, Item, PrimaryKey, Query, SchemaOverview } from '../types';
import { Accountability } from '@directus/shared/types';
import isUrlAllowed from '../utils/is-url-allowed';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
import { AuthenticationService } from './authentication';
import { ItemsService, MutationOptions } from './items';
import { MailService } from './mail';
import { SettingsService } from './settings';
import { stall } from '../utils/stall';
import { performance } from 'perf_hooks';
export class UsersService extends ItemsService {
knex: Knex;
@@ -99,12 +102,33 @@ export class UsersService extends ItemsService {
return true;
}
private async checkRemainingAdminExistence(excludeKeys: PrimaryKey[]) {
// Make sure there's at least one admin user left after this deletion is done
const otherAdminUsers = await this.knex
.count('*', { as: 'count' })
.from('directus_users')
.whereNotIn('directus_users.id', excludeKeys)
.andWhere({ 'directus_roles.admin_access': true })
.leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id')
.first();
const otherAdminUsersCount = +(otherAdminUsers?.count || 0);
if (otherAdminUsersCount === 0) {
throw new UnprocessableEntityException(`You can't remove the last admin user from the role.`);
}
}
/**
* Create a new user
*/
async createOne(data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey> {
const email = data.email.toLowerCase();
await this.checkUniqueEmails([email]);
const email = data.email?.toLowerCase();
if (email) {
await this.checkUniqueEmails([email]);
}
return await this.service.createOne(data, opts);
}
@@ -129,6 +153,14 @@ export class UsersService extends ItemsService {
}
async updateOne(key: PrimaryKey, data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey> {
if (data.role) {
const newRole = await this.knex.select('admin_access').from('directus_roles').where('id', data.role).first();
if (newRole && !newRole.admin_access) {
await this.checkRemainingAdminExistence([key]);
}
}
const email = data.email?.toLowerCase();
if (email) {
@@ -147,6 +179,14 @@ export class UsersService extends ItemsService {
}
async updateMany(keys: PrimaryKey[], data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey[]> {
if (data.role) {
const newRole = await this.knex.select('admin_access').from('directus_roles').where('id', data.role).first();
if (newRole && !newRole.admin_access) {
await this.checkRemainingAdminExistence(keys);
}
}
const email = data.email?.toLowerCase();
if (email) {
@@ -165,6 +205,29 @@ export class UsersService extends ItemsService {
}
async updateByQuery(query: Query, data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey[]> {
if (data.role) {
const newRole = await this.knex.select('admin_access').from('directus_roles').where('id', data.role).first();
if (newRole && !newRole.admin_access) {
// This is duplicated a touch, but we need to know the keys first
// Not authenticated:
const itemsService = new ItemsService('directus_users', {
knex: this.knex,
schema: this.schema,
});
const readQuery = cloneDeep(query);
readQuery.fields = ['id'];
// We read the IDs of the items based on the query, and then run `updateMany`. `updateMany` does its own
// permissions check for the keys, so we don't have to make this an authenticated read
const itemsToUpdate = await itemsService.readByQuery(readQuery);
const keys = itemsToUpdate.map((item) => item.id);
await this.checkRemainingAdminExistence(keys);
}
}
const email = data.email?.toLowerCase();
if (email) {
@@ -183,20 +246,7 @@ export class UsersService extends ItemsService {
}
async deleteOne(key: PrimaryKey, opts?: MutationOptions): Promise<PrimaryKey> {
// Make sure there's at least one admin user left after this deletion is done
const otherAdminUsers = await this.knex
.count('*', { as: 'count' })
.from('directus_users')
.whereNot('directus_users.id', key)
.andWhere({ 'directus_roles.admin_access': true })
.leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id')
.first();
const otherAdminUsersCount = +(otherAdminUsers?.count || 0);
if (otherAdminUsersCount === 0) {
throw new UnprocessableEntityException(`You can't delete the last admin user.`);
}
await this.checkRemainingAdminExistence([key]);
await this.service.deleteOne(key, opts);
@@ -204,26 +254,32 @@ export class UsersService extends ItemsService {
}
async deleteMany(keys: PrimaryKey[], opts?: MutationOptions): Promise<PrimaryKey[]> {
// Make sure there's at least one admin user left after this deletion is done
const otherAdminUsers = await this.knex
.count('*', { as: 'count' })
.from('directus_users')
.whereNotIn('directus_users.id', keys)
.andWhere({ 'directus_roles.admin_access': true })
.leftJoin('directus_roles', 'directus_users.role', 'directus_roles.id')
.first();
const otherAdminUsersCount = +(otherAdminUsers?.count || 0);
if (otherAdminUsersCount === 0) {
throw new UnprocessableEntityException(`You can't delete the last admin user.`);
}
await this.checkRemainingAdminExistence(keys);
await this.service.deleteMany(keys, opts);
return keys;
}
async deleteByQuery(query: Query, opts?: MutationOptions): Promise<PrimaryKey[]> {
const primaryKeyField = this.schema.collections[this.collection].primary;
const readQuery = cloneDeep(query);
readQuery.fields = [primaryKeyField];
// Not authenticated:
const itemsService = new ItemsService(this.collection, {
knex: this.knex,
schema: this.schema,
});
const itemsToDelete = await itemsService.readByQuery(readQuery);
const keys: PrimaryKey[] = itemsToDelete.map((item: Item) => item[primaryKeyField]);
if (keys.length === 0) return [];
return await this.deleteMany(keys, opts);
}
async inviteUser(email: string | string[], role: string, url: string | null, subject?: string | null): Promise<void> {
const emails = toArray(email);
@@ -292,8 +348,14 @@ export class UsersService extends ItemsService {
}
async requestPasswordReset(email: string, url: string | null, subject?: string | null): Promise<void> {
const STALL_TIME = 500;
const timeStart = performance.now();
const user = await this.knex.select('id').from('directus_users').where({ email }).first();
if (!user) throw new ForbiddenException();
if (!user) {
await stall(STALL_TIME, timeStart);
throw new ForbiddenException();
}
const mailService = new MailService({
schema: this.schema,
@@ -322,6 +384,8 @@ export class UsersService extends ItemsService {
},
},
});
await stall(STALL_TIME, timeStart);
}
async resetPassword(token: string, password: string): Promise<void> {

View File

@@ -2,7 +2,8 @@ import { Knex } from 'knex';
import getDatabase from '../database';
import { systemCollectionRows } from '../database/system-data/collections';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { AbstractServiceOptions, Accountability, PrimaryKey, SchemaOverview } from '../types';
import { AbstractServiceOptions, PrimaryKey, SchemaOverview } from '../types';
import { Accountability } from '@directus/shared/types';
export class UtilsService {
knex: Knex;

View File

@@ -1,6 +1,8 @@
import emitter, { emitAsyncSafe } from './emitter';
import env from './env';
import logger from './logger';
import checkForUpdate from 'update-check';
import pkg from '../package.json';
// If this file is called directly using node, start the server
if (require.main === module) {
@@ -18,6 +20,16 @@ export default async function start(): Promise<void> {
server
.listen(port, () => {
checkForUpdate(pkg)
.then((update) => {
if (update) {
logger.warn(`Update available: ${pkg.version} -> ${update.latest}`);
}
})
.catch(() => {
// No need to log/warn here. The update message is only an informative nice-to-have
});
logger.info(`Server started at port ${port}`);
emitAsyncSafe('server.start');
})

View File

@@ -4,7 +4,7 @@ import { GoogleCloudStorage } from '@directus/drive-gcs';
import { AmazonWebServicesS3Storage } from '@directus/drive-s3';
import env from './env';
import { getConfigFromEnv } from './utils/get-config-from-env';
import { toArray } from './utils/to-array';
import { toArray } from '@directus/shared/utils';
import { validateEnv } from './utils/validate-env';
validateEnv(['STORAGE_LOCATIONS']);

View File

@@ -1,9 +0,0 @@
export type Accountability = {
role: string | null;
user?: string | null;
admin?: boolean;
app?: boolean;
ip?: string;
userAgent?: string;
};

View File

@@ -1,10 +1,84 @@
export type Transformation = {
import { ResizeOptions, Sharp } from 'sharp';
// List of allowed sharp methods to expose.
//
// This is a literal, so we can use it to validate request parameters.
export const TransformationMethods /*: readonly (keyof Sharp)[]*/ = [
// Output options
// https://sharp.pixelplumbing.com/api-output
'toFormat',
'jpeg',
'png',
'tiff',
'webp',
// Resizing
// https://sharp.pixelplumbing.com/api-resize
'resize',
'extend',
'extract',
'trim',
// Image operations
// https://sharp.pixelplumbing.com/api-operation
'rotate',
'flip',
'flop',
'sharpen',
'median',
'blur',
'flatten',
'gamma',
'negate',
'normalise',
'normalize',
'clahe',
'convolve',
'threshold',
'linear',
'recomb',
'modulate',
// Color manipulation
// https://sharp.pixelplumbing.com/api-colour
'tint',
'greyscale',
'grayscale',
'toColorspace',
'toColourspace',
// Channel manipulation
// https://sharp.pixelplumbing.com/api-channel
'removeAlpha',
'ensureAlpha',
'extractChannel',
'bandbool',
] as const;
// Helper types
type AllowedSharpMethods = Pick<Sharp, typeof TransformationMethods[number]>;
export type TransformationMap = {
[M in keyof AllowedSharpMethods]: readonly [M, ...Parameters<AllowedSharpMethods[M]>];
};
export type Transformation = TransformationMap[keyof TransformationMap];
export type TransformationParams = {
key?: string;
width?: number; // width
height?: number; // height
fit?: 'cover' | 'contain' | 'inside' | 'outside'; // fit
withoutEnlargement?: boolean; // Without Enlargement
transforms?: Transformation[];
};
// Transformation preset is defined in the admin UI.
export type TransformationPreset = TransformationPresetFormat &
TransformationPresetResize &
TransformationParams & { key: string };
export type TransformationPresetFormat = {
format?: 'jpg' | 'jpeg' | 'png' | 'webp' | 'tiff';
quality?: number;
};
// @NOTE Keys used in Transformation should match ASSET_GENERATION_QUERY_KEYS in constants.ts
export type TransformationPresetResize = Pick<ResizeOptions, 'width' | 'height' | 'fit' | 'withoutEnlargement'>;
// @NOTE Keys used in TransformationParams should match ASSET_GENERATION_QUERY_KEYS in constants.ts
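Because every TransformationMap entry is a [method, ...args] tuple keyed by an allow-listed sharp method, a request parameter can be checked against TransformationMethods and valid tuples replayed onto a sharp instance. A sketch under those assumptions; the guard helper and the transform values are invented, not part of this diff:

```ts
import sharp, { Sharp } from 'sharp';
import { Transformation, TransformationMethods } from './assets';

// Hypothetical guard: accept a parameter only if it names an allowed method.
function isAllowedMethod(name: string): name is typeof TransformationMethods[number] {
	return (TransformationMethods as readonly string[]).includes(name);
}

// Each tuple is [methodName, ...args], so it can be spread onto the instance.
// The values below are made up for illustration.
const transforms: Transformation[] = [
	['rotate', 90],
	['blur', 3],
	['flip'],
];

const image = transforms.reduce<Sharp>(
	(instance, [method, ...args]) => (instance[method] as any)(...args),
	sharp('input.jpg')
);
```

Sharp's fluent API returns the instance from each call, which is what lets the reduce chain the tuples together.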

View File

@@ -1,5 +1,5 @@
import { Table } from 'knex-schema-inspector/dist/types/table';
import { Field } from './field';
import { Field } from '@directus/shared/types';
export type CollectionMeta = {
collection: string;

View File

@@ -1,40 +0,0 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
/* eslint-disable @typescript-eslint/ban-types */
type Primitive = string | number | boolean | bigint | symbol | undefined | null;
type Builtin = Primitive | Function | Date | Error | RegExp;
type IsTuple<T> = T extends [infer A]
? T
: T extends [infer A, infer B]
? T
: T extends [infer A, infer B, infer C]
? T
: T extends [infer A, infer B, infer C, infer D]
? T
: T extends [infer A, infer B, infer C, infer D, infer E]
? T
: never;
type DeepPartial<T> = T extends Primitive | Builtin
? T
: T extends Map<infer K, infer V>
? Map<DeepPartial<K>, DeepPartial<V>>
: T extends ReadonlyMap<infer K, infer V>
? ReadonlyMap<DeepPartial<K>, DeepPartial<V>>
: T extends WeakMap<infer K, infer V>
? WeakMap<DeepPartial<K>, DeepPartial<V>>
: T extends Set<infer U>
? Set<DeepPartial<U>>
: T extends ReadonlySet<infer U>
? ReadonlySet<DeepPartial<U>>
: T extends WeakSet<infer U>
? WeakSet<DeepPartial<U>>
: T extends Array<infer U>
? T extends IsTuple<T>
? { [K in keyof T]?: DeepPartial<T[K]> }
: Array<DeepPartial<U>>
: T extends Promise<infer U>
? Promise<DeepPartial<U>>
: T extends {}
? { [K in keyof T]?: DeepPartial<T[K]> }
: Partial<T>;

View File

@@ -2,7 +2,7 @@
* Custom properties on the req object in express
*/
import { Accountability } from './accountability';
import { Accountability } from '@directus/shared/types';
import { Query } from './query';
import { SchemaOverview } from './schema';

View File

@@ -1,44 +0,0 @@
import { Column } from 'knex-schema-inspector/dist/types/column';
export const types = [
'bigInteger',
'boolean',
'date',
'dateTime',
'decimal',
'float',
'integer',
'json',
'string',
'text',
'time',
'timestamp',
'binary',
'uuid',
'hash',
'csv',
] as const;
export type FieldMeta = {
id: number;
collection: string;
field: string;
special: string[] | null;
interface: string | null;
options: Record<string, any> | null;
readonly: boolean;
hidden: boolean;
sort: number | null;
width: string | null;
group: number | null;
note: string | null;
translations: null;
};
export type Field = {
collection: string;
field: string;
type: typeof types[number];
schema: Column | null;
meta: FieldMeta | null;
};

View File

@@ -1,4 +1,3 @@
/** @todo finalize */
export type File = {
id: string; // uuid
storage: string;

View File

@@ -1,14 +1,13 @@
export * from './accountability';
export * from './activity';
export * from './assets';
export * from './ast';
export * from './collection';
export * from './extensions';
export * from './field';
export * from './files';
export * from './graphql';
export * from './items';
export * from './meta';
export * from './migration';
export * from './permissions';
export * from './query';
export * from './relation';

View File

@@ -6,3 +6,13 @@
export type Item = Record<string, any>;
export type PrimaryKey = string | number;
export type Alterations = {
create: {
[key: string]: any;
}[];
update: {
[key: string]: any;
}[];
delete: (number | string)[];
};
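Alterations describes staged changes to a relational field in a single payload: rows to create, rows to update (carrying their keys), and primary keys to delete. A hypothetical one-to-many payload, with field names and values invented:

```ts
import { Alterations } from './items';

// Hypothetical: one new child row, one edit, one removal by primary key.
const changes: Alterations = {
	create: [{ title: 'New child item' }],
	update: [{ id: 12, title: 'Renamed child item' }],
	delete: [7],
};
```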

View File

@@ -0,0 +1,5 @@
export type Migration = {
version: string;
name: string;
timestamp: Date;
};
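For reference, a Migration record pairs a sortable version string with a readable name and the time it ran; a hypothetical example with all values invented:

```ts
import { Migration } from './migration';

const ran: Migration = {
	version: '20210803A',
	name: 'Add Example Collection',
	timestamp: new Date('2021-08-03T12:00:00Z'),
};
```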

View File

@@ -53,5 +53,3 @@ export type FilterOperator =
| 'nnull'
| 'empty'
| 'nempty';
export type ValidationOperator = 'required' | 'regex';

View File

@@ -1,4 +1,4 @@
import { types } from './field';
import { Type } from '@directus/shared/types';
import { Permission } from './permissions';
import { Relation } from './relation';
@@ -15,7 +15,7 @@ type CollectionsOverview = {
field: string;
defaultValue: any;
nullable: boolean;
type: typeof types[number] | 'unknown' | 'alias';
type: Type | 'unknown' | 'alias';
dbType: string | null;
precision: number | null;
scale: number | null;

View File

@@ -1,6 +1,6 @@
import { Knex } from 'knex';
import { SchemaOverview } from '../types';
import { Accountability } from './accountability';
import { Accountability } from '@directus/shared/types';
import { Item, PrimaryKey } from './items';
import { PermissionsAction } from './permissions';
import { Query } from './query';

Some files were not shown because too many files have changed in this diff.