Mirror of https://github.com/directus/directus.git (synced 2026-01-31 12:28:14 -05:00)
Merge branch 'main' into aggregation
@@ -1,6 +1,5 @@
import bodyParser from 'body-parser';
import cookieParser from 'cookie-parser';
import express from 'express';
import express, { RequestHandler } from 'express';
import expressLogger from 'express-pino-logger';
import fse from 'fs-extra';
import path from 'path';
@@ -72,12 +71,14 @@ export default async function createApp(): Promise<express.Application> {

await emitAsyncSafe('middlewares.init.before', { app });

app.use(expressLogger({ logger }));
app.use(expressLogger({ logger }) as RequestHandler);

app.use((req, res, next) => {
bodyParser.json({
limit: env.MAX_PAYLOAD_SIZE,
})(req, res, (err) => {
(
express.json({
limit: env.MAX_PAYLOAD_SIZE,
}) as RequestHandler
)(req, res, (err: any) => {
if (err) {
return next(new InvalidPayloadException(err.message));
}

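A note on the bodyParser.json to express.json swap above: since Express 4.16, body-parser's JSON middleware has been bundled and re-exported as express.json(), so the separate body-parser import can be dropped. A minimal standalone sketch of the equivalence (not the Directus code; the limit value is an example):

import express from 'express';

const app = express();

// Same underlying parser, same options: `limit` caps the accepted request
// body size exactly as bodyParser.json({ limit }) did.
app.use(express.json({ limit: '100kb' }));
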
@@ -4,6 +4,8 @@ import installDatabase from '../../../database/seeds/run';
import env from '../../../env';
import logger from '../../../logger';
import { getSchema } from '../../../utils/get-schema';
import { RolesService, UsersService, SettingsService } from '../../../services';
import getDatabase, { isInstalled, hasDatabaseConnection } from '../../../database';

export default async function bootstrap(): Promise<void> {
logger.info('Initializing bootstrap...');
@@ -13,10 +15,7 @@ export default async function bootstrap(): Promise<void> {
process.exit(1);
}

const { isInstalled, default: database } = require('../../../database');
const { RolesService } = require('../../../services/roles');
const { UsersService } = require('../../../services/users');
const { SettingsService } = require('../../../services/settings');
const database = getDatabase();

if ((await isInstalled()) === false) {
logger.info('Installing Directus system tables...');
@@ -66,8 +65,6 @@ export default async function bootstrap(): Promise<void> {
}

async function isDatabaseAvailable() {
const { hasDatabaseConnection } = require('../../../database');

const tries = 5;
const secondsBetweenTries = 5;

@@ -1,5 +1,7 @@
import getDatabase from '../../../database';

export default async function count(collection: string): Promise<void> {
const database = require('../../../database/index').default;
const database = getDatabase();

if (!collection) {
console.error('Collection is required');

@@ -1,9 +1,9 @@
import { Knex } from 'knex';
import runMigrations from '../../../database/migrations/run';
import installSeeds from '../../../database/seeds/run';
import getDatabase from '../../../database';

export default async function start(): Promise<void> {
const database = require('../../../database/index').default as Knex;
const database = getDatabase();

try {
await installSeeds(database);

@@ -1,7 +1,8 @@
import run from '../../../database/migrations/run';
import getDatabase from '../../../database';

export default async function migrate(direction: 'latest' | 'up' | 'down'): Promise<void> {
const database = require('../../../database').default;
const database = getDatabase();

try {
console.log('✨ Running migrations...');

@@ -1,8 +1,9 @@
import { getSchema } from '../../../utils/get-schema';
import { RolesService } from '../../../services';
import getDatabase from '../../../database';

export default async function rolesCreate({ role: name, admin }: { role: string; admin: boolean }): Promise<void> {
const { default: database } = require('../../../database/index');
const { RolesService } = require('../../../services/roles');
const database = getDatabase();

if (!name) {
console.error('Name is required');

@@ -1,4 +1,6 @@
import { getSchema } from '../../../utils/get-schema';
import { UsersService } from '../../../services';
import getDatabase from '../../../database';

export default async function usersCreate({
email,
@@ -9,8 +11,7 @@ export default async function usersCreate({
password?: string;
role?: string;
}): Promise<void> {
const { default: database } = require('../../../database/index');
const { UsersService } = require('../../../services/users');
const database = getDatabase();

if (!email || !password || !role) {
console.error('Email, password, role are required');

@@ -1,9 +1,10 @@
import argon2 from 'argon2';
import { getSchema } from '../../../utils/get-schema';
import { UsersService } from '../../../services';
import getDatabase from '../../../database';

export default async function usersPasswd({ email, password }: { email?: string; password?: string }): Promise<void> {
const { default: database } = require('../../../database/index');
const { UsersService } = require('../../../services/users');
const database = getDatabase();

if (!email || !password) {
console.error('Email and password are required');

@@ -4,7 +4,7 @@ import { pick } from 'lodash';
import ms from 'ms';
import validate from 'uuid-validate';
import { ASSET_TRANSFORM_QUERY_KEYS, SYSTEM_ASSET_ALLOW_LIST } from '../constants';
import database from '../database';
import getDatabase from '../database';
import env from '../env';
import { ForbiddenException, InvalidQueryException, RangeNotSatisfiableException } from '../exceptions';
import useCollection from '../middleware/use-collection';
@@ -32,11 +32,11 @@ router.get(
* This is a little annoying. Postgres will error out if you're trying to search in `where`
* with a wrong type. In case of directus_files where id is a uuid, we'll have to verify the
* validity of the uuid ahead of time.
* @todo move this to a validation middleware function
*/
const isValidUUID = validate(id, 4);
if (isValidUUID === false) throw new ForbiddenException();

const database = getDatabase();
const file = await database.select('id', 'storage', 'filename_disk').from('directus_files').where({ id }).first();
if (!file) throw new ForbiddenException();

@@ -51,6 +51,7 @@ router.get(
const payloadService = new PayloadService('directus_settings', { schema: req.schema });
const defaults = { storage_asset_presets: [], storage_asset_transform: 'all' };

const database = getDatabase();
const savedAssetSettings = await database
.select('storage_asset_presets', 'storage_asset_transform')
.from('directus_settings')

@@ -67,7 +67,7 @@ const newFieldSchema = Joi.object({
type: Joi.string()
.valid(...types, ...ALIAS_TYPES)
.allow(null)
.required(),
.optional(),
schema: Joi.object({
default_value: Joi.any(),
max_length: [Joi.number(), Joi.string(), Joi.valid(null)],

@@ -1,60 +1,98 @@
import SchemaInspector from '@directus/schema';
import dotenv from 'dotenv';
import { knex, Knex } from 'knex';
import path from 'path';
import { performance } from 'perf_hooks';
import env from '../env';
import logger from '../logger';
import { getConfigFromEnv } from '../utils/get-config-from-env';
import { validateEnv } from '../utils/validate-env';

dotenv.config({ path: path.resolve(__dirname, '../../', '.env') });
let database: Knex | null = null;
let inspector: ReturnType<typeof SchemaInspector> | null = null;

const connectionConfig: Record<string, any> = getConfigFromEnv('DB_', [
'DB_CLIENT',
'DB_SEARCH_PATH',
'DB_CONNECTION_STRING',
'DB_POOL',
]);
export default function getDatabase(): Knex {
if (database) {
return database;
}

const poolConfig = getConfigFromEnv('DB_POOL');
const connectionConfig: Record<string, any> = getConfigFromEnv('DB_', [
'DB_CLIENT',
'DB_SEARCH_PATH',
'DB_CONNECTION_STRING',
'DB_POOL',
]);

validateEnv(['DB_CLIENT']);
const poolConfig = getConfigFromEnv('DB_POOL');

const knexConfig: Knex.Config = {
client: env.DB_CLIENT,
searchPath: env.DB_SEARCH_PATH,
connection: env.DB_CONNECTION_STRING || connectionConfig,
log: {
warn: (msg) => logger.warn(msg),
error: (msg) => logger.error(msg),
deprecate: (msg) => logger.info(msg),
debug: (msg) => logger.debug(msg),
},
pool: poolConfig,
};
const requiredEnvVars = ['DB_CLIENT'];

if (env.DB_CLIENT === 'sqlite3') {
knexConfig.useNullAsDefault = true;
poolConfig.afterCreate = (conn: any, cb: any) => {
conn.run('PRAGMA foreign_keys = ON', cb);
if (env.DB_CLIENT && env.DB_CLIENT === 'sqlite3') {
requiredEnvVars.push('DB_FILENAME');
} else if (env.DB_CLIENT && env.DB_CLIENT === 'oracledb') {
requiredEnvVars.push('DB_USER', 'DB_PASSWORD', 'DB_CONNECT_STRING');
} else {
if (env.DB_CLIENT === 'pg') {
if (!env.DB_CONNECTION_STRING) {
requiredEnvVars.push('DB_HOST', 'DB_PORT', 'DB_DATABASE', 'DB_USER');
}
} else {
requiredEnvVars.push('DB_HOST', 'DB_PORT', 'DB_DATABASE', 'DB_USER', 'DB_PASSWORD');
}
}

validateEnv(requiredEnvVars);

const knexConfig: Knex.Config = {
client: env.DB_CLIENT,
searchPath: env.DB_SEARCH_PATH,
connection: env.DB_CONNECTION_STRING || connectionConfig,
log: {
warn: (msg) => logger.warn(msg),
error: (msg) => logger.error(msg),
deprecate: (msg) => logger.info(msg),
debug: (msg) => logger.debug(msg),
},
pool: poolConfig,
};

if (env.DB_CLIENT === 'sqlite3') {
knexConfig.useNullAsDefault = true;
poolConfig.afterCreate = (conn: any, cb: any) => {
conn.run('PRAGMA foreign_keys = ON', cb);
};
}

database = knex(knexConfig);

const times: Record<string, number> = {};

database
.on('query', (queryInfo) => {
times[queryInfo.__knexUid] = performance.now();
})
.on('query-response', (response, queryInfo) => {
const delta = performance.now() - times[queryInfo.__knexUid];
logger.trace(`[${delta.toFixed(3)}ms] ${queryInfo.sql} [${queryInfo.bindings.join(', ')}]`);
delete times[queryInfo.__knexUid];
});

return database;
}

const database = knex(knexConfig);
export function getSchemaInspector(): ReturnType<typeof SchemaInspector> {
if (inspector) {
return inspector;
}

const times: Record<string, number> = {};
const database = getDatabase();

database
.on('query', (queryInfo) => {
times[queryInfo.__knexUid] = performance.now();
})
.on('query-response', (response, queryInfo) => {
const delta = performance.now() - times[queryInfo.__knexUid];
logger.trace(`[${delta.toFixed(3)}ms] ${queryInfo.sql} [${queryInfo.bindings.join(', ')}]`);
});
inspector = SchemaInspector(database);

return inspector;
}

export async function hasDatabaseConnection(): Promise<boolean> {
const database = getDatabase();

try {
if (env.DB_CLIENT === 'oracledb') {
await database.raw('select 1 from DUAL');
@@ -77,13 +115,11 @@ export async function validateDBConnection(): Promise<void> {
}
}

export const schemaInspector = SchemaInspector(database);

export async function isInstalled(): Promise<boolean> {
const inspector = getSchemaInspector();

// The existence of a directus_collections table alone isn't a "proper" check to see if everything
// is installed correctly of course, but it's safe enough to assume that this collection only
// exists when using the installer CLI.
return await schemaInspector.hasTable('directus_collections');
return await inspector.hasTable('directus_collections');
}

export default database;

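The pattern this file establishes, and that the rest of the commit migrates every consumer to, is a lazily initialized singleton: the connection pool is created on first use instead of at import time. A minimal sketch of just that mechanism (an assumed simplification; the real getDatabase above also builds its config from env and wires up query logging):

import { knex, Knex } from 'knex';

let database: Knex | null = null;

export default function getDatabase(): Knex {
  // Subsequent calls reuse the same pool.
  if (database) return database;

  // Nothing connects at import time, so env can still be loaded or refreshed
  // before the first call, which the CLI commands above rely on.
  database = knex({ client: 'sqlite3', connection: { filename: './data.db' }, useNullAsDefault: true });
  return database;
}
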
@@ -1,14 +1,15 @@
import { Knex } from 'knex';
import SchemaInspector from 'knex-schema-inspector';
import { schemaInspector } from '..';
import logger from '../../logger';
import { RelationMeta } from '../../types';
import { getDefaultIndexName } from '../../utils/get-default-index-name';

export async function up(knex: Knex): Promise<void> {
const inspector = SchemaInspector(knex);

const foreignKeys = await inspector.foreignKeys();
const relations = await knex
.select<RelationMeta[]>('many_collection', 'many_field', 'one_collection')
.select<RelationMeta[]>('id', 'many_collection', 'many_field', 'one_collection')
.from('directus_relations');

const constraintsToAdd = relations.filter((relation) => {
@@ -18,45 +19,82 @@ export async function up(knex: Knex): Promise<void> {
return exists === false;
});

await knex.transaction(async (trx) => {
for (const constraint of constraintsToAdd) {
if (!constraint.one_collection) continue;
const corruptedRelations: number[] = [];

const currentPrimaryKeyField = await schemaInspector.primary(constraint.many_collection);
const relatedPrimaryKeyField = await schemaInspector.primary(constraint.one_collection);
if (!currentPrimaryKeyField || !relatedPrimaryKeyField) continue;
for (const constraint of constraintsToAdd) {
if (!constraint.one_collection) continue;

const rowsWithIllegalFKValues = await trx
.select(`${constraint.many_collection}.${currentPrimaryKeyField}`)
.from(constraint.many_collection)
.leftJoin(
constraint.one_collection,
`${constraint.many_collection}.${constraint.many_field}`,
`${constraint.one_collection}.${relatedPrimaryKeyField}`
)
.whereNull(`${constraint.one_collection}.${relatedPrimaryKeyField}`);
if (
(await inspector.hasTable(constraint.many_collection)) === false ||
(await inspector.hasTable(constraint.one_collection)) === false
) {
logger.warn(
`Ignoring ${constraint.many_collection}.${constraint.many_field}<->${constraint.one_collection}. Tables don't exist.`
);

if (rowsWithIllegalFKValues.length > 0) {
const ids: (string | number)[] = rowsWithIllegalFKValues.map<string | number>(
(row) => row[currentPrimaryKeyField]
);
corruptedRelations.push(constraint.id);
continue;
}

await trx(constraint.many_collection)
const currentPrimaryKeyField = await inspector.primary(constraint.many_collection);
const relatedPrimaryKeyField = await inspector.primary(constraint.one_collection);

if (constraint.many_field === currentPrimaryKeyField) {
logger.warn(
`Illegal relationship ${constraint.many_collection}.${constraint.many_field}<->${constraint.one_collection} encountered. Many field equals collections primary key.`
);
corruptedRelations.push(constraint.id);
continue;
}

if (!currentPrimaryKeyField || !relatedPrimaryKeyField) continue;

const rowsWithIllegalFKValues = await knex
.select(`main.${currentPrimaryKeyField}`)
.from({ main: constraint.many_collection })
.leftJoin(
{ related: constraint.one_collection },
`main.${constraint.many_field}`,
`related.${relatedPrimaryKeyField}`
)
.whereNull(`related.${relatedPrimaryKeyField}`);

if (rowsWithIllegalFKValues.length > 0) {
const ids: (string | number)[] = rowsWithIllegalFKValues.map<string | number>(
(row) => row[currentPrimaryKeyField]
);

try {
await knex(constraint.many_collection)
.update({ [constraint.many_field]: null })
.whereIn(currentPrimaryKeyField, ids);
} catch (err) {
logger.error(
`${constraint.many_collection}.${constraint.many_field} contains illegal foreign keys which couldn't be set to NULL. Please fix these references and rerun this migration to complete the upgrade.`
);

if (ids.length < 25) {
logger.error(`Items with illegal foreign keys: ${ids.join(', ')}`);
} else {
logger.error(`Items with illegal foreign keys: ${ids.slice(0, 25).join(', ')} and ${ids.length} others`);
}

throw 'Migration aborted';
}
}

// Can't reliably have circular cascade
const action = constraint.many_collection === constraint.one_collection ? 'NO ACTION' : 'SET NULL';
// Can't reliably have circular cascade
const action = constraint.many_collection === constraint.one_collection ? 'NO ACTION' : 'SET NULL';

// MySQL doesn't accept FKs from `int` to `int unsigned`. `knex` defaults `.increments()`
// to `unsigned`, but defaults `.integer()` to `int`. This means that created m2o fields
// have the wrong type. This step will force the m2o `int` field into `unsigned`, but only
// if both types are integers, and only if we go from `int` to `int unsigned`.
const columnInfo = await schemaInspector.columnInfo(constraint.many_collection, constraint.many_field);
const relatedColumnInfo = await schemaInspector.columnInfo(constraint.one_collection!, relatedPrimaryKeyField);
// MySQL doesn't accept FKs from `int` to `int unsigned`. `knex` defaults `.increments()`
// to `unsigned`, but defaults `.integer()` to `int`. This means that created m2o fields
// have the wrong type. This step will force the m2o `int` field into `unsigned`, but only
// if both types are integers, and only if we go from `int` to `int unsigned`.
const columnInfo = await inspector.columnInfo(constraint.many_collection, constraint.many_field);
const relatedColumnInfo = await inspector.columnInfo(constraint.one_collection!, relatedPrimaryKeyField);

await trx.schema.alterTable(constraint.many_collection, (table) => {
try {
await knex.schema.alterTable(constraint.many_collection, (table) => {
if (
columnInfo.data_type !== relatedColumnInfo.data_type &&
columnInfo.data_type === 'int' &&
@@ -65,21 +103,48 @@ export async function up(knex: Knex): Promise<void> {
table.specificType(constraint.many_field, 'int unsigned').alter();
}

const indexName = getDefaultIndexName('foreign', constraint.many_collection, constraint.many_field);

table
.foreign(constraint.many_field)
.foreign(constraint.many_field, indexName)
.references(relatedPrimaryKeyField)
.inTable(constraint.one_collection!)
.onDelete(action);
});
} catch (err) {
logger.warn(
`Couldn't add foreign key constraint for ${constraint.many_collection}.${constraint.many_field}<->${constraint.one_collection}`
);
logger.warn(err);
}
});
}

if (corruptedRelations.length > 0) {
logger.warn(
`Encountered one or more corrupted relationships. Please check the following rows in "directus_relations": ${corruptedRelations.join(
', '
)}`
);
}
}

export async function down(knex: Knex): Promise<void> {
const relations = await knex.select<RelationMeta[]>('many_collection', 'many_field').from('directus_relations');
const relations = await knex
.select<RelationMeta[]>('many_collection', 'many_field', 'one_collection')
.from('directus_relations');

for (const relation of relations) {
await knex.schema.alterTable(relation.many_collection, (table) => {
table.dropForeign([relation.many_field]);
});
if (!relation.one_collection) continue;

try {
await knex.schema.alterTable(relation.many_collection, (table) => {
table.dropForeign([relation.many_field]);
});
} catch (err) {
logger.warn(
`Couldn't drop foreign key constraint for ${relation.many_collection}.${relation.many_field}<->${relation.one_collection}`
);
logger.warn(err);
}
}
}

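The heart of the up() migration above is an anti-join that finds child rows whose foreign key points at no parent row, so they can be nulled out before the constraint is added. A hedged sketch of that query shape in knex (the articles/users table names are made-up examples):

import { Knex } from 'knex';

// Rows in `articles` whose `author` value matches no row in `users`.
async function findOrphans(knex: Knex): Promise<any[]> {
  return knex
    .select('main.id')
    .from({ main: 'articles' })
    .leftJoin({ related: 'users' }, 'main.author', 'related.id')
    .whereNull('related.id'); // LEFT JOIN + IS NULL keeps only the orphans
}
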
@@ -1,4 +1,5 @@
import { Knex } from 'knex';
import logger from '../../logger';

/**
* Things to keep in mind:
@@ -80,22 +81,84 @@ const updates = [

export async function up(knex: Knex): Promise<void> {
for (const update of updates) {
await knex.schema.alterTable(update.table, (table) => {
for (const constraint of update.constraints) {
table.dropForeign([constraint.column]);
table.foreign(constraint.column).references(constraint.references).onDelete(constraint.on_delete);
for (const constraint of update.constraints) {
try {
await knex.schema.alterTable(update.table, (table) => {
table.dropForeign([constraint.column]);
});
} catch (err) {
logger.warn(`Couldn't drop foreign key ${update.table}.${constraint.column}->${constraint.references}`);
logger.warn(err);
}
});

/**
* MySQL won't delete the index when you drop the foreign key constraint. Gotta make
* sure to clean those up as well
*/
if (knex.client.constructor.name === 'Client_MySQL') {
try {
await knex.schema.alterTable(update.table, (table) => {
// Knex uses a default convention for index names: `table_column_type`
table.dropIndex([constraint.column], `${update.table}_${constraint.column}_foreign`);
});
} catch (err) {
logger.warn(
`Couldn't clean up index for foreign key ${update.table}.${constraint.column}->${constraint.references}`
);
logger.warn(err);
}
}

try {
await knex.schema.alterTable(update.table, (table) => {
table.foreign(constraint.column).references(constraint.references).onDelete(constraint.on_delete);
});
} catch (err) {
logger.warn(`Couldn't add foreign key to ${update.table}.${constraint.column}->${constraint.references}`);
logger.warn(err);
}
}
}
}

export async function down(knex: Knex): Promise<void> {
for (const update of updates) {
await knex.schema.alterTable(update.table, (table) => {
for (const constraint of update.constraints) {
table.dropForeign([constraint.column]);
table.foreign(constraint.column).references(constraint.references);
for (const constraint of update.constraints) {
try {
await knex.schema.alterTable(update.table, (table) => {
table.dropForeign([constraint.column]);
});
} catch (err) {
logger.warn(`Couldn't drop foreign key ${update.table}.${constraint.column}->${constraint.references}`);
logger.warn(err);
}
});

/**
* MySQL won't delete the index when you drop the foreign key constraint. Gotta make
* sure to clean those up as well
*/
if (knex.client.constructor.name === 'Client_MySQL') {
try {
await knex.schema.alterTable(update.table, (table) => {
// Knex uses a default convention for index names: `table_column_type`
table.dropIndex([constraint.column], `${update.table}_${constraint.column}_foreign`);
});
} catch (err) {
logger.warn(
`Couldn't clean up index for foreign key ${update.table}.${constraint.column}->${constraint.references}`
);
logger.warn(err);
}
}

try {
await knex.schema.alterTable(update.table, (table) => {
table.foreign(constraint.column).references(constraint.references);
});
} catch (err) {
logger.warn(`Couldn't add foreign key to ${update.table}.${constraint.column}->${constraint.references}`);
logger.warn(err);
}
}
}
}

@@ -0,0 +1,13 @@
import { Knex } from 'knex';

export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_collections', (table) => {
table.string('color').nullable();
});
}

export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_collections', (table) => {
table.dropColumn('color');
});
}

@@ -5,7 +5,7 @@ import { Item, Query, SchemaOverview } from '../types';
import { AST, FieldNode, NestedCollectionNode } from '../types/ast';
import applyQuery from '../utils/apply-query';
import { toArray } from '../utils/to-array';
import database from './index';
import getDatabase from './index';

type RunASTOptions = {
/**
@@ -39,7 +39,7 @@ export default async function runAST(
): Promise<null | Item | Item[]> {
const ast = cloneDeep(originalAST);

const knex = options?.knex || database;
const knex = options?.knex || getDatabase();

if (ast.type === 'm2a') {
const results: { [collection: string]: null | Item | Item[] } = {};
@@ -295,7 +295,7 @@ function mergeWithParentItems(
});

// We re-apply the requested limit here. This forces the _n_ nested items per parent concept
if (nested) {
if (nested && nestedNode.query.limit !== -1) {
itemChildren = itemChildren.slice(0, nestedNode.query.limit ?? 100);
}

@@ -18,16 +18,22 @@ fields:
readonly: true
width: half

- field: note
interface: input
options:
placeholder: A description of this collection...
width: half

- field: icon
interface: select-icon
options:
width: half

- field: note
interface: input
- field: color
interface: select-color
options:
placeholder: A description of this collection...
width: full
placeholder: Choose a color...
width: half

- field: display_template
interface: system-display-template

@@ -71,7 +71,7 @@ const defaults: Record<string, any> = {
// Allows us to force certain environment variable into a type, instead of relying
// on the auto-parsed type in processValues. ref #3705
const typeMap: Record<string, string> = {
PORT: 'number',
PORT: 'string',

DB_NAME: 'string',
DB_USER: 'string',
@@ -92,6 +92,22 @@ env = processValues(env);

export default env;

/**
* When changes have been made during runtime, like in the CLI, we can refresh the env object with
* the newly created variables
*/
export function refreshEnv(): void {
env = {
...defaults,
...getEnv(),
...process.env,
};

process.env = env;

env = processValues(env);
}

function getEnv() {
const configPath = path.resolve(process.env.CONFIG_PATH || defaults.CONFIG_PATH);

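A hedged usage sketch for the new refreshEnv() export: per the comment above, the intended flow is a CLI step that creates new env values at runtime and wants the running process to pick them up (the helper function, file path, and import path here are hypothetical):

import fse from 'fs-extra';
import { refreshEnv } from '../env';

async function writeDotEnvAndReload(values: Record<string, string>): Promise<void> {
  const body = Object.entries(values)
    .map(([key, value]) => `${key}=${value}`)
    .join('\n');

  await fse.writeFile('.env', body);

  // Rebuild the in-memory env object (defaults + config file + process.env)
  // so everything that runs after this sees the new values.
  refreshEnv();
}
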
@@ -1,4 +1,4 @@
import database from '../../../database';
import getDatabase from '../../../database';
import { ContainsNullValuesException } from '../contains-null-values';
import { InvalidForeignKeyException } from '../invalid-foreign-key';
import { NotNullViolationException } from '../not-null-violation';
@@ -56,6 +56,8 @@ async function uniqueViolation(error: MSSQLError) {

const keyName = quoteMatches[1];

const database = getDatabase();

const constraintUsage = await database
.select('*')
.from('INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE')

@@ -43,28 +43,51 @@ function uniqueViolation(error: MySQLError) {

if (!matches) return error;

const collection = matches[1].slice(1, -1).split('.')[0];

let field = null;

/**
* MySQL's error doesn't return the field name in the error. In case the field is created through
* Directus (/ Knex), the key name will be `<collection>_<field>_unique` in which case we can pull
* the field name from the key name
*/
const indexName = matches[1].slice(1, -1).split('.')[1];

if (indexName?.startsWith(`${collection}_`) && indexName.endsWith('_unique')) {
field = indexName.slice(collection.length + 1, -7);
/** MySQL 8+ style error message */
if (matches[1].includes('.')) {
const collection = matches[1].slice(1, -1).split('.')[0];

let field = null;

const indexName = matches[1].slice(1, -1).split('.')[1];

if (indexName?.startsWith(`${collection}_`) && indexName.endsWith('_unique')) {
field = indexName.slice(collection.length + 1, -7);
}

const invalid = matches[0].slice(1, -1);

return new RecordNotUniqueException(field, {
collection,
field,
invalid,
});
} else {
/** MySQL 5.7 style error message */
const indexName = matches[1].slice(1, -1);

const collection = indexName.split('_')[0];

let field = null;

if (indexName?.startsWith(`${collection}_`) && indexName.endsWith('_unique')) {
field = indexName.slice(collection.length + 1, -7);
}

const invalid = matches[0].slice(1, -1);

return new RecordNotUniqueException(field, {
collection,
field,
invalid,
});
}

const invalid = matches[0].slice(1, -1);

return new RecordNotUniqueException(field, {
collection,
field,
invalid,
});
}

function numericValueOutOfRange(error: MySQLError) {

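Context for the matches[1].includes('.') branch above: the duplicate-key message changed shape between MySQL versions, which is why both forms have to be parsed. Illustrative (assumed) examples of the two formats:

// MySQL 8.0+ qualifies the key with the table name:
//   Duplicate entry 'a@example.com' for key 'directus_users.directus_users_email_unique'
// MySQL 5.7 reports the bare index name only:
//   Duplicate entry 'a@example.com' for key 'directus_users_email_unique'
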
@@ -1,4 +1,4 @@
import database from '../../database';
import getDatabase from '../../database';
import { extractError as mssql } from './dialects/mssql';
import { extractError as mysql } from './dialects/mysql';
import { extractError as oracle } from './dialects/oracle';
@@ -16,6 +16,8 @@ import { SQLError } from './dialects/types';
* - Value Too Long
*/
export async function translateDatabaseError(error: SQLError): Promise<any> {
const database = getDatabase();

switch (database.client.constructor.name) {
case 'Client_MySQL':
return mysql(error);

@@ -1,7 +1,7 @@
import express, { Router } from 'express';
import { ensureDir } from 'fs-extra';
import path from 'path';
import database from './database';
import getDatabase from './database';
import emitter from './emitter';
import env from './env';
import * as exceptions from './exceptions';
@@ -93,7 +93,7 @@ function registerHooks(hooks: string[]) {
}
}

const events = register({ services, exceptions, env, database, getSchema });
const events = register({ services, exceptions, env, database: getDatabase(), getSchema });
for (const [event, handler] of Object.entries(events)) {
emitter.on(event, handler);
}
@@ -126,6 +126,6 @@ function registerEndpoints(endpoints: string[], router: Router) {
const scopedRouter = express.Router();
router.use(`/${endpoint}/`, scopedRouter);

register(scopedRouter, { services, exceptions, env, database, getSchema });
register(scopedRouter, { services, exceptions, env, database: getDatabase(), getSchema });
}
}

@@ -1,6 +1,6 @@
import { RequestHandler } from 'express';
import jwt, { JsonWebTokenError, TokenExpiredError } from 'jsonwebtoken';
import database from '../database';
import getDatabase from '../database';
import env from '../env';
import { InvalidCredentialsException } from '../exceptions';
import asyncHandler from '../utils/async-handler';
@@ -21,6 +21,8 @@ const authenticate: RequestHandler = asyncHandler(async (req, res, next) => {

if (!req.token) return next();

const database = getDatabase();

if (isJWT(req.token)) {
let payload: { id: string };

@@ -1,9 +1,11 @@
import { RequestHandler } from 'express';
import database from '../database';
import getDatabase from '../database';
import { InvalidIPException } from '../exceptions';
import asyncHandler from '../utils/async-handler';

export const checkIP: RequestHandler = asyncHandler(async (req, res, next) => {
const database = getDatabase();

const role = await database
.select('ip_access')
.from('directus_roles')

@@ -1,7 +1,7 @@
import expressSession, { Store } from 'express-session';
import env from '../env';
import { getConfigFromEnv } from '../utils/get-config-from-env';
import database from '../database';
import getDatabase from '../database';
let store: Store | undefined = undefined;

if (env.SESSION_STORE === 'redis') {
@@ -20,7 +20,7 @@ if (env.SESSION_STORE === 'memcache') {
if (env.SESSION_STORE === 'database') {
const KnexSessionStore = require('connect-session-knex')(expressSession);
store = new KnexSessionStore({
knex: database,
knex: getDatabase(),
tablename: 'oauth_sessions', // optional. Defaults to 'sessions'
});
}

@@ -6,7 +6,7 @@ import { once } from 'lodash';
import qs from 'qs';
import url from 'url';
import createApp from './app';
import database from './database';
import getDatabase from './database';
import { emitAsyncSafe } from './emitter';
import logger from './logger';

@@ -94,6 +94,7 @@ export default async function createServer(): Promise<http.Server> {
}

async function onSignal() {
const database = getDatabase();
await database.destroy();
logger.info('Database connections destroyed');
}

@@ -2,7 +2,7 @@ import { Range, StatResponse } from '@directus/drive';
import { Knex } from 'knex';
import path from 'path';
import sharp, { ResizeOptions } from 'sharp';
import database from '../database';
import getDatabase from '../database';
import { RangeNotSatisfiableException, IllegalAssetTransformation } from '../exceptions';
import storage from '../storage';
import { AbstractServiceOptions, Accountability, Transformation } from '../types';
@@ -23,7 +23,7 @@ export class AssetsService {
authorizationService: AuthorizationService;

constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.knex = options.knex || getDatabase();
this.accountability = options.accountability || null;
this.authorizationService = new AuthorizationService(options);
}
@@ -44,7 +44,7 @@ export class AssetsService {
await this.authorizationService.checkAccess('read', 'directus_files', id);
}

const file = (await database.select('*').from('directus_files').where({ id }).first()) as File;
const file = (await this.knex.select('*').from('directus_files').where({ id }).first()) as File;

if (range) {
if (range.start >= file.filesize || (range.end && range.end >= file.filesize)) {

@@ -4,7 +4,7 @@ import { Knex } from 'knex';
import ms from 'ms';
import { nanoid } from 'nanoid';
import { authenticator } from 'otplib';
import database from '../database';
import getDatabase from '../database';
import emitter, { emitAsyncSafe } from '../emitter';
import env from '../env';
import {
@@ -37,7 +37,7 @@ export class AuthenticationService {
schema: SchemaOverview;

constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.knex = options.knex || getDatabase();
this.accountability = options.accountability || null;
this.activityService = new ActivityService({ knex: this.knex, schema: options.schema });
this.schema = options.schema;
@@ -59,7 +59,7 @@ export class AuthenticationService {

const { email, password, ip, userAgent, otp } = options;

let user = await database
let user = await this.knex
.select('id', 'password', 'role', 'tfa_secret', 'status')
.from('directus_users')
.whereRaw('LOWER(??) = ?', ['email', email.toLowerCase()])
@@ -114,7 +114,7 @@ export class AuthenticationService {
try {
await loginAttemptsLimiter.consume(user.id);
} catch (err) {
await database('directus_users').update({ status: 'suspended' }).where({ id: user.id });
await this.knex('directus_users').update({ status: 'suspended' }).where({ id: user.id });
user.status = 'suspended';

// This means that new attempts after the user has been re-activated will be accepted
@@ -164,7 +164,7 @@ export class AuthenticationService {
const refreshToken = nanoid(64);
const refreshTokenExpiration = new Date(Date.now() + ms(env.REFRESH_TOKEN_TTL as string));

await database('directus_sessions').insert({
await this.knex('directus_sessions').insert({
token: refreshToken,
user: user.id,
expires: refreshTokenExpiration,
@@ -172,7 +172,7 @@ export class AuthenticationService {
user_agent: userAgent,
});

await database('directus_sessions').delete().where('expires', '<', new Date());
await this.knex('directus_sessions').delete().where('expires', '<', new Date());

if (this.accountability) {
await this.activityService.createOne({
@@ -204,7 +204,7 @@ export class AuthenticationService {
throw new InvalidCredentialsException();
}

const record = await database
const record = await this.knex
.select<Session & { email: string; id: string }>(
'directus_sessions.*',
'directus_users.email',

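Why the switch from the module-level database import to this.knex matters in the methods above: when a service is constructed with an explicit knex option (typically a transaction), every query has to run through it, or it silently escapes the transaction. A hedged sketch of the calling pattern (the surrounding function is hypothetical):

import getDatabase from '../database';
import { getSchema } from '../utils/get-schema';
import { AuthenticationService } from '../services';

async function loginInsideTransaction(): Promise<void> {
  const schema = await getSchema();

  await getDatabase().transaction(async (trx) => {
    // service.knex === trx, so the directus_sessions queries above join
    // this transaction instead of going through the global pool.
    const service = new AuthenticationService({ knex: trx, schema });
    await service.authenticate({ email: 'admin@example.com', password: 'secret' });
  });
}
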
@@ -1,6 +1,6 @@
import { Knex } from 'knex';
import { cloneDeep, flatten, merge, uniq, uniqWith } from 'lodash';
import database from '../database';
import getDatabase from '../database';
import { FailedValidationException, ForbiddenException } from '../exceptions';
import {
AbstractServiceOptions,
@@ -28,7 +28,7 @@ export class AuthorizationService {
schema: SchemaOverview;

constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.knex = options.knex || getDatabase();
this.accountability = options.accountability || null;
this.schema = options.schema;
this.payloadService = new PayloadService('directus_permissions', {

@@ -2,7 +2,7 @@ import SchemaInspector from '@directus/schema';
import { Knex } from 'knex';
import cache from '../cache';
import { ALIAS_TYPES } from '../constants';
import database, { schemaInspector } from '../database';
import getDatabase, { getSchemaInspector } from '../database';
import { systemCollectionRows } from '../database/system-data/collections';
import env from '../env';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
@@ -27,13 +27,13 @@ export type RawCollection = {
export class CollectionsService {
knex: Knex;
accountability: Accountability | null;
schemaInspector: typeof schemaInspector;
schemaInspector: ReturnType<typeof SchemaInspector>;
schema: SchemaOverview;

constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.knex = options.knex || getDatabase();
this.accountability = options.accountability || null;
this.schemaInspector = options.knex ? SchemaInspector(options.knex) : schemaInspector;
this.schemaInspector = options.knex ? SchemaInspector(options.knex) : getSchemaInspector();
this.schema = options.schema;
}

@@ -3,7 +3,7 @@ import { Knex } from 'knex';
import { Column } from 'knex-schema-inspector/dist/types/column';
import cache from '../cache';
import { ALIAS_TYPES } from '../constants';
import database, { schemaInspector } from '../database';
import getDatabase, { getSchemaInspector } from '../database';
import { systemFieldRows } from '../database/system-data/fields/';
import emitter, { emitAsyncSafe } from '../emitter';
import env from '../env';
@@ -26,12 +26,12 @@ export class FieldsService {
accountability: Accountability | null;
itemsService: ItemsService;
payloadService: PayloadService;
schemaInspector: typeof schemaInspector;
schemaInspector: ReturnType<typeof SchemaInspector>;
schema: SchemaOverview;

constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.schemaInspector = options.knex ? SchemaInspector(options.knex) : schemaInspector;
this.knex = options.knex || getDatabase();
this.schemaInspector = options.knex ? SchemaInspector(options.knex) : getSchemaInspector();
this.accountability = options.accountability || null;
this.itemsService = new ItemsService('directus_fields', options);
this.payloadService = new PayloadService('directus_fields', options);

@@ -102,8 +102,9 @@ export class FilesService extends ItemsService {
if (meta.iptc) {
try {
payload.metadata.iptc = parseIPTC(meta.iptc);
payload.title = payload.title || payload.metadata.iptc.headline;
payload.title = payload.metadata.iptc.headline || payload.title;
payload.description = payload.description || payload.metadata.iptc.caption;
payload.tags = payload.metadata.iptc.keywords;
} catch (err) {
logger.warn(`Couldn't extract IPTC information from file`);
logger.warn(err);

@@ -44,7 +44,7 @@ import {
import { Knex } from 'knex';
import { flatten, get, mapKeys, merge, set, uniq } from 'lodash';
import ms from 'ms';
import database from '../database';
import getDatabase from '../database';
import env from '../env';
import { BaseException, GraphQLValidationException, InvalidPayloadException } from '../exceptions';
import { listExtensions } from '../extensions';
@@ -115,7 +115,7 @@ export class GraphQLService {

constructor(options: AbstractServiceOptions & { scope: 'items' | 'system' }) {
this.accountability = options?.accountability || null;
this.knex = options?.knex || database;
this.knex = options?.knex || getDatabase();
this.schema = options.schema;
this.scope = options.scope;
}

@@ -1,5 +1,5 @@
import { Knex } from 'knex';
import database from '../database';
import getDatabase from '../database';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import StreamArray from 'stream-json/streamers/StreamArray';
@@ -15,7 +15,7 @@ export class ImportService {
schema: SchemaOverview;

constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.knex = options.knex || getDatabase();
this.accountability = options.accountability || null;
this.schema = options.schema;
}

@@ -1,7 +1,7 @@
import { Knex } from 'knex';
import { clone, cloneDeep, merge, pick, without } from 'lodash';
import cache from '../cache';
import database from '../database';
import getDatabase from '../database';
import runAST from '../database/run-ast';
import emitter, { emitAsyncSafe } from '../emitter';
import env from '../env';
@@ -55,7 +55,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer

constructor(collection: string, options: AbstractServiceOptions) {
this.collection = collection;
this.knex = options.knex || database;
this.knex = options.knex || getDatabase();
this.accountability = options.accountability || null;
this.eventScope = this.collection.startsWith('directus_') ? this.collection.substring(9) : 'items';
this.schema = options.schema;
@@ -204,7 +204,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
schema: this.schema,
// This hook is called async. If we would pass the transaction here, the hook can be
// called after the transaction is done #5460
database: database,
database: getDatabase(),
});
}

@@ -516,7 +516,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
schema: this.schema,
// This hook is called async. If we would pass the transaction here, the hook can be
// called after the transaction is done #5460
database: database,
database: getDatabase(),
});
}

@@ -665,7 +665,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
schema: this.schema,
// This hook is called async. If we would pass the transaction here, the hook can be
// called after the transaction is done #5460
database: database,
database: getDatabase(),
});
}

@@ -2,7 +2,7 @@ import fse from 'fs-extra';
import { Knex } from 'knex';
import { Liquid } from 'liquidjs';
import path from 'path';
import database from '../../database';
import getDatabase from '../../database';
import env from '../../env';
import { InvalidPayloadException } from '../../exceptions';
import logger from '../../logger';
@@ -30,7 +30,7 @@ export class MailService {
constructor(opts: AbstractServiceOptions) {
this.schema = opts.schema;
this.accountability = opts.accountability || null;
this.knex = opts?.knex || database;
this.knex = opts?.knex || getDatabase();
}

async send(options: EmailOptions): Promise<void> {

@@ -1,5 +1,5 @@
import { Knex } from 'knex';
import database from '../database';
import getDatabase from '../database';
import { ForbiddenException } from '../exceptions';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types';
import { Query } from '../types/query';
@@ -12,7 +12,7 @@ export class MetaService {
schema: SchemaOverview;

constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.knex = options.knex || getDatabase();
this.accountability = options.accountability || null;
this.schema = options.schema;
}

@@ -2,9 +2,9 @@ import argon2 from 'argon2';
import { format, formatISO, parse, parseISO } from 'date-fns';
import Joi from 'joi';
import { Knex } from 'knex';
import { clone, cloneDeep, isObject, isPlainObject } from 'lodash';
import { clone, cloneDeep, isObject, isPlainObject, omit } from 'lodash';
import { v4 as uuidv4 } from 'uuid';
import database from '../database';
import getDatabase from '../database';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { AbstractServiceOptions, Accountability, Item, PrimaryKey, Query, SchemaOverview } from '../types';
import { toArray } from '../utils/to-array';
@@ -43,7 +43,7 @@ export class PayloadService {

constructor(collection: string, options: AbstractServiceOptions) {
this.accountability = options.accountability || null;
this.knex = options.knex || database;
this.knex = options.knex || getDatabase();
this.collection = collection;
this.schema = options.schema;

@@ -331,7 +331,13 @@ export class PayloadService {
.first());

if (exists) {
await itemsService.updateOne(relatedPrimaryKey, relatedRecord);
const fieldsToUpdate = omit(relatedRecord, relatedPrimary);

if (Object.keys(fieldsToUpdate).length > 0) {
await itemsService.updateOne(relatedPrimaryKey, relatedRecord, {
onRevisionCreate: (id) => revisions.push(id),
});
}
} else {
relatedPrimaryKey = await itemsService.createOne(relatedRecord, {
onRevisionCreate: (id) => revisions.push(id),
@@ -393,9 +399,13 @@ export class PayloadService {
.first());

if (exists) {
await itemsService.updateOne(relatedPrimaryKey, relatedRecord, {
onRevisionCreate: (id) => revisions.push(id),
});
const fieldsToUpdate = omit(relatedRecord, relatedPrimaryKeyField);

if (Object.keys(fieldsToUpdate).length > 0) {
await itemsService.updateOne(relatedPrimaryKey, relatedRecord, {
onRevisionCreate: (id) => revisions.push(id),
});
}
} else {
relatedPrimaryKey = await itemsService.createOne(relatedRecord, {
onRevisionCreate: (id) => revisions.push(id),

@@ -7,20 +7,21 @@ import { ItemsService, QueryOptions } from './items';
import { PermissionsService } from './permissions';
import SchemaInspector from '@directus/schema';
import { ForeignKey } from 'knex-schema-inspector/dist/types/foreign-key';
import database, { schemaInspector } from '../database';
import getDatabase, { getSchemaInspector } from '../database';
import { getDefaultIndexName } from '../utils/get-default-index-name';

export class RelationsService {
knex: Knex;
permissionsService: PermissionsService;
schemaInspector: typeof schemaInspector;
schemaInspector: ReturnType<typeof SchemaInspector>;
accountability: Accountability | null;
schema: SchemaOverview;
relationsItemService: ItemsService<RelationMeta>;

constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.knex = options.knex || getDatabase();
this.permissionsService = new PermissionsService(options);
this.schemaInspector = options.knex ? SchemaInspector(options.knex) : schemaInspector;
this.schemaInspector = options.knex ? SchemaInspector(options.knex) : getSchemaInspector();
this.schema = options.schema;
this.accountability = options.accountability || null;
this.relationsItemService = new ItemsService('directus_relations', {
@@ -159,8 +160,10 @@ export class RelationsService {
await trx.schema.alterTable(relation.collection!, async (table) => {
this.alterType(table, relation);

const constraintName: string = getDefaultIndexName('foreign', relation.collection!, relation.field!);

table
.foreign(relation.field!)
.foreign(relation.field!, constraintName)
.references(
`${relation.related_collection!}.${this.schema.collections[relation.related_collection!].primary}`
)
@@ -168,7 +171,15 @@ export class RelationsService {
});
}

await this.relationsItemService.createOne(metaRow);
const relationsItemService = new ItemsService('directus_relations', {
knex: trx,
schema: this.schema,
// We don't set accountability here. If you have read access to certain fields, you are
// allowed to extract the relations regardless of permissions to directus_relations. This
// happens in `filterForbidden` down below
});

await relationsItemService.createOne(metaRow);
});
}

@@ -201,15 +212,18 @@ export class RelationsService {
await this.knex.transaction(async (trx) => {
if (existingRelation.related_collection) {
await trx.schema.alterTable(collection, async (table) => {
let constraintName: string = getDefaultIndexName('foreign', collection, field);

// If the FK already exists in the DB, drop it first
if (existingRelation?.schema) {
table.dropForeign(field);
constraintName = existingRelation.schema.constraint_name || constraintName;
table.dropForeign(field, constraintName);
}

this.alterType(table, relation);

table
.foreign(field)
.foreign(field, constraintName || undefined)
.references(
`${existingRelation.related_collection!}.${
this.schema.collections[existingRelation.related_collection!].primary
@@ -219,11 +233,19 @@ export class RelationsService {
});
}

const relationsItemService = new ItemsService('directus_relations', {
knex: trx,
schema: this.schema,
// We don't set accountability here. If you have read access to certain fields, you are
// allowed to extract the relations regardless of permissions to directus_relations. This
// happens in `filterForbidden` down below
});

if (relation.meta) {
if (existingRelation?.meta) {
await this.relationsItemService.updateOne(existingRelation.meta.id, relation.meta);
await relationsItemService.updateOne(existingRelation.meta.id, relation.meta);
} else {
await this.relationsItemService.createOne({
await relationsItemService.createOne({
...(relation.meta || {}),
many_collection: relation.collection,
many_field: relation.field,
@@ -259,9 +281,9 @@ export class RelationsService {
}

await this.knex.transaction(async (trx) => {
if (existingRelation.schema) {
if (existingRelation.schema?.constraint_name) {
await trx.schema.alterTable(existingRelation.collection, (table) => {
table.dropForeign(existingRelation.field);
table.dropForeign(existingRelation.field, existingRelation.schema!.constraint_name!);
});
}

@@ -7,7 +7,7 @@ import { performance } from 'perf_hooks';
// @ts-ignore
import { version } from '../../package.json';
import cache from '../cache';
import database, { hasDatabaseConnection } from '../database';
import getDatabase, { hasDatabaseConnection } from '../database';
import env from '../env';
import logger from '../logger';
import { rateLimiter } from '../middleware/rate-limiter';

@@ -24,7 +24,7 @@ export class ServerService {
	schema: SchemaOverview;

	constructor(options: AbstractServiceOptions) {
		this.knex = options.knex || database;
		this.knex = options.knex || getDatabase();
		this.accountability = options.accountability || null;
		this.schema = options.schema;
		this.settingsService = new SettingsService({ knex: this.knex, schema: this.schema });
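
The recurring `options.knex || database` → `options.knex || getDatabase()` change swaps a connection created at import time for one created on first use. A simplified sketch of what such a lazy getter can look like (the real implementation lives in ../database and handles far more configuration):

import knex, { Knex } from 'knex';

let database: Knex | null = null;

export default function getDatabase(): Knex {
	// Connect lazily, on first use, rather than as an import side effect;
	// the CLI can then validate the environment before anything connects
	if (!database) {
		database = knex({
			client: process.env.DB_CLIENT || 'pg',
			connection: process.env.DB_CONNECTION_STRING,
		});
	}

	return database;
}
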
@@ -129,6 +129,7 @@ export class ServerService {
	}

	async function testDatabase(): Promise<Record<string, HealthCheck[]>> {
		const database = getDatabase();
		const client = env.DB_CLIENT;

		const checks: Record<string, HealthCheck[]> = {};

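testDatabase builds its health checks against the lazily acquired connection; timing a trivial query is the cheapest way to verify the pool answers at all. A hedged sketch of such a check (the response shape here is simplified, not the exact HealthCheck structure):

import { performance } from 'perf_hooks';
import { Knex } from 'knex';

async function checkResponseTime(database: Knex): Promise<number> {
	const start = performance.now();
	await database.raw('SELECT 1'); // round-trip that touches the connection but no tables
	return performance.now() - start;
}
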
@@ -5,7 +5,7 @@ import { cloneDeep, mergeWith } from 'lodash';
import { OpenAPIObject, OperationObject, PathItemObject, SchemaObject, TagObject } from 'openapi3-ts';
// @ts-ignore
import { version } from '../../package.json';
import database from '../database';
import getDatabase from '../database';
import env from '../env';
import {
	AbstractServiceOptions,

@@ -37,7 +37,7 @@ export class SpecificationService {

	constructor(options: AbstractServiceOptions) {
		this.accountability = options.accountability || null;
		this.knex = options.knex || database;
		this.knex = options.knex || getDatabase();
		this.schema = options.schema;

		this.fieldsService = new FieldsService(options);

@@ -80,7 +80,7 @@ class OASSpecsService implements SpecificationSubService {
		}
	) {
		this.accountability = options.accountability || null;
		this.knex = options.knex || database;
		this.knex = options.knex || getDatabase();
		this.schema = options.schema;

		this.fieldsService = fieldsService;

@@ -541,7 +541,7 @@ class GraphQLSpecsService implements SpecificationSubService {

	constructor(options: AbstractServiceOptions) {
		this.accountability = options.accountability || null;
		this.knex = options.knex || database;
		this.knex = options.knex || getDatabase();
		this.schema = options.schema;

		this.items = new GraphQLService({ ...options, scope: 'items' });

@@ -3,7 +3,7 @@ import jwt from 'jsonwebtoken';
import { Knex } from 'knex';
import { clone } from 'lodash';
import cache from '../cache';
import database from '../database';
import getDatabase from '../database';
import env from '../env';
import {
	FailedValidationException,

@@ -29,7 +29,7 @@ export class UsersService extends ItemsService {
	constructor(options: AbstractServiceOptions) {
		super('directus_users', options);

		this.knex = options.knex || database;
		this.knex = options.knex || getDatabase();
		this.accountability = options.accountability || null;
		this.service = new ItemsService('directus_users', options);
		this.schema = options.schema;

@@ -1,5 +1,5 @@
import { Knex } from 'knex';
import database from '../database';
import getDatabase from '../database';
import { systemCollectionRows } from '../database/system-data/collections';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { AbstractServiceOptions, Accountability, PrimaryKey, SchemaOverview } from '../types';

@@ -10,7 +10,7 @@ export class UtilsService {
	schema: SchemaOverview;

	constructor(options: AbstractServiceOptions) {
		this.knex = options.knex || database;
		this.knex = options.knex || getDatabase();
		this.accountability = options.accountability || null;
		this.schema = options.schema;
	}

@@ -1,7 +1,6 @@
import { LocalFileSystemStorage, Storage, StorageManager, StorageManagerConfig } from '@directus/drive';
import { AzureBlobWebServicesStorage } from '@directus/drive-azure';
import { GoogleCloudStorage } from '@directus/drive-gcs';
/** @todo dynamically load these storage adapters */
import { AmazonWebServicesS3Storage } from '@directus/drive-s3';
import env from './env';
import { getConfigFromEnv } from './utils/get-config-from-env';

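These drive adapters get registered on a StorageManager keyed by driver name; roughly as follows (the driver keys and config shape are assumptions, not verbatim from this file):

const storage = new StorageManager({ default: 'local', disks: {} } as StorageManagerConfig);

storage.registerDriver('s3', AmazonWebServicesS3Storage);
storage.registerDriver('gcs', GoogleCloudStorage);
storage.registerDriver('azure', AzureBlobWebServicesStorage);
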
api/src/utils/get-default-index-name.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import { customAlphabet } from 'nanoid';

const generateID = customAlphabet('abcdefghijklmnopqrstuvwxyz', 5);

/**
 * Generate an index name for a given collection + fields combination.
 *
 * Is based on the default index name generation of knex, but limits the index to a maximum of 64
 * characters (the max length for MySQL and MariaDB).
 *
 * @see
 * https://github.com/knex/knex/blob/fff6eb15d7088d4198650a2c6e673dedaf3b8f36/lib/schema/tablecompiler.js#L282-L297
 */
export function getDefaultIndexName(
	type: 'unique' | 'foreign' | 'index',
	collection: string,
	fields: string | string[]
): string {
	if (!Array.isArray(fields)) fields = fields ? [fields] : [];
	const table = collection.replace(/\.|-/g, '_');
	const indexName = (table + '_' + fields.join('_') + '_' + type).toLowerCase();

	if (indexName.length <= 64) return indexName;

	const suffix = `__${generateID()}_${type}`;
	const prefix = indexName.substring(0, 64 - suffix.length);

	return `${prefix}${suffix}`;
}

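For names that fit within 64 characters, the output is fully deterministic; only over-long names get truncated and suffixed with a random 5-letter ID. For example (collection and field names are hypothetical):

getDefaultIndexName('foreign', 'articles', 'author');
// => 'articles_author_foreign'

getDefaultIndexName('index', 'a_very_long_collection_name_that_keeps_going_and_going', ['title', 'subtitle']);
// => exactly 64 characters: a truncated prefix plus '__xxxxx_index', where xxxxx is random
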
@@ -98,6 +98,11 @@ export default function getLocalType(
		return 'decimal';
	}

	/** Handle MS SQL varchar(MAX) (eg TEXT) types */
	if (column.data_type === 'nvarchar' && column.max_length === -1) {
		return 'text';
	}

	if (field?.special?.includes('json')) return 'json';
	if (field?.special?.includes('hash')) return 'hash';
	if (field?.special?.includes('csv')) return 'csv';

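SQL Server reports nvarchar(MAX) columns with a max_length of -1, which is why the new branch catches unbounded strings before they would be treated as sized ones. Illustratively (the column objects follow the schema-inspector shape; the exact typing and the 'string' result for sized columns are assumptions):

getLocalType({ data_type: 'nvarchar', max_length: -1 } as any);
// => 'text', since nvarchar(MAX) behaves like TEXT

getLocalType({ data_type: 'nvarchar', max_length: 255 } as any);
// => presumably 'string': a sized nvarchar is an ordinary bounded string
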
@@ -11,13 +11,14 @@ import { toArray } from '../utils/to-array';
import getDefaultValue from './get-default-value';
import getLocalType from './get-local-type';
import { mergePermissions } from './merge-permissions';
import getDatabase from '../database';

export async function getSchema(options?: {
	accountability?: Accountability;
	database?: Knex;
}): Promise<SchemaOverview> {
	// Allows for use in the CLI
	const database = options?.database || (require('../database').default as Knex);
	const database = options?.database || getDatabase();
	const schemaInspector = SchemaInspector(database);

	const result: SchemaOverview = {

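With getDatabase in place, the inline require that was kept for the CLI is gone, but callers can still inject their own connection. Typical calls might look like this (`myKnex` is a hypothetical instance, not from this commit):

// Uses the shared, lazily created connection
const schema = await getSchema();

// Scripts and tests can pass an explicit Knex instance instead
const testSchema = await getSchema({ database: myKnex });
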
@@ -2,20 +2,6 @@ import env from '../env';
import logger from '../logger';

export function validateEnv(requiredKeys: string[]): void {
	if (env.DB_CLIENT && env.DB_CLIENT === 'sqlite3') {
		requiredKeys.push('DB_FILENAME');
	} else if (env.DB_CLIENT && env.DB_CLIENT === 'oracledb') {
		requiredKeys.push('DB_USER', 'DB_PASSWORD', 'DB_CONNECT_STRING');
	} else {
		if (env.DB_CLIENT === 'pg') {
			if (!env.DB_CONNECTION_STRING) {
				requiredKeys.push('DB_HOST', 'DB_PORT', 'DB_DATABASE', 'DB_USER');
			}
		} else {
			requiredKeys.push('DB_HOST', 'DB_PORT', 'DB_DATABASE', 'DB_USER', 'DB_PASSWORD');
		}
	}

	for (const requiredKey of requiredKeys) {
		if (requiredKey in env === false) {
			logger.error(`"${requiredKey}" Environment Variable is missing.`);

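After this removal, validateEnv no longer derives client-specific keys itself; callers pass exactly the keys they require. For instance (the key list is illustrative):

validateEnv(['KEY', 'SECRET', 'DB_CLIENT']);
// For each key absent from env, logs e.g.: "DB_CLIENT" Environment Variable is missing.
// and, judging by the surrounding code, aborts startup afterwards
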
@@ -1,6 +1,6 @@
import axios from 'axios';
import { ListenerFn } from 'eventemitter2';
import database from './database';
import getDatabase from './database';
import emitter from './emitter';
import logger from './logger';
import { Webhook } from './types';

@@ -10,6 +10,8 @@ let registered: { event: string; handler: ListenerFn }[] = [];
export async function register(): Promise<void> {
	unregister();

	const database = getDatabase();

	const webhooks = await database.select<Webhook[]>('*').from('directus_webhooks').where({ status: 'active' });

	for (const webhook of webhooks) {

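The loop body is cut off by the diff; conceptually, each active webhook gets an emitter listener that forwards the event payload over HTTP. A rough sketch of that idea (the event wiring and `eventsFor` helper are assumptions, not the exact Directus implementation):

for (const webhook of webhooks) {
	const handler: ListenerFn = async (data) => {
		try {
			// The real implementation honors webhook.method; POST is the common case
			await axios.post(webhook.url, data);
		} catch {
			logger.warn(`Webhook to "${webhook.url}" failed`);
		}
	};

	// `eventsFor` is a hypothetical helper mapping a webhook row to its event names
	for (const event of eventsFor(webhook)) {
		emitter.on(event, handler);
		registered.push({ event, handler });
	}
}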