Merge branch 'main' into aggregation

This commit is contained in:
rijkvanzanten
2021-08-06 16:14:29 -04:00
676 changed files with 15648 additions and 8806 deletions

View File

@@ -5,6 +5,9 @@ import env from '../env';
import logger from '../logger';
import { getConfigFromEnv } from '../utils/get-config-from-env';
import { validateEnv } from '../utils/validate-env';
import fse from 'fs-extra';
import path from 'path';
import { merge } from 'lodash';
let database: Knex | null = null;
let inspector: ReturnType<typeof SchemaInspector> | null = null;
@@ -65,6 +68,13 @@ export default function getDatabase(): Knex {
};
}
if (env.DB_CLIENT === 'mssql') {
// This brings MS SQL in line with the other DB vendors. We shouldn't do any automatic
// timezone conversion on the database level, especially not when other database vendors don't
// act the same
merge(knexConfig, { connection: { options: { useUTC: false } } });
}
database = knex(knexConfig);
const times: Record<string, number> = {};
@@ -94,8 +104,8 @@ export function getSchemaInspector(): ReturnType<typeof SchemaInspector> {
return inspector;
}
export async function hasDatabaseConnection(): Promise<boolean> {
const database = getDatabase();
export async function hasDatabaseConnection(database?: Knex): Promise<boolean> {
database = database ?? getDatabase();
try {
if (env.DB_CLIENT === 'oracledb') {
@@ -103,15 +113,22 @@ export async function hasDatabaseConnection(): Promise<boolean> {
} else {
await database.raw('SELECT 1');
}
return true;
} catch {
return false;
}
}
export async function validateDBConnection(): Promise<void> {
export async function validateDBConnection(database?: Knex): Promise<void> {
database = database ?? getDatabase();
try {
await hasDatabaseConnection();
if (env.DB_CLIENT === 'oracledb') {
await database.raw('select 1 from DUAL');
} else {
await database.raw('SELECT 1');
}
} catch (error) {
logger.error(`Can't connect to the database.`);
logger.error(error);
@@ -127,3 +144,35 @@ export async function isInstalled(): Promise<boolean> {
// exists when using the installer CLI.
return await inspector.hasTable('directus_collections');
}
/**
 * Check whether every known migration (system + custom) has been recorded as
 * completed in the `directus_migrations` table.
 *
 * @returns true when no migration is pending, false otherwise.
 *          Exits the process when the migration files can't be read.
 */
export async function validateMigrations(): Promise<boolean> {
	const database = getDatabase();

	try {
		// Bundled migrations live next to this file; skip the runner itself and type declarations
		const systemMigrations = (await fse.readdir(path.join(__dirname, 'migrations'))).filter(
			(file: string) => !file.startsWith('run') && !file.endsWith('.d.ts')
		);

		// Custom migrations live in the extensions folder; only compiled .js files count
		const customMigrationsPath = path.resolve(env.EXTENSIONS_PATH, 'migrations');
		const customMigrations = (await fse.pathExists(customMigrationsPath))
			? (await fse.readdir(customMigrationsPath)).filter((file: string) => file.endsWith('.js'))
			: [];

		// A migration's version is the filename prefix before the first dash
		const requiredVersions = [...systemMigrations, ...customMigrations].map((fileName) => fileName.split('-')[0]);

		const completedVersions = (await database.select('version').from('directus_migrations')).map(
			({ version }) => version
		);

		return requiredVersions.every((version) => completedVersions.includes(version));
	} catch (error) {
		logger.error(`Database migrations cannot be found`);
		logger.error(error);
		throw process.exit(1);
	}
}

View File

@@ -1,7 +1,6 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
import env from '../../env';
async function oracleAlterUrl(knex: Knex, type: string): Promise<void> {
await knex.raw('ALTER TABLE "directus_webhooks" ADD "url__temp" ?', [knex.raw(type)]);
@@ -23,7 +22,7 @@ export async function up(knex: Knex): Promise<void> {
}
export async function down(knex: Knex): Promise<void> {
if (env.DB_CLIENT === 'oracledb') {
if (knex.client instanceof Client_Oracledb) {
await oracleAlterUrl(knex, 'VARCHAR2(255)');
return;
}

View File

@@ -1,7 +1,6 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
import env from '../../env';
async function oracleAlterCollections(knex: Knex, type: string): Promise<void> {
await knex.raw('ALTER TABLE "directus_webhooks" ADD "collections__temp" ?', [knex.raw(type)]);
@@ -23,7 +22,7 @@ export async function up(knex: Knex): Promise<void> {
}
export async function down(knex: Knex): Promise<void> {
if (env.DB_CLIENT === 'oracledb') {
if (knex.client instanceof Client_Oracledb) {
await oracleAlterCollections(knex, 'VARCHAR2(255)');
return;
}

View File

@@ -1,12 +1,22 @@
import { Knex } from 'knex';
// @ts-ignore
import Client_Oracledb from 'knex/lib/dialects/oracledb';
export async function up(knex: Knex): Promise<void> {
	// This migration is intentionally a no-op for the oracledb client.
	const isOracle = knex.client instanceof Client_Oracledb;
	if (isOracle) return;

	// Widen filesize so large uploads don't overflow a 32-bit integer column
	await knex.schema.alterTable('directus_files', (filesTable) => {
		filesTable.bigInteger('filesize').nullable().defaultTo(null).alter();
	});
}
export async function down(knex: Knex): Promise<void> {
if (knex.client instanceof Client_Oracledb) {
return;
}
await knex.schema.alterTable('directus_files', (table) => {
table.integer('filesize').nullable().defaultTo(null).alter();
});

View File

@@ -0,0 +1,13 @@
import { Knex } from 'knex';
export async function up(knex: Knex): Promise<void> {
	// Add the JSON column that stores per-field display conditions
	await knex.schema.alterTable('directus_fields', (fieldsTable) => {
		fieldsTable.json('conditions');
	});
}
export async function down(knex: Knex): Promise<void> {
	// Revert `up`: drop the conditions column.
	// Fix: `up` adds the column to directus_fields, so it must be dropped from
	// directus_fields as well — previously this incorrectly targeted directus_files.
	await knex.schema.alterTable('directus_fields', (table) => {
		table.dropColumn('conditions');
	});
}

View File

@@ -0,0 +1,22 @@
import { Knex } from 'knex';
import { getDefaultIndexName } from '../../utils/get-default-index-name';
const indexName = getDefaultIndexName('foreign', 'directus_settings', 'storage_default_folder');
export async function up(knex: Knex): Promise<void> {
	// Add an optional default-upload-folder reference to the settings table.
	// Deleting the referenced folder nulls the setting instead of failing.
	await knex.schema.alterTable('directus_settings', (settingsTable) => {
		const column = settingsTable.uuid('storage_default_folder');
		column.references('id').inTable('directus_folders').withKeyName(indexName).onDelete('SET NULL');
	});
}
export async function down(knex: Knex): Promise<void> {
	// Revert `up`: the column and foreign key were added to directus_settings,
	// so drop them there — previously this incorrectly targeted directus_files.
	await knex.schema.alterTable('directus_settings', (table) => {
		table.dropForeign(['storage_default_folder'], indexName);
		table.dropColumn('storage_default_folder');
	});
}

View File

@@ -0,0 +1,49 @@
import { Knex } from 'knex';
import logger from '../../logger';
export async function up(knex: Knex): Promise<void> {
	// Every `group-divider` field becomes `group-standard`, carrying its old
	// icon/color over into the new header options.
	const dividerGroups = await knex.select('*').from('directus_fields').where('interface', '=', 'group-divider');

	for (const dividerGroup of dividerGroups) {
		const newOptions: { showHeader: true; headerIcon?: string; headerColor?: string } = { showHeader: true };

		if (dividerGroup.options) {
			try {
				const previousOptions =
					typeof dividerGroup.options === 'string' ? JSON.parse(dividerGroup.options) : dividerGroup.options;

				if (previousOptions.icon) newOptions.headerIcon = previousOptions.icon;
				if (previousOptions.color) newOptions.headerColor = previousOptions.color;
			} catch (err) {
				// Conversion is best-effort: keep migrating with default options
				logger.warn(`Couldn't convert previous options from field ${dividerGroup.collection}.${dividerGroup.field}`);
				logger.warn(err);
			}
		}

		try {
			await knex('directus_fields')
				.update({ interface: 'group-standard', options: JSON.stringify(newOptions) })
				.where('id', '=', dividerGroup.id);
		} catch (err) {
			logger.warn(`Couldn't update ${dividerGroup.collection}.${dividerGroup.field} to new group interface`);
			logger.warn(err);
		}
	}

	// `group-raw` maps straight onto `group-standard` with no option changes
	await knex('directus_fields').update({ interface: 'group-standard' }).where({ interface: 'group-raw' });
}
export async function down(knex: Knex): Promise<void> {
	// Best-effort revert: every standard group collapses back to `group-raw`
	// (the original divider/raw distinction is not recoverable).
	await knex('directus_fields').update({ interface: 'group-raw' }).where('interface', '=', 'group-standard');
}

View File

@@ -0,0 +1,13 @@
import { Knex } from 'knex';
export async function up(knex: Knex): Promise<void> {
	// New flag marking a field as required; existing rows default to false
	await knex.schema.alterTable('directus_fields', (fieldsTable) => {
		fieldsTable.boolean('required').defaultTo(false);
	});
}
export async function down(knex: Knex): Promise<void> {
	// Revert `up`: remove the required flag again
	await knex.schema.alterTable('directus_fields', (fieldsTable) => {
		fieldsTable.dropColumn('required');
	});
}

View File

@@ -0,0 +1,35 @@
import { Knex } from 'knex';
export async function up(knex: Knex): Promise<void> {
	// `group-standard` is split in two: groups that render a header become
	// `group-detail`, the rest become `group-raw`.
	const groups = await knex.select('*').from('directus_fields').where({ interface: 'group-standard' });

	const withHeader: typeof groups = [];
	const withoutHeader: typeof groups = [];

	for (const group of groups) {
		// Options may be stored as a JSON string depending on the DB vendor
		const options = typeof group.options === 'string' ? JSON.parse(group.options) : group.options || {};
		(options.showHeader === true ? withHeader : withoutHeader).push(group);
	}

	for (const field of withoutHeader) {
		await knex('directus_fields').update({ interface: 'group-raw' }).where({ id: field.id });
	}

	for (const field of withHeader) {
		await knex('directus_fields').update({ interface: 'group-detail' }).where({ id: field.id });
	}
}
export async function down(knex: Knex): Promise<void> {
	// Merge both new interfaces back into the single `group-standard` interface
	await knex('directus_fields')
		.update({ interface: 'group-standard' })
		.whereIn('interface', ['group-detail', 'group-raw']);
}

View File

@@ -0,0 +1,94 @@
import { Knex } from 'knex';
// Change image metadata structure to match the output from 'exifr'
export async function up(knex: Knex): Promise<void> {
	const files = await knex
		.select<{ id: number; metadata: string }[]>('id', 'metadata')
		.from('directus_files')
		.whereNotNull('metadata');

	for (const { id, metadata } of files) {
		let prevMetadata;

		try {
			prevMetadata = JSON.parse(metadata);
		} catch {
			// Unparseable metadata is left untouched
			continue;
		}

		// Only files carrying legacy 'exif' data need rewriting
		if (!prevMetadata.exif) continue;

		// The new structure is the old 'exif' object with three keys renamed
		// to match exifr's output: image -> ifd0, thumbnail -> ifd1,
		// interoperability -> interop
		const newMetadata = prevMetadata.exif;

		const renames: [string, string][] = [
			['image', 'ifd0'],
			['thumbnail', 'ifd1'],
			['interoperability', 'interop'],
		];

		for (const [oldKey, newKey] of renames) {
			if (newMetadata[oldKey]) {
				newMetadata[newKey] = newMetadata[oldKey];
				delete newMetadata[oldKey];
			}
		}

		// 'icc' and 'iptc' move from the top level into the new structure
		if (prevMetadata.icc) newMetadata.icc = prevMetadata.icc;
		if (prevMetadata.iptc) newMetadata.iptc = prevMetadata.iptc;

		await knex('directus_files')
			.update({ metadata: JSON.stringify(newMetadata) })
			.where({ id });
	}
}
export async function down(knex: Knex): Promise<void> {
	// Revert the metadata structure to the pre-exifr shape: nest everything
	// under 'exif' again and restore the verbose key names.
	const files = await knex
		.select<{ id: number; metadata: string }[]>('id', 'metadata')
		.from('directus_files')
		.whereNotNull('metadata')
		.whereNot('metadata', '{}');

	for (const { id, metadata } of files) {
		let prevMetadata;

		try {
			prevMetadata = JSON.parse(metadata);
		} catch {
			// Fix: consistent with `up`, skip rows whose metadata isn't valid
			// JSON instead of letting one bad row abort the whole migration.
			continue;
		}

		// Update only required if metadata has keys other than 'icc' and 'iptc'
		if (Object.keys(prevMetadata).filter((key) => key !== 'icc' && key !== 'iptc').length > 0) {
			// Put all data under 'exif' and rename/move keys afterwards
			const newMetadata: { exif: Record<string, unknown>; icc?: unknown; iptc?: unknown } = { exif: prevMetadata };

			if (newMetadata.exif.ifd0) {
				newMetadata.exif.image = newMetadata.exif.ifd0;
				delete newMetadata.exif.ifd0;
			}

			if (newMetadata.exif.ifd1) {
				newMetadata.exif.thumbnail = newMetadata.exif.ifd1;
				delete newMetadata.exif.ifd1;
			}

			if (newMetadata.exif.interop) {
				newMetadata.exif.interoperability = newMetadata.exif.interop;
				delete newMetadata.exif.interop;
			}

			if (newMetadata.exif.icc) {
				newMetadata.icc = newMetadata.exif.icc;
				delete newMetadata.exif.icc;
			}

			if (newMetadata.exif.iptc) {
				newMetadata.iptc = newMetadata.exif.iptc;
				delete newMetadata.exif.iptc;
			}

			await knex('directus_files')
				.update({ metadata: JSON.stringify(newMetadata) })
				.where({ id });
		}
	}
}

View File

@@ -5,12 +5,7 @@ import fse from 'fs-extra';
import { Knex } from 'knex';
import path from 'path';
import env from '../../env';
type Migration = {
version: string;
name: string;
timestamp: Date;
};
import { Migration } from '../../types';
export default async function run(database: Knex, direction: 'up' | 'down' | 'latest'): Promise<void> {
let migrationFiles = await fse.readdir(__dirname);

View File

@@ -7,7 +7,7 @@ import { applyFunctionToColumnName } from '../utils/apply-function-to-column-nam
import applyQuery from '../utils/apply-query';
import { getColumn } from '../utils/get-column';
import { stripFunction } from '../utils/strip-function';
import { toArray } from '../utils/to-array';
import { toArray } from '@directus/shared/utils';
import getDatabase from './index';
type RunASTOptions = {

View File

@@ -3,13 +3,13 @@ import yaml from 'js-yaml';
import { Knex } from 'knex';
import { isObject } from 'lodash';
import path from 'path';
import { types } from '../../types';
import { Type } from '@directus/shared/types';
type TableSeed = {
table: string;
columns: {
[column: string]: {
type?: typeof types[number];
type?: Type;
primary?: boolean;
nullable?: boolean;
default?: any;
@@ -45,6 +45,8 @@ export default async function runSeed(database: Knex): Promise<void> {
for (const [columnName, columnInfo] of Object.entries(seedData.columns)) {
let column: Knex.ColumnBuilder;
if (columnInfo.type === 'alias' || columnInfo.type === 'unknown') return;
if (columnInfo.type === 'string') {
column = tableBuilder.string(columnName, columnInfo.length);
} else if (columnInfo.increments) {

View File

@@ -8,6 +8,7 @@ defaults:
note: null
translations: null
display_template: null
accountability: 'all'
data:
- collection: directus_activity

View File

@@ -73,3 +73,8 @@ fields:
- collection: directus_fields
field: note
width: half
- collection: directus_fields
field: conditions
hidden: true
special: json

View File

@@ -1,7 +1,7 @@
import fse from 'fs-extra';
import { merge } from 'lodash';
import path from 'path';
import { FieldMeta } from '../../../types';
import { FieldMeta } from '@directus/shared/types';
import { requireYAML } from '../../../utils/require-yaml';
const defaults = requireYAML(require.resolve('./_defaults.yaml'));

View File

@@ -124,7 +124,7 @@ fields:
options:
slug: true
onlyOnCreate: false
width: half
width: full
- field: fit
name: Fit
type: string
@@ -173,6 +173,7 @@ fields:
step: 1
width: half
- field: withoutEnlargement
name: Upscaling
type: boolean
schema:
default_value: false
@@ -181,6 +182,51 @@ fields:
width: half
options:
label: Don't upscale images
- field: format
name: Format
type: string
schema:
is_nullable: false
default_value: ''
meta:
interface: select-dropdown
options:
allowNone: true
choices:
- value: jpeg
text: JPEG
- value: png
text: PNG
- value: webp
text: WebP
- value: tiff
text: TIFF
width: half
- field: transforms
name: Additional Transformations
type: json
schema:
is_nullable: false
default_value: []
meta:
note:
The Sharp method name and its arguments. See https://sharp.pixelplumbing.com/api-constructor for more
information.
interface: json
options:
template: >
[
["blur", 45],
["grayscale"],
["extend", { "right": 500, "background": "rgb(255, 0, 0)" }]
]
placeholder: >
[
["blur", 45],
["grayscale"],
["extend", { "right": 500, "background": "rgb(255, 0, 0)" }]
]
width: full
template: '{{key}}'
special: json
width: full
@@ -197,6 +243,11 @@ fields:
text: Presets Only
width: half
- field: storage_default_folder
interface: system-folder
width: half
note: Default folder where new files are uploaded
- field: overrides_divider
interface: presentation-divider
options: