Mirror of https://github.com/directus/directus.git (synced 2026-02-02 03:25:03 -05:00)
Merge branch 'main' into insights
@@ -143,6 +143,7 @@ EMAIL_SENDMAIL_PATH="/usr/sbin/sendmail"
# EMAIL_SMTP_HOST="localhost"
# EMAIL_SMTP_PORT=465
# EMAIL_SMTP_SECURE=false # Use TLS
# EMAIL_SMTP_IGNORE_TLS=false
# EMAIL_SMTP_USER="username"
# EMAIL_SMTP_PASSWORD="password"

@@ -1,6 +1,6 @@
{
"name": "directus",
"version": "9.0.0-rc.73",
"version": "9.0.0-rc.75",
"license": "GPL-3.0-only",
"homepage": "https://github.com/directus/directus#readme",
"description": "Directus is a real-time API and App dashboard for managing SQL database content.",
@@ -66,14 +66,14 @@
"example.env"
],
"dependencies": {
"@directus/app": "9.0.0-rc.73",
"@directus/drive": "9.0.0-rc.73",
"@directus/drive-azure": "9.0.0-rc.73",
"@directus/drive-gcs": "9.0.0-rc.73",
"@directus/drive-s3": "9.0.0-rc.73",
"@directus/format-title": "9.0.0-rc.73",
"@directus/schema": "9.0.0-rc.73",
"@directus/specs": "9.0.0-rc.73",
"@directus/app": "9.0.0-rc.75",
"@directus/drive": "9.0.0-rc.75",
"@directus/drive-azure": "9.0.0-rc.75",
"@directus/drive-gcs": "9.0.0-rc.75",
"@directus/drive-s3": "9.0.0-rc.75",
"@directus/format-title": "9.0.0-rc.75",
"@directus/schema": "9.0.0-rc.75",
"@directus/specs": "9.0.0-rc.75",
"@godaddy/terminus": "^4.9.0",
"argon2": "^0.28.1",
"async": "^3.2.0",
@@ -90,9 +90,9 @@
"date-fns": "^2.21.1",
"deep-map": "^2.0.0",
"destroy": "^1.0.4",
"dotenv": "^9.0.2",
"dotenv": "^10.0.0",
"eventemitter2": "^6.4.3",
"execa": "^5.0.1",
"execa": "^5.1.1",
"exif-reader": "^1.0.3",
"express": "^4.17.1",
"express-pino-logger": "^6.0.0",
@@ -110,13 +110,14 @@
"jsonwebtoken": "^8.5.1",
"keyv": "^4.0.3",
"knex": "^0.95.6",
"knex-schema-inspector": "^1.5.6",
"knex-schema-inspector": "^1.5.7",
"liquidjs": "^9.25.0",
"lodash": "^4.17.21",
"macos-release": "^2.4.1",
"mime-types": "^2.1.31",
"ms": "^2.1.3",
"nanoid": "^3.1.23",
"node-cron": "^3.0.0",
"node-machine-id": "^1.1.12",
"nodemailer": "^6.6.1",
"openapi3-ts": "^2.0.0",
@@ -135,7 +136,7 @@
"optionalDependencies": {
"@keyv/redis": "^2.1.2",
"connect-memcached": "^1.0.0",
"connect-redis": "^5.2.0",
"connect-redis": "^6.0.0",
"connect-session-knex": "^2.1.0",
"ioredis": "^4.27.2",
"keyv-memcache": "^1.2.5",
@@ -169,9 +170,10 @@
"@types/mime-types": "^2.1.0",
"@types/ms": "^0.7.31",
"@types/node": "^15.12.0",
"@types/node-cron": "^2.0.3",
"@types/nodemailer": "^6.4.1",
"@types/qs": "^6.9.6",
"@types/sharp": "^0.28.1",
"@types/sharp": "^0.28.3",
"@types/stream-json": "^1.7.0",
"@types/uuid": "^8.3.0",
"@types/uuid-validate": "^0.0.1",

@@ -110,10 +110,9 @@ export default async function createApp(): Promise<express.Application> {
const adminPath = require.resolve('@directus/app/dist/index.html');
const publicUrl = env.PUBLIC_URL.endsWith('/') ? env.PUBLIC_URL : env.PUBLIC_URL + '/';

// Prefix all href/src in the index html with the APIs public path
// Set the App's base path according to the APIs public URL
let html = fse.readFileSync(adminPath, 'utf-8');
html = html.replace(/href="\//g, `href="${publicUrl}`);
html = html.replace(/src="\//g, `src="${publicUrl}`);
html = html.replace(/<meta charset="utf-8" \/>/, `<meta charset="utf-8" />\n\t\t<base href="${publicUrl}admin/">`);

app.get('/', (req, res, next) => {
if (env.ROOT_REDIRECT) {

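For clarity, here is a small sketch of what the base-path rewrite above produces; the PUBLIC_URL value and the trimmed-down HTML string are illustrative assumptions, not part of the commit:

```ts
// Illustrative: the same replace calls as above, applied to a minimal index.html.
const publicUrl = 'https://example.com/api/';
let html = '<meta charset="utf-8" /><script src="/assets/index.js"></script>';
html = html.replace(/src="\//g, `src="${publicUrl}`);
html = html.replace(/<meta charset="utf-8" \/>/, `<meta charset="utf-8" />\n\t\t<base href="${publicUrl}admin/">`);
// A <base href="https://example.com/api/admin/"> tag is now inserted right after the charset meta,
// and the asset reference points at https://example.com/api/assets/index.js.
```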
@@ -296,7 +296,7 @@ router.post(
);

router.post(
'/me/tfa/enable/',
'/me/tfa/generate/',
asyncHandler(async (req, res, next) => {
if (!req.accountability?.user) {
throw new InvalidCredentialsException();
@@ -317,7 +317,7 @@ router.post(
});
await authService.verifyPassword(req.accountability.user, req.body.password);

const { url, secret } = await service.enableTFA(req.accountability.user);
const { url, secret } = await service.generateTFA(req.accountability.user);

res.locals.payload = { data: { secret, otpauth_url: url } };
return next();
@@ -325,6 +325,33 @@ router.post(
respond
);

router.post(
'/me/tfa/enable/',
asyncHandler(async (req, res, next) => {
if (!req.accountability?.user) {
throw new InvalidCredentialsException();
}

if (!req.body.secret) {
throw new InvalidPayloadException(`"secret" is required`);
}

if (!req.body.otp) {
throw new InvalidPayloadException(`"otp" is required`);
}

const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});

await service.enableTFA(req.accountability.user, req.body.otp, req.body.secret);

return next();
}),
respond
);

router.post(
'/me/tfa/disable',
asyncHandler(async (req, res, next) => {

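Taken together, the routes above split TFA enrollment into a generate step and a confirm step. A minimal client-side sketch of that flow; the `/users` prefix, token handling, and variable names are illustrative assumptions, while the relative paths and payload fields come from the diff:

```ts
// Hypothetical client flow for the new two-step TFA enrollment.
const accessToken = '...'; // assumption: a valid user access token
const currentPassword = '...'; // assumption: the user's current password
const headers = { 'Content-Type': 'application/json', Authorization: `Bearer ${accessToken}` };

// Step 1: verify the password and receive a fresh secret plus an otpauth URL to render as a QR code.
const generateRes = await fetch('/users/me/tfa/generate', {
	method: 'POST',
	headers,
	body: JSON.stringify({ password: currentPassword }),
});
const { data } = await generateRes.json(); // { secret, otpauth_url }

// Step 2: once the user has scanned the QR code and typed a one-time password,
// confirm enrollment by posting both the OTP and the secret back.
const userEnteredOtp = '123456'; // assumption: code read from the authenticator app
await fetch('/users/me/tfa/enable', {
	method: 'POST',
	headers,
	body: JSON.stringify({ otp: userEnteredOtp, secret: data.secret }),
});
```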
@@ -0,0 +1,13 @@
import { Knex } from 'knex';

export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_collections', (table) => {
table.json('item_duplication_fields').nullable();
});
}

export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable('directus_collections', (table) => {
table.dropColumn('item_duplication_fields');
});
}
@@ -179,3 +179,19 @@ fields:
- text: '$t:field_options.directus_collections.do_not_track_anything'
value: null
width: half

- field: duplication_divider
special:
- alias
- no-data
interface: presentation-divider
options:
icon: content_copy
title: Duplication

- field: item_duplication_fields
special:
- json
interface: code
options:
language: JSON

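The migration and field options above add an `item_duplication_fields` JSON column on `directus_collections`; per the `CollectionMeta` type change further down it holds an array of field names (or null) describing which fields to carry over when an item is duplicated. A purely illustrative value, with made-up field names:

```ts
// Illustrative only: the kind of value the new item_duplication_fields column could store.
const itemDuplicationFields: string[] = ['status', 'title', 'tags'];
```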
@@ -7,11 +7,10 @@ import dotenv from 'dotenv';
import fs from 'fs';
import { clone, toNumber, toString } from 'lodash';
import path from 'path';
import logger from './logger';
import { requireYAML } from './utils/require-yaml';
import { toArray } from './utils/to-array';

const acceptableEnvTypes = ['string', 'number', 'regex', 'array'];
const acceptedEnvTypes = ['string', 'number', 'regex', 'array'];

const defaults: Record<string, any> = {
CONFIG_PATH: path.resolve(process.cwd(), '.env'),
@@ -125,7 +124,7 @@ function getEnv() {
return exported;
}

logger.warn(
throw new Error(
`Invalid JS configuration file export type. Requires one of "function", "object", received: "${typeof exported}"`
);
}
@@ -141,11 +140,11 @@ function getEnv() {
return data as Record<string, string>;
}

logger.warn('Invalid YAML configuration. Root has to ben an object.');
throw new Error('Invalid YAML configuration. Root has to be an object.');
}

// Default to env vars plain text files
return dotenv.parse(fs.readFileSync(configPath).toString());
return dotenv.parse(fs.readFileSync(configPath, { encoding: 'utf8' }));
}

function getVariableType(variable: string) {
@@ -175,12 +174,33 @@ function getEnvironmentValueByType(envVariableString: string) {
function processValues(env: Record<string, any>) {
env = clone(env);

for (const [key, value] of Object.entries(env)) {
if (typeof value === 'string' && acceptableEnvTypes.some((envType) => value.includes(`${envType}:`))) {
for (let [key, value] of Object.entries(env)) {
// If key ends with '_FILE', try to get the value from the file defined in this variable
// and store it in the variable with the same name but without '_FILE' at the end
let newKey;
if (key.length > 5 && key.endsWith('_FILE')) {
try {
value = fs.readFileSync(value, { encoding: 'utf8' });
newKey = key.slice(0, -5);
if (newKey in env) {
throw new Error(
`Duplicate environment variable encountered: you can't use "${key}" and "${newKey}" simultaneously.`
);
}
key = newKey;
} catch {
throw new Error(`Failed to read value from file "${value}", defined in environment variable "${key}".`);
}
}

// Convert values with a type prefix
// (see https://docs.directus.io/reference/environment-variables/#environment-syntax-prefix)
if (typeof value === 'string' && acceptedEnvTypes.some((envType) => value.includes(`${envType}:`))) {
env[key] = getEnvironmentValueByType(value);
continue;
}

// Convert values where the key is defined in typeMap
if (typeMap[key]) {
switch (typeMap[key]) {
case 'number':
@@ -193,14 +213,42 @@ function processValues(env: Record<string, any>) {
env[key] = toArray(value);
break;
}

continue;
}

if (value === 'true') env[key] = true;
if (value === 'false') env[key] = false;
if (value === 'null') env[key] = null;
if (String(value).startsWith('0') === false && isNaN(value) === false && value.length > 0) env[key] = Number(value);
// Try to convert remaining values:
// - boolean values to boolean
// - 'null' to null
// - number values (> 0 <= Number.MAX_SAFE_INTEGER) to number
if (value === 'true') {
env[key] = true;
continue;
}

if (value === 'false') {
env[key] = false;
continue;
}

if (value === 'null') {
env[key] = null;
continue;
}

if (
String(value).startsWith('0') === false &&
isNaN(value) === false &&
value.length > 0 &&
value <= Number.MAX_SAFE_INTEGER
) {
env[key] = Number(value);
continue;
}

// If '_FILE' variable hasn't been processed yet, store it as it is (string)
if (newKey) {
env[key] = value;
}
}

return env;

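The new `_FILE` handling above lets any variable be supplied via a file path (for example a Docker secret). A standalone sketch of the same convention; the variable name and file path are illustrative assumptions:

```ts
// Illustrative: DB_PASSWORD_FILE=/run/secrets/db_password is read from disk and exposed as DB_PASSWORD.
// Defining both DB_PASSWORD and DB_PASSWORD_FILE is now rejected as a duplicate.
import fs from 'fs';

function resolveFileVariables(env: Record<string, string>): Record<string, string> {
	const resolved: Record<string, string> = {};
	for (let [key, value] of Object.entries(env)) {
		if (key.length > 5 && key.endsWith('_FILE')) {
			const strippedKey = key.slice(0, -5);
			if (strippedKey in env) {
				throw new Error(`Duplicate environment variable encountered: "${key}" and "${strippedKey}"`);
			}
			value = fs.readFileSync(value, { encoding: 'utf8' });
			key = strippedKey;
		}
		resolved[key] = value;
	}
	return resolved;
}

resolveFileVariables({ DB_PASSWORD_FILE: '/run/secrets/db_password' }); // → { DB_PASSWORD: '<file contents>' }
```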
@@ -11,6 +11,7 @@ import * as services from './services';
import { EndpointRegisterFunction, HookRegisterFunction } from './types';
import { getSchema } from './utils/get-schema';
import listFolders from './utils/list-folders';
import { schedule, validate } from 'node-cron';

export async function ensureFoldersExist(): Promise<void> {
const folders = ['endpoints', 'hooks', 'interfaces', 'modules', 'layouts', 'displays'];
@@ -94,8 +95,19 @@ function registerHooks(hooks: string[]) {
}

const events = register({ services, exceptions, env, database: getDatabase(), getSchema });

for (const [event, handler] of Object.entries(events)) {
emitter.on(event, handler);
if (event.startsWith('cron(')) {
const cron = event.match(/\(([^)]+)\)/)?.[1];

if (!cron || validate(cron) === false) {
logger.warn(`Couldn't register cron hook. Provided cron is invalid: ${cron}`);
} else {
schedule(cron, handler);
}
} else {
emitter.on(event, handler);
}
}
}
}

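With the node-cron integration above, a hook can register a scheduled handler by using a `cron(<expression>)` event key instead of an emitter event. A minimal hook sketch; the file location and handler body are illustrative assumptions:

```ts
// extensions/hooks/nightly-cleanup/index.ts (illustrative)
// The default export returns an event → handler map; keys of the form 'cron(<expression>)'
// are now scheduled via node-cron, everything else stays on the event emitter.
export default function registerHook(): Record<string, () => void> {
	return {
		'cron(0 3 * * *)': () => {
			// Runs every day at 03:00; invalid expressions are skipped with a warning.
			console.log('Running nightly cleanup');
		},
	};
}
```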
@@ -4,6 +4,7 @@

import env from './env';
import { toArray } from './utils/to-array';
import { getConfigFromEnv } from './utils/get-config-from-env';

const enabledProviders = toArray(env.OAUTH_PROVIDERS).map((provider) => provider.toLowerCase());

@@ -16,23 +17,8 @@ const config: any = {
},
};

for (const [key, value] of Object.entries(env)) {
if (key.startsWith('OAUTH') === false) continue;

const parts = key.split('_');
const provider = parts[1].toLowerCase();

if (enabledProviders.includes(provider) === false) continue;

// OAUTH <PROVIDER> SETTING = VALUE
parts.splice(0, 2);

const configKey = parts.join('_').toLowerCase();

config[provider] = {
...(config[provider] || {}),
[configKey]: value,
};
for (const provider of enabledProviders) {
config[provider] = getConfigFromEnv(`OAUTH_${provider.toUpperCase()}_`, undefined, 'underscore');
}

export default config;

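To illustrate the refactor above (provider and variable names are assumptions): with `OAUTH_PROVIDERS="github"`, each enabled provider's settings are now gathered by `getConfigFromEnv` using the new 'underscore' transform, so the keys keep their snake_case form:

```ts
// Illustrative: given
//   OAUTH_PROVIDERS="github"
//   OAUTH_GITHUB_CLIENT_ID="abc"
//   OAUTH_GITHUB_CLIENT_SECRET="xyz"
// the loop above yields roughly:
const config = {
	github: {
		client_id: 'abc',
		client_secret: 'xyz',
	},
};
```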
@@ -2,54 +2,48 @@ import nodemailer, { Transporter } from 'nodemailer';
import env from './env';
import logger from './logger';

let transporter: Transporter | null = null;
let transporter: Transporter;

if (env.EMAIL_TRANSPORT === 'sendmail') {
transporter = nodemailer.createTransport({
sendmail: true,
newline: env.EMAIL_SENDMAIL_NEW_LINE || 'unix',
path: env.EMAIL_SENDMAIL_PATH || '/usr/sbin/sendmail',
});
} else if (env.EMAIL_TRANSPORT.toLowerCase() === 'smtp') {
let auth: boolean | { user?: string; pass?: string } = false;
export default function getMailer(): Transporter {
if (transporter) return transporter;

if (env.EMAIL_SMTP_USER || env.EMAIL_SMTP_PASSWORD) {
auth = {
user: env.EMAIL_SMTP_USER,
pass: env.EMAIL_SMTP_PASSWORD,
};
if (env.EMAIL_TRANSPORT === 'sendmail') {
transporter = nodemailer.createTransport({
sendmail: true,
newline: env.EMAIL_SENDMAIL_NEW_LINE || 'unix',
path: env.EMAIL_SENDMAIL_PATH || '/usr/sbin/sendmail',
});
} else if (env.EMAIL_TRANSPORT.toLowerCase() === 'smtp') {
let auth: boolean | { user?: string; pass?: string } = false;

if (env.EMAIL_SMTP_USER || env.EMAIL_SMTP_PASSWORD) {
auth = {
user: env.EMAIL_SMTP_USER,
pass: env.EMAIL_SMTP_PASSWORD,
};
}

transporter = nodemailer.createTransport({
pool: env.EMAIL_SMTP_POOL,
host: env.EMAIL_SMTP_HOST,
port: env.EMAIL_SMTP_PORT,
secure: env.EMAIL_SMTP_SECURE,
ignoreTLS: env.EMAIL_SMTP_IGNORE_TLS,
auth: auth,
} as Record<string, unknown>);
} else if (env.EMAIL_TRANSPORT.toLowerCase() === 'mailgun') {
const mg = require('nodemailer-mailgun-transport');
transporter = nodemailer.createTransport(
mg({
auth: {
api_key: env.EMAIL_MAILGUN_API_KEY,
domain: env.EMAIL_MAILGUN_DOMAIN,
},
}) as any
);
} else {
logger.warn('Illegal transport given for email. Check the EMAIL_TRANSPORT env var.');
}

transporter = nodemailer.createTransport({
pool: env.EMAIL_SMTP_POOL,
host: env.EMAIL_SMTP_HOST,
port: env.EMAIL_SMTP_PORT,
secure: env.EMAIL_SMTP_SECURE,
auth: auth,
} as Record<string, unknown>);
} else if (env.EMAIL_TRANSPORT.toLowerCase() === 'mailgun') {
const mg = require('nodemailer-mailgun-transport');
transporter = nodemailer.createTransport(
mg({
auth: {
api_key: env.EMAIL_MAILGUN_API_KEY,
domain: env.EMAIL_MAILGUN_DOMAIN,
},
}) as any
);
} else {
logger.warn('Illegal transport given for email. Check the EMAIL_TRANSPORT env var.');
return transporter;
}

if (transporter) {
transporter.verify((error) => {
if (error) {
logger.warn(`Couldn't connect to email server.`);
logger.warn(`Email verification error: ${error}`);
} else {
logger.info(`Email connection established`);
}
});
}

export default transporter;

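The transporter is now created lazily on first use instead of at import time. A minimal consumer sketch; the relative import path and the warning text are assumptions:

```ts
// Illustrative consumer of the new lazy mailer factory.
import getMailer from './mailer';

const transporter = getMailer(); // built on the first call, then reused

transporter.verify((error) => {
	if (error) console.warn('Could not connect to the email server', error);
});
```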
@@ -248,20 +248,25 @@ export class AuthenticationService {
}

async generateOTPAuthURL(pk: string, secret: string): Promise<string> {
const user = await this.knex.select('first_name', 'last_name').from('directus_users').where({ id: pk }).first();
const name = `${user.first_name} ${user.last_name}`;
return authenticator.keyuri(name, 'Directus', secret);
const user = await this.knex.select('email').from('directus_users').where({ id: pk }).first();
const project = await this.knex.select('project_name').from('directus_settings').limit(1).first();
return authenticator.keyuri(user.email, project?.project_name || 'Directus', secret);
}

async verifyOTP(pk: string, otp: string): Promise<boolean> {
const user = await this.knex.select('tfa_secret').from('directus_users').where({ id: pk }).first();
async verifyOTP(pk: string, otp: string, secret?: string): Promise<boolean> {
let tfaSecret: string;
if (!secret) {
const user = await this.knex.select('tfa_secret').from('directus_users').where({ id: pk }).first();

if (!user.tfa_secret) {
throw new InvalidPayloadException(`User "${pk}" doesn't have TFA enabled.`);
if (!user.tfa_secret) {
throw new InvalidPayloadException(`User "${pk}" doesn't have TFA enabled.`);
}
tfaSecret = user.tfa_secret;
} else {
tfaSecret = secret;
}

const secret = user.tfa_secret;
return authenticator.check(otp, secret);
return authenticator.check(otp, tfaSecret);
}

async verifyPassword(pk: string, password: string): Promise<boolean> {

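For reference, the otplib calls behind the change above fit together roughly like this; the email, project name, and generated values are illustrative:

```ts
// Illustrative: the otpauth URL now uses the user's email as the label and the project name as the issuer.
import { authenticator } from 'otplib';

const secret = authenticator.generateSecret();
const otpauthUrl = authenticator.keyuri('admin@example.com', 'My Project', secret);

// Later, a submitted one-time password is checked against the same secret,
// which verifyOTP can now receive directly instead of reading it from the database.
const otp = authenticator.generate(secret); // stand-in for the code the user types in
const isValid = authenticator.check(otp, secret); // true
```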
@@ -399,6 +399,19 @@ export class CollectionsService {
}
}

const m2aRelationsThatIncludeThisCollection = this.schema.relations.filter((relation) => {
return relation.meta?.one_allowed_collections?.includes(collectionKey);
});

for (const relation of m2aRelationsThatIncludeThisCollection) {
const newAllowedCollections = relation
.meta!.one_allowed_collections!.filter((collection) => collectionKey !== collection)
.join(',');
await trx('directus_relations')
.update({ one_allowed_collections: newAllowedCollections })
.where({ id: relation.meta!.id });
}

await collectionItemsService.deleteOne(collectionKey);
await trx.schema.dropTable(collectionKey);
});

@@ -417,8 +417,6 @@ export class FieldsService {

if (field.schema?.has_auto_increment) {
column = table.increments(field.field);
} else if (field.schema?.data_type) {
column = table.specificType(field.field, field.schema.data_type);
} else if (field.type === 'string') {
column = table.string(field.field, field.schema?.max_length ?? undefined);
} else if (['float', 'decimal'].includes(field.type)) {

@@ -1481,9 +1481,9 @@ export class GraphQLService {
return true;
},
},
users_me_tfa_enable: {
users_me_tfa_generate: {
type: new GraphQLObjectType({
name: 'users_me_tfa_enable_data',
name: 'users_me_tfa_generate_data',
fields: {
secret: { type: GraphQLString },
otpauth_url: { type: GraphQLString },
@@ -1503,10 +1503,27 @@ export class GraphQLService {
schema: this.schema,
});
await authService.verifyPassword(this.accountability.user, args.password);
const { url, secret } = await service.enableTFA(this.accountability.user);
const { url, secret } = await service.generateTFA(this.accountability.user);
return { secret, otpauth_url: url };
},
},
users_me_tfa_enable: {
type: GraphQLBoolean,
args: {
otp: GraphQLNonNull(GraphQLString),
secret: GraphQLNonNull(GraphQLString),
},
resolve: async (_, args) => {
if (!this.accountability?.user) return null;
const service = new UsersService({
accountability: this.accountability,
schema: this.schema,
});

await service.enableTFA(this.accountability.user, args.otp, args.secret);
return true;
},
},
users_me_tfa_disable: {
type: GraphQLBoolean,
args: {

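The matching GraphQL operations for the split mutations above could look like this; the operation names, field selection, and the `password` argument on `users_me_tfa_generate` follow the resolver code shown, but the generate mutation's argument definition is not part of this hunk, so treat it as an assumption:

```ts
// Illustrative GraphQL documents for the renamed and added mutations.
const generateTfaMutation = `
	mutation GenerateTFA($password: String!) {
		users_me_tfa_generate(password: $password) {
			secret
			otpauth_url
		}
	}
`;

const enableTfaMutation = `
	mutation EnableTFA($otp: String!, $secret: String!) {
		users_me_tfa_enable(otp: $otp, secret: $secret)
	}
`;
```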
@@ -7,8 +7,8 @@ import env from '../../env';
import { InvalidPayloadException } from '../../exceptions';
import logger from '../../logger';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../../types';
import mailer from '../../mailer';
import { SendMailOptions } from 'nodemailer';
import getMailer from '../../mailer';
import { Transporter, SendMailOptions } from 'nodemailer';

const liquidEngine = new Liquid({
root: [path.resolve(env.EXTENSIONS_PATH, 'templates'), path.resolve(__dirname, 'templates')],
@@ -26,16 +26,23 @@ export class MailService {
schema: SchemaOverview;
accountability: Accountability | null;
knex: Knex;
mailer: Transporter;

constructor(opts: AbstractServiceOptions) {
this.schema = opts.schema;
this.accountability = opts.accountability || null;
this.knex = opts?.knex || getDatabase();
this.mailer = getMailer();

this.mailer.verify((error) => {
if (error) {
logger.warn(`Email connection failed:`);
logger.warn(error);
}
});
}

async send(options: EmailOptions): Promise<void> {
if (!mailer) return;

const { template, ...emailOptions } = options;
let { html } = options;

@@ -55,7 +62,7 @@ export class MailService {
}

try {
await mailer.sendMail({ ...emailOptions, from, html });
await this.mailer.sendMail({ ...emailOptions, from, html });
} catch (error) {
logger.warn('[Email] Unexpected error while sending an email:');
logger.warn(error);

@@ -14,7 +14,7 @@ import { rateLimiter } from '../middleware/rate-limiter';
import storage from '../storage';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types';
import { toArray } from '../utils/to-array';
import mailer from '../mailer';
import getMailer from '../mailer';
import { SettingsService } from './settings';

export class ServerService {
@@ -316,8 +316,10 @@ export class ServerService {
],
};

const mailer = getMailer();

try {
await mailer?.verify();
await mailer.verify();
} catch (err) {
checks['email:connection'][0].status = 'error';
checks['email:connection'][0].output = err;

@@ -10,10 +10,12 @@ import {
ForbiddenException,
InvalidPayloadException,
UnprocessableEntityException,
InvalidCredentialsException,
} from '../exceptions';
import { RecordNotUniqueException } from '../exceptions/database/record-not-unique';
import logger from '../logger';
import { AbstractServiceOptions, Accountability, Item, PrimaryKey, Query, SchemaOverview } from '../types';
import isUrlAllowed from '../utils/is-url-allowed';
import { toArray } from '../utils/to-array';
import { AuthenticationService } from './authentication';
import { ItemsService, MutationOptions } from './items';
@@ -226,9 +228,7 @@ export class UsersService extends ItemsService {
async inviteUser(email: string | string[], role: string, url: string | null, subject?: string | null): Promise<void> {
const emails = toArray(email);

const urlWhitelist = toArray(env.USER_INVITE_URL_ALLOW_LIST);

if (url && urlWhitelist.includes(url) === false) {
if (url && isUrlAllowed(url, env.USER_INVITE_URL_ALLOW_LIST) === false) {
throw new InvalidPayloadException(`Url "${url}" can't be used to invite users.`);
}

@@ -305,9 +305,7 @@ export class UsersService extends ItemsService {
const payload = { email, scope: 'password-reset' };
const token = jwt.sign(payload, env.SECRET as string, { expiresIn: '1d' });

const urlWhitelist = toArray(env.PASSWORD_RESET_URL_ALLOW_LIST);

if (url && urlWhitelist.includes(url) === false) {
if (url && isUrlAllowed(url, env.PASSWORD_RESET_URL_ALLOW_LIST) === false) {
throw new InvalidPayloadException(`Url "${url}" can't be used to reset passwords.`);
}

@@ -350,7 +348,7 @@ export class UsersService extends ItemsService {
}
}

async enableTFA(pk: string): Promise<Record<string, string>> {
async generateTFA(pk: string): Promise<Record<string, string>> {
const user = await this.knex.select('tfa_secret').from('directus_users').where({ id: pk }).first();

if (user?.tfa_secret !== null) {
@@ -364,14 +362,36 @@ export class UsersService extends ItemsService {
});
const secret = authService.generateTFASecret();

await this.knex('directus_users').update({ tfa_secret: secret }).where({ id: pk });

return {
secret,
url: await authService.generateOTPAuthURL(pk, secret),
};
}

async enableTFA(pk: string, otp: string, secret: string): Promise<void> {
const authService = new AuthenticationService({
schema: this.schema,
});

if (!pk) {
throw new InvalidCredentialsException();
}

const otpValid = await authService.verifyOTP(pk, otp, secret);

if (otpValid === false) {
throw new InvalidPayloadException(`"otp" is invalid`);
}

const userSecret = await this.knex.select('tfa_secret').from('directus_users').where({ id: pk }).first();

if (userSecret?.tfa_secret !== null) {
throw new InvalidPayloadException('TFA Secret is already set for this user');
}

await this.knex('directus_users').update({ tfa_secret: secret }).where({ id: pk });
}

async disableTFA(pk: string): Promise<void> {
await this.knex('directus_users').update({ tfa_secret: null }).where({ id: pk });
}

@@ -8,6 +8,7 @@ export type CollectionMeta = {
singleton: boolean;
icon: string | null;
translations: Record<string, string>;
item_duplication_fields: string[] | null;
accountability: 'all' | 'accountability' | null;
};

@@ -2,7 +2,11 @@ import camelcase from 'camelcase';
import { set } from 'lodash';
import env from '../env';

export function getConfigFromEnv(prefix: string, omitPrefix?: string | string[]): any {
export function getConfigFromEnv(
prefix: string,
omitPrefix?: string | string[],
type: 'camelcase' | 'underscore' = 'camelcase'
): Record<string, any> {
const config: any = {};

for (const [key, value] of Object.entries(env)) {
@@ -23,12 +27,22 @@ export function getConfigFromEnv(prefix: string, omitPrefix?: string | string[])
if (key.includes('__')) {
const path = key
.split('__')
.map((key, index) => (index === 0 ? camelcase(camelcase(key.slice(prefix.length))) : camelcase(key)));
.map((key, index) => (index === 0 ? transform(transform(key.slice(prefix.length))) : transform(key)));
set(config, path.join('.'), value);
} else {
config[camelcase(key.slice(prefix.length))] = value;
config[transform(key.slice(prefix.length))] = value;
}
}

return config;

function transform(key: string): string {
if (type === 'camelcase') {
return camelcase(key);
} else if (type === 'underscore') {
return key.toLowerCase();
}

return key;
}
}

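To make the new `type` parameter concrete (the environment variable name is an illustrative assumption): for an environment containing `CACHE_REDIS_HOST=127.0.0.1`, the two transforms produce differently shaped keys:

```ts
// Illustrative: same prefix, different key transform.
import { getConfigFromEnv } from './utils/get-config-from-env';

getConfigFromEnv('CACHE_'); // → { redisHost: '127.0.0.1' } (default 'camelcase')
getConfigFromEnv('CACHE_', undefined, 'underscore'); // → { redis_host: '127.0.0.1' }
```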
@@ -93,6 +93,14 @@ export default function getLocalType(
): typeof types[number] | 'unknown' {
const type = localTypeMap[column.data_type.toLowerCase().split('(')[0]];

const special = field?.special;
if (special) {
if (special.includes('json')) return 'json';
if (special.includes('hash')) return 'hash';
if (special.includes('csv')) return 'csv';
if (special.includes('uuid')) return 'uuid';
}

/** Handle Postgres numeric decimals */
if (column.data_type === 'numeric' && column.numeric_precision !== null && column.numeric_scale !== null) {
return 'decimal';
@@ -103,11 +111,6 @@ export default function getLocalType(
return 'text';
}

if (field?.special?.includes('json')) return 'json';
if (field?.special?.includes('hash')) return 'hash';
if (field?.special?.includes('csv')) return 'csv';
if (field?.special?.includes('uuid')) return 'uuid';

if (type) {
return type.type;
}

api/src/utils/is-url-allowed.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import { toArray } from './to-array';
import logger from '../logger';

/**
* Check if url matches allow list either exactly or by domain+path
*/
export default function isUrlAllowed(url: string, allowList: string | string[]): boolean {
console.log(url, allowList);

const urlAllowList = toArray(allowList);

if (urlAllowList.includes(url)) return true;

const parsedWhitelist = urlAllowList.map((allowedURL) => {
try {
const { hostname, pathname } = new URL(allowedURL);
return hostname + pathname;
} catch {
logger.warn(`Invalid URL used "${url}"`);
}
});

try {
const { hostname, pathname } = new URL(url);
return parsedWhitelist.includes(hostname + pathname);
} catch {
return false;
}
}
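A quick usage sketch of the new helper (the URLs are examples): an exact string match passes, and otherwise the comparison falls back to hostname plus path, so the protocol is effectively ignored:

```ts
import isUrlAllowed from './utils/is-url-allowed';

const allowList = ['https://example.com/admin/accept-invite'];

isUrlAllowed('https://example.com/admin/accept-invite', allowList); // true (exact match)
isUrlAllowed('http://example.com/admin/accept-invite', allowList); // true (hostname + path match)
isUrlAllowed('https://evil.example.org/admin/accept-invite', allowList); // false
```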