diff --git a/api/src/app.ts b/api/src/app.ts
index ab3f86acfd..c925dfa324 100644
--- a/api/src/app.ts
+++ b/api/src/app.ts
@@ -55,13 +55,13 @@ app.set('trust proxy', true);
 app.use(expressLogger({ logger }));
 
 app.use((req, res, next) => {
-	bodyParser.json()(req, res, err => {
-		if (err) {
+	bodyParser.json()(req, res, (err) => {
+		if (err) {
 			return next(new InvalidPayloadException(err.message));
-		}
+		}
 
-		return next();
-	});
+		return next();
+	});
 });
 
 app.use(bodyParser.json());
@@ -129,7 +129,6 @@ registerExtensions(customRouter);
 
 track('serverStarted');
 
-emitter.emitAsync('server.started')
-	.catch((err) => logger.warn(err));
+emitter.emitAsync('server.started').catch((err) => logger.warn(err));
 
 export default app;
diff --git a/api/src/cache.ts b/api/src/cache.ts
index 2a1a230e66..3bcc494dc2 100644
--- a/api/src/cache.ts
+++ b/api/src/cache.ts
@@ -27,18 +27,19 @@ function getKevyInstance() {
 	}
 }
 
-function getConfig(
-	store: 'memory' | 'redis' | 'memcache' = 'memory'
-): Options {
-	const config: Options = { namespace: env.CACHE_NAMESPACE, ttl: ms(env.CACHE_TTL as string) };
+function getConfig(store: 'memory' | 'redis' | 'memcache' = 'memory'): Options {
+	const config: Options = {
+		namespace: env.CACHE_NAMESPACE,
+		ttl: ms(env.CACHE_TTL as string),
+	};
 
 	if (store === 'redis') {
 		const Redis = require('ioredis');
 		const KeyvRedis = require('@keyv/redis');
 
-		config.store = new KeyvRedis(new Redis(
-			env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_')
-		));
+		config.store = new KeyvRedis(
+			new Redis(env.CACHE_REDIS || getConfigFromEnv('CACHE_REDIS_'))
+		);
 	}
 
 	if (store === 'memcache') {
diff --git a/api/src/cli/commands/database/migrate.ts b/api/src/cli/commands/database/migrate.ts
index 2bbaa49c71..0629b253ce 100644
--- a/api/src/cli/commands/database/migrate.ts
+++ b/api/src/cli/commands/database/migrate.ts
@@ -5,7 +5,7 @@ export default async function migrate(direction: 'latest' | 'up' | 'down') {
 
 	try {
 		await run(database, direction);
-	} catch(err) {
+	} catch (err) {
 		console.log(err);
 		process.exit(1);
 	} finally {
diff --git a/api/src/cli/index.ts b/api/src/cli/index.ts
index a7c20c52f6..3331dc3415 100644
--- a/api/src/cli/index.ts
+++ b/api/src/cli/index.ts
@@ -19,9 +19,18 @@ program.command('init').description('Create a new Directus Project').action(init
 const dbCommand = program.command('database');
 
 dbCommand.command('install').description('Install the database').action(dbInstall);
-dbCommand.command('migrate:latest').description('Upgrade the database').action(() => dbMigrate('latest'));
-dbCommand.command('migrate:up').description('Upgrade the database').action(() => dbMigrate('up'));
-dbCommand.command('migrate:down').description('Downgrade the database').action(() => dbMigrate('down'));
+dbCommand
+	.command('migrate:latest')
+	.description('Upgrade the database')
+	.action(() => dbMigrate('latest'));
+dbCommand
+	.command('migrate:up')
+	.description('Upgrade the database')
+	.action(() => dbMigrate('up'));
+dbCommand
+	.command('migrate:down')
+	.description('Downgrade the database')
+	.action(() => dbMigrate('down'));
 
 const usersCommand = program.command('users');
 usersCommand
@@ -34,7 +43,7 @@ usersCommand
 
 const rolesCommand = program.command('roles');
 rolesCommand
-.command('create')
+	.command('create')
 	.storeOptionsAsProperties(false)
 	.passCommandToAction(false)
 	.description('Create a new role')
diff --git a/api/src/controllers/activity.ts b/api/src/controllers/activity.ts
index 780f183249..774b0293ef 100644
--- a/api/src/controllers/activity.ts
+++ b/api/src/controllers/activity.ts
@@ -24,7 +24,7 @@ router.get(
 		};
 
 		return next();
-	}),
+	})
 );
 
 router.get(
@@ -38,7 +38,7 @@ router.get(
 		};
 
 		return next();
-	}),
+	})
 );
 
 router.post(
@@ -69,7 +69,7 @@ router.post(
 		}
 
 		return next();
-	}),
+	})
 );
 
 router.patch(
@@ -93,7 +93,7 @@ router.patch(
 		}
 
 		return next();
-	}),
+	})
 );
 
 router.delete(
@@ -103,7 +103,7 @@ router.delete(
 		await service.delete(req.params.pk);
 
 		return next();
-	}),
+	})
 );
 
 export default router;
diff --git a/api/src/controllers/files.ts b/api/src/controllers/files.ts
index 86cae96ec1..673f3daf91 100644
--- a/api/src/controllers/files.ts
+++ b/api/src/controllers/files.ts
@@ -112,7 +112,9 @@ router.post(
 
 		try {
 			const record = await service.readByKey(keys as any, req.sanitizedQuery);
-			res.locals.payload = { data: res.locals.savedFiles.length === 1 ? record[0] : record || null };
+			res.locals.payload = {
+				data: res.locals.savedFiles.length === 1 ? record[0] : record || null,
+			};
 		} catch (error) {
 			if (error instanceof ForbiddenException) {
 				return next();
@@ -127,7 +129,7 @@ router.post(
 
 const importSchema = Joi.object({
 	url: Joi.string().required(),
-	data: Joi.object()
+	data: Joi.object(),
 });
 
 router.post(
diff --git a/api/src/controllers/folders.ts b/api/src/controllers/folders.ts
index 84e2a14ba8..660611cc55 100644
--- a/api/src/controllers/folders.ts
+++ b/api/src/controllers/folders.ts
@@ -1,6 +1,6 @@
 import express from 'express';
 import asyncHandler from 'express-async-handler';
-import {FoldersService, MetaService} from '../services';
+import { FoldersService, MetaService } from '../services';
 import { ForbiddenException } from '../exceptions';
 import useCollection from '../middleware/use-collection';
diff --git a/api/src/controllers/items.ts b/api/src/controllers/items.ts
index 2af0809fb5..5d31d60eb9 100644
--- a/api/src/controllers/items.ts
+++ b/api/src/controllers/items.ts
@@ -1,7 +1,7 @@
 import express from 'express';
 import asyncHandler from 'express-async-handler';
 import collectionExists from '../middleware/collection-exists';
-import { ItemsService, MetaService} from '../services';
+import { ItemsService, MetaService } from '../services';
 import { RouteNotFoundException, ForbiddenException } from '../exceptions';
 
 const router = express.Router();
@@ -29,7 +29,7 @@ router.post(
 		}
 
 		return next();
-	}),
+	})
 );
 
 router.get(
@@ -50,7 +50,7 @@ router.get(
 			data: records || null,
 		};
 		return next();
-	}),
+	})
 );
 
 router.get(
@@ -69,7 +69,7 @@ router.get(
 			data: result || null,
 		};
 		return next();
-	}),
+	})
 );
 
 router.patch(
@@ -100,7 +100,7 @@ router.patch(
 		}
 
 		return next();
-	}),
+	})
 );
 
 router.patch(
@@ -128,7 +128,7 @@ router.patch(
 		}
 
 		return next();
-	}),
+	})
 );
 
 router.delete(
@@ -139,7 +139,7 @@ router.delete(
 		const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
 		await service.delete(pk as any);
 		return next();
-	}),
+	})
 );
 
 export default router;
diff --git a/api/src/controllers/revisions.ts b/api/src/controllers/revisions.ts
index a583b5fb79..310315ba9e 100644
--- a/api/src/controllers/revisions.ts
+++ b/api/src/controllers/revisions.ts
@@ -1,6 +1,6 @@
 import express from 'express';
 import asyncHandler from 'express-async-handler';
-import { RevisionsService, MetaService} from '../services';
+import { RevisionsService, MetaService } from '../services';
 import useCollection from '../middleware/use-collection';
 
 const router = express.Router();
diff --git a/api/src/controllers/roles.ts b/api/src/controllers/roles.ts
index 45e049a88b..8413681c02 100644
--- a/api/src/controllers/roles.ts
+++ b/api/src/controllers/roles.ts
@@ -1,6 +1,6 @@
 import express from 'express';
 import asyncHandler from 'express-async-handler';
-import { RolesService, MetaService} from '../services';
+import { RolesService, MetaService } from '../services';
 import { ForbiddenException } from '../exceptions';
 import useCollection from '../middleware/use-collection';
diff --git a/api/src/controllers/users.ts b/api/src/controllers/users.ts
index dc1f8237d5..502bda6253 100644
--- a/api/src/controllers/users.ts
+++ b/api/src/controllers/users.ts
@@ -1,7 +1,11 @@
 import express from 'express';
 import asyncHandler from 'express-async-handler';
 import Joi from 'joi';
-import { InvalidPayloadException, InvalidCredentialsException, ForbiddenException } from '../exceptions';
+import {
+	InvalidPayloadException,
+	InvalidCredentialsException,
+	ForbiddenException,
+} from '../exceptions';
 import { UsersService, MetaService, AuthenticationService } from '../services';
 import useCollection from '../middleware/use-collection';
diff --git a/api/src/controllers/webhooks.ts b/api/src/controllers/webhooks.ts
index 0e05f051c9..de8a6a76a8 100644
--- a/api/src/controllers/webhooks.ts
+++ b/api/src/controllers/webhooks.ts
@@ -1,6 +1,6 @@
 import express from 'express';
 import asyncHandler from 'express-async-handler';
-import { WebhooksService, MetaService} from '../services';
+import { WebhooksService, MetaService } from '../services';
 import { ForbiddenException } from '../exceptions';
 import useCollection from '../middleware/use-collection';
diff --git a/api/src/database/migrations/run.ts b/api/src/database/migrations/run.ts
index 7f5c77df24..6c3fece8f2 100644
--- a/api/src/database/migrations/run.ts
+++ b/api/src/database/migrations/run.ts
@@ -7,13 +7,16 @@ type Migration = {
 	version: string;
 	name: string;
 	timestamp: Date;
-}
+};
 
 export default async function run(database: Knex, direction: 'up' | 'down' | 'latest') {
 	let migrationFiles = await fse.readdir(__dirname);
 	migrationFiles = migrationFiles.filter((file: string) => file !== 'run.ts');
 
-	const completedMigrations = await database.select('*').from('directus_migrations').orderBy('version');
+	const completedMigrations = await database
+		.select('*')
+		.from('directus_migrations')
+		.orderBy('version');
 
 	const migrations = migrationFiles.map((migrationFile) => {
 		const version = migrationFile.split('-')[0];
@@ -24,7 +27,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
 			file: migrationFile,
 			version,
 			name,
-			completed
+			completed,
 		};
 	});
 
@@ -51,7 +54,9 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
 		const { up } = require(path.join(__dirname, nextVersion.file));
 		await up(database);
 
-		await database.insert({ version: nextVersion.version, name: nextVersion.name }).into('directus_migrations');
+		await database
+			.insert({ version: nextVersion.version, name: nextVersion.name })
+			.into('directus_migrations');
 	}
 
 	async function down() {
@@ -61,7 +66,9 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
 			throw Error('Nothing to downgrade');
 		}
 
-		const migration = migrations.find((migration) => migration.version === currentVersion.version);
+		const migration = migrations.find(
+			(migration) => migration.version === currentVersion.version
+		);
 
 		if (!migration) {
 			throw new Error('Couldnt find migration');
@@ -77,7 +84,9 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
 		if (migration.completed === false) {
 			const { up } = require(path.join(__dirname, migration.file));
 			await up(database);
-			await database.insert({ version: migration.version, name: migration.name }).into('directus_migrations');
+			await database
+				.insert({ version: migration.version, name: migration.name })
+				.into('directus_migrations');
 		}
 	}
 }
diff --git a/api/src/database/run-ast.ts b/api/src/database/run-ast.ts
index af5f244c89..a35a9ee65b 100644
--- a/api/src/database/run-ast.ts
+++ b/api/src/database/run-ast.ts
@@ -8,9 +8,9 @@ import applyQuery from '../utils/apply-query';
 import Knex from 'knex';
 
 type RunASTOptions = {
-	query?: AST['query'],
-	knex?: Knex
-}
+	query?: AST['query'];
+	knex?: Knex;
+};
 
 export default async function runAST(ast: AST, options?: RunASTOptions) {
 	const query = options?.query || ast.query;
diff --git a/api/src/database/seeds/run.ts b/api/src/database/seeds/run.ts
index 108bb72427..3544183e6c 100644
--- a/api/src/database/seeds/run.ts
+++ b/api/src/database/seeds/run.ts
@@ -22,14 +22,14 @@ type TableSeed = {
 				column: string;
 			};
 		};
-	}
-}
+	};
+};
 
 type RowSeed = {
 	table: string;
 	defaults: Record;
 	data: Record[];
-}
+};
 
 type FieldSeed = {
 	table: string;
@@ -50,7 +50,7 @@ type FieldSeed = {
 		translation: Record | null;
 		note: string | null;
 	}[];
-}
+};
 
 export default async function runSeed(database: Knex) {
 	const exists = await database.schema.hasTable('directus_collections');
@@ -68,10 +68,13 @@ async function createTables(database: Knex) {
 	const tableSeeds = await fse.readdir(path.resolve(__dirname, './01-tables/'));
 
 	for (const tableSeedFile of tableSeeds) {
-		const yamlRaw = await fse.readFile(path.resolve(__dirname, './01-tables', tableSeedFile), 'utf8');
+		const yamlRaw = await fse.readFile(
+			path.resolve(__dirname, './01-tables', tableSeedFile),
+			'utf8'
+		);
 		const seedData = yaml.safeLoad(yamlRaw) as TableSeed;
 
-		await database.schema.createTable(seedData.table, tableBuilder => {
+		await database.schema.createTable(seedData.table, (tableBuilder) => {
 			for (const [columnName, columnInfo] of Object.entries(seedData.columns)) {
 				let column: ColumnBuilder;
 
@@ -129,7 +132,10 @@ async function insertRows(database: Knex) {
 	const rowSeeds = await fse.readdir(path.resolve(__dirname, './02-rows/'));
 
 	for (const rowSeedFile of rowSeeds) {
-		const yamlRaw = await fse.readFile(path.resolve(__dirname, './02-rows', rowSeedFile), 'utf8');
+		const yamlRaw = await fse.readFile(
+			path.resolve(__dirname, './02-rows', rowSeedFile),
+			'utf8'
+		);
 		const seedData = yaml.safeLoad(yamlRaw) as RowSeed;
 
 		const dataWithDefaults = seedData.data.map((row) => {
@@ -149,11 +155,17 @@ async function insertRows(database: Knex) {
 async function insertFields(database: Knex) {
 	const fieldSeeds = await fse.readdir(path.resolve(__dirname, './03-fields/'));
 
-	const defaultsYaml = await fse.readFile(path.resolve(__dirname, './03-fields/_defaults.yaml'), 'utf8');
+	const defaultsYaml = await fse.readFile(
+		path.resolve(__dirname, './03-fields/_defaults.yaml'),
+		'utf8'
+	);
 	const defaults = yaml.safeLoad(defaultsYaml) as FieldSeed;
 
 	for (const fieldSeedFile of fieldSeeds) {
-		const yamlRaw = await fse.readFile(path.resolve(__dirname, './03-fields', fieldSeedFile), 'utf8');
+		const yamlRaw = await fse.readFile(
+			path.resolve(__dirname, './03-fields', fieldSeedFile),
+			'utf8'
+		);
 		const seedData = yaml.safeLoad(yamlRaw) as FieldSeed;
 
 		if (fieldSeedFile === '_defaults.yaml') {
diff --git a/api/src/emitter.ts b/api/src/emitter.ts
index 323dbb178a..ac126496da 100644
--- a/api/src/emitter.ts
+++ b/api/src/emitter.ts
@@ -3,6 +3,6 @@ import { EventEmitter2 } from 'eventemitter2';
 const emitter = new EventEmitter2({ wildcard: true, verboseMemoryLeak: true, delimiter: '.' });
 
 // No-op function to ensure we never end up with no data
-emitter.on('item.*.*.before', input => input);
+emitter.on('item.*.*.before', (input) => input);
 
 export default emitter;
diff --git a/api/src/extensions.ts b/api/src/extensions.ts
index ac6cabb607..30131bac28 100644
--- a/api/src/extensions.ts
+++ b/api/src/extensions.ts
@@ -19,9 +19,12 @@ export async function listExtensions(type: string) {
 		return await listFolders(location);
 	} catch (err) {
 		if (err.code === 'ENOENT') {
-			throw new ServiceUnavailableException(`Extension folder "extensions/${type}" couldn't be opened`, {
-				service: 'extensions',
-			});
+			throw new ServiceUnavailableException(
+				`Extension folder "extensions/${type}" couldn't be opened`,
+				{
+					service: 'extensions',
+				}
+			);
 		}
 		throw err;
 	}
diff --git a/api/src/middleware/respond.ts b/api/src/middleware/respond.ts
index a49c25ad95..2ea4162913 100644
--- a/api/src/middleware/respond.ts
+++ b/api/src/middleware/respond.ts
@@ -1,13 +1,18 @@
-import { RequestHandler } from "express";
-import asyncHandler from "express-async-handler";
-import env from "../env";
-import { getCacheKey } from "../utils/get-cache-key";
+import { RequestHandler } from 'express';
+import asyncHandler from 'express-async-handler';
+import env from '../env';
+import { getCacheKey } from '../utils/get-cache-key';
 import cache from '../cache';
 import { Transform, transforms } from 'json2csv';
 import { PassThrough } from 'stream';
 
 export const respond: RequestHandler = asyncHandler(async (req, res) => {
-	if (req.method.toLowerCase() === 'get' && env.CACHE_ENABLED === true && cache && !req.sanitizedQuery.export) {
+	if (
+		req.method.toLowerCase() === 'get' &&
+		env.CACHE_ENABLED === true &&
+		cache &&
+		!req.sanitizedQuery.export
+	) {
 		const key = getCacheKey(req);
 		await cache.set(key, res.locals.payload);
 	}
@@ -34,7 +39,9 @@ export const respond: RequestHandler = asyncHandler(async (req, res) => {
 		res.set('Content-Type', 'text/csv');
 		const stream = new PassThrough();
 		stream.end(Buffer.from(JSON.stringify(res.locals.payload.data), 'utf-8'));
-		const json2csv = new Transform({ transforms: [transforms.flatten({ separator: '.' })] });
+		const json2csv = new Transform({
+			transforms: [transforms.flatten({ separator: '.' })],
+		});
 		return stream.pipe(json2csv).pipe(res);
 	}
 }
diff --git a/api/src/middleware/sanitize-query.ts b/api/src/middleware/sanitize-query.ts
index 58031f6523..ee2858c5bd 100644
--- a/api/src/middleware/sanitize-query.ts
+++ b/api/src/middleware/sanitize-query.ts
@@ -56,7 +56,11 @@ const sanitizeQuery: RequestHandler = (req, res, next) => {
 		query.search = req.query.search;
 	}
 
-	if (req.query.export && typeof req.query.export === 'string' && ['json', 'csv'].includes(req.query.export)) {
+	if (
+		req.query.export &&
+		typeof req.query.export === 'string' &&
+		['json', 'csv'].includes(req.query.export)
+	) {
 		query.export = req.query.export as 'json' | 'csv';
 	}
diff --git a/api/src/services/items.ts b/api/src/services/items.ts
index ad4da99f37..bbac3a52bb 100644
--- a/api/src/services/items.ts
+++ b/api/src/services/items.ts
@@ -53,14 +53,18 @@ export class ItemsService implements AbstractService {
 		});
 
 		if (this.collection.startsWith('directus_') === false) {
-			const customProcessed = await emitter.emitAsync(`item.create.${this.collection}.before`, payloads, {
-				event: `item.create.${this.collection}.before`,
-				accountability: this.accountability,
-				collection: this.collection,
-				item: null,
-				action: 'create',
-				payload: payloads,
-			});
+			const customProcessed = await emitter.emitAsync(
+				`item.create.${this.collection}.before`,
+				payloads,
+				{
+					event: `item.create.${this.collection}.before`,
+					accountability: this.accountability,
+					collection: this.collection,
+					item: null,
+					action: 'create',
+					payload: payloads,
+				}
+			);
 
 			payloads = customProcessed[customProcessed.length - 1];
 		}
@@ -166,14 +170,16 @@ export class ItemsService implements AbstractService {
 		}
 
 		if (this.collection.startsWith('directus_') === false) {
-			emitter.emitAsync(`item.create.${this.collection}`, {
-				event: `item.create.${this.collection}`,
-				accountability: this.accountability,
-				collection: this.collection,
-				item: primaryKeys,
-				action: 'create',
-				payload: payloads,
-			}).catch(err => logger.warn(err));
+			emitter
+				.emitAsync(`item.create.${this.collection}`, {
+					event: `item.create.${this.collection}`,
+					accountability: this.accountability,
+					collection: this.collection,
+					item: primaryKeys,
+					action: 'create',
+					payload: payloads,
+				})
+				.catch((err) => logger.warn(err));
 		}
 
 		return primaryKeys;
@@ -186,7 +192,10 @@ export class ItemsService implements AbstractService {
 		const authorizationService = new AuthorizationService({
 			accountability: this.accountability,
 		});
-		let ast = await getASTFromQuery(this.collection, query, { accountability: this.accountability, knex: this.knex });
+		let ast = await getASTFromQuery(this.collection, query, {
+			accountability: this.accountability,
+			knex: this.knex,
+		});
 
 		if (this.accountability && this.accountability.admin !== true) {
 			ast = await authorizationService.processAST(ast);
@@ -219,15 +228,11 @@ export class ItemsService implements AbstractService {
 			},
 		};
 
-		let ast = await getASTFromQuery(
-			this.collection,
-			queryWithFilter,
-			{
-				accountability: this.accountability,
-				action,
-				knex: this.knex,
-			}
-		);
+		let ast = await getASTFromQuery(this.collection, queryWithFilter, {
+			accountability: this.accountability,
+			action,
+			knex: this.knex,
+		});
 
 		if (this.accountability && this.accountability.admin !== true) {
 			const authorizationService = new AuthorizationService({
@@ -259,14 +264,18 @@ export class ItemsService implements AbstractService {
 		let payload = clone(data);
 
 		if (this.collection.startsWith('directus_') === false) {
-			const customProcessed = await emitter.emitAsync(`item.update.${this.collection}.before`, payload, {
-				event: `item.update.${this.collection}.before`,
-				accountability: this.accountability,
-				collection: this.collection,
-				item: null,
-				action: 'update',
+			const customProcessed = await emitter.emitAsync(
+				`item.update.${this.collection}.before`,
 				payload,
-			});
+				{
+					event: `item.update.${this.collection}.before`,
+					accountability: this.accountability,
+					collection: this.collection,
+					item: null,
+					action: 'update',
+					payload,
+				}
+			);
 
 			payload = customProcessed[customProcessed.length - 1];
 		}
@@ -354,14 +363,16 @@ export class ItemsService implements AbstractService {
 			await cache.clear();
 		}
 
-		emitter.emitAsync(`item.update.${this.collection}`, {
-			event: `item.update.${this.collection}`,
-			accountability: this.accountability,
-			collection: this.collection,
-			item: key,
-			action: 'update',
-			payload,
-		}).catch(err => logger.warn(err));
+		emitter
+			.emitAsync(`item.update.${this.collection}`, {
+				event: `item.update.${this.collection}`,
+				accountability: this.accountability,
+				collection: this.collection,
+				item: key,
+				action: 'update',
+				payload,
+			})
+			.catch((err) => logger.warn(err));
 
 		return key;
 	}
@@ -438,14 +449,16 @@ export class ItemsService implements AbstractService {
 			await cache.clear();
 		}
 
-		emitter.emitAsync(`item.delete.${this.collection}`, {
-			event: `item.delete.${this.collection}`,
-			accountability: this.accountability,
-			collection: this.collection,
-			item: keys,
-			action: 'delete',
-			payload: null,
-		}).catch(err => logger.warn(err));
+		emitter
+			.emitAsync(`item.delete.${this.collection}`, {
+				event: `item.delete.${this.collection}`,
+				accountability: this.accountability,
+				collection: this.collection,
+				item: keys,
+				action: 'delete',
+				payload: null,
+			})
+			.catch((err) => logger.warn(err));
 
 		return key;
 	}
diff --git a/api/src/services/payload.ts b/api/src/services/payload.ts
index 76d8acbad1..e54c86623e 100644
--- a/api/src/services/payload.ts
+++ b/api/src/services/payload.ts
@@ -175,7 +175,12 @@ export class PayloadService {
 		if (['create', 'update'].includes(action)) {
 			processedPayload.forEach((record) => {
 				for (const [key, value] of Object.entries(record)) {
-					if (Array.isArray(value) || (typeof value === 'object' && (value instanceof Date) !== true && value !== null)) {
+					if (
+						Array.isArray(value) ||
+						(typeof value === 'object' &&
+							value instanceof Date !== true &&
+							value !== null)
+					) {
 						record[key] = JSON.stringify(value);
 					}
 				}
diff --git a/api/src/types/extensions.ts b/api/src/types/extensions.ts
index 0107e1c2af..1d60d25bfa 100644
--- a/api/src/types/extensions.ts
+++ b/api/src/types/extensions.ts
@@ -6,10 +6,10 @@ import Knex from 'knex';
 import { Router } from 'express';
 
 type ExtensionContext = {
-	services: typeof services,
-	exceptions: typeof exceptions,
-	database: Knex,
-	env: typeof env,
+	services: typeof services;
+	exceptions: typeof exceptions;
+	database: Knex;
+	env: typeof env;
 };
 
 export type HookRegisterFunction = (context: ExtensionContext) => Record;
diff --git a/api/src/utils/get-ast-from-query.ts b/api/src/utils/get-ast-from-query.ts
index f5c05a7d36..d349d3d3a9 100644
--- a/api/src/utils/get-ast-from-query.ts
+++ b/api/src/utils/get-ast-from-query.ts
@@ -19,7 +19,7 @@ type GetASTOptions = {
 	accountability?: Accountability | null;
 	action?: PermissionsAction;
 	knex?: Knex;
-}
+};
 
 export default async function getASTFromQuery(
 	collection: string,
diff --git a/api/src/utils/get-cache-key.ts b/api/src/utils/get-cache-key.ts
index 4c39e0a003..47e554a9d1 100644
--- a/api/src/utils/get-cache-key.ts
+++ b/api/src/utils/get-cache-key.ts
@@ -1,8 +1,10 @@
-import { Request } from "express";
+import { Request } from 'express';
 import url from 'url';
 
 export function getCacheKey(req: Request) {
 	const path = url.parse(req.originalUrl).pathname;
-	const key = `${req.accountability?.user || 'null'}-${path}-${JSON.stringify(req.sanitizedQuery)}`;
+	const key = `${req.accountability?.user || 'null'}-${path}-${JSON.stringify(
+		req.sanitizedQuery
+	)}`;
 	return key;
 }
diff --git a/api/src/utils/track.ts b/api/src/utils/track.ts
index 09c3ebe214..5a86cf68ae 100644
--- a/api/src/utils/track.ts
+++ b/api/src/utils/track.ts
@@ -48,7 +48,7 @@ async function getEnvInfo(event: string) {
 			store: env.CACHE_STORE,
 		},
 		storage: {
-			drivers: getStorageDrivers()
+			drivers: getStorageDrivers(),
 		},
 		cors: {
 			enabled: env.CORS_ENABLED,
@@ -57,15 +57,19 @@ async function getEnvInfo(event: string) {
 			transport: env.EMAIL_TRANSPORT,
 		},
 		oauth: {
-			providers: env.OAUTH_PROVIDERS.split(',').filter((p?: string) => p).map((p: string) => p.trim()),
+			providers: env.OAUTH_PROVIDERS.split(',')
+				.filter((p?: string) => p)
+				.map((p: string) => p.trim()),
 		},
-		db_client: env.DB_CLIENT
-	}
+		db_client: env.DB_CLIENT,
+	};
 }
 
 function getStorageDrivers() {
 	const drivers: string[] = [];
-	const locations = env.STORAGE_LOCATIONS.split(',').filter((l?: string) => l).map((l: string) => l.trim());
+	const locations = env.STORAGE_LOCATIONS.split(',')
+		.filter((l?: string) => l)
+		.map((l: string) => l.trim());
 
 	for (const location of locations) {
 		const driver = env[`STORAGE_${location.toUpperCase()}_DRIVER`];