Merge pull request #3341 from kibertoad/chore/prettier

Run prettier across app and api
This commit is contained in:
Rijk van Zanten
2020-12-07 21:28:40 -05:00
committed by GitHub
78 changed files with 329 additions and 1048 deletions

View File

@@ -3,16 +3,8 @@ module.exports = {
env: {
node: true,
},
extends: [
'plugin:@typescript-eslint/recommended',
// 'plugin:prettier/recommended'
// Uncomment when we are ready to prettify everything
],
plugins: [
'@typescript-eslint',
// 'prettier'
// Uncomment when we are ready to prettify everything
],
extends: ['plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended'],
plugins: ['@typescript-eslint', 'prettier'],
rules: {
'no-console': process.env.NODE_ENV === 'production' ? 'error' : 'off',
'no-debugger': process.env.NODE_ENV === 'production' ? 'error' : 'off',
@@ -21,6 +13,7 @@ module.exports = {
'@typescript-eslint/ban-ts-ignore': 0,
'@typescript-eslint/no-explicit-any': 0,
'@typescript-eslint/no-var-requires': 0,
'prettier/prettier': ['error', { singleQuote: true }],
'comma-dangle': [
'error',
{

View File

@@ -56,7 +56,8 @@
"dev": "cross-env NODE_ENV=development ts-node-dev --files src/start.ts --respawn --watch \"src/**/*.ts\" --watch \".env\" --transpile-only",
"cli": "cross-env NODE_ENV=development ts-node --script-mode --transpile-only src/cli/index.ts",
"lint": "eslint \"src/**/*.ts\" cli.js index.js",
"prepublishOnly": "npm run build"
"prepublishOnly": "npm run build",
"prettier": "prettier --write \"src/**/*.ts\" cli.js index.js"
},
"files": [
"dist",

View File

@@ -53,10 +53,7 @@ rolesCommand
.option('--admin', `whether or not the role has admin access`)
.action(rolesCreate);
program
.command('count <collection>')
.description('Count the amount of items in a given collection')
.action(count);
program.command('count <collection>').description('Count the amount of items in a given collection').action(count);
program.command('bootstrap').description('Initialize or update the database').action(bootstrap);

View File

@@ -21,11 +21,7 @@ const defaults = {
},
};
export default async function createEnv(
client: keyof typeof drivers,
credentials: Credentials,
directory: string
) {
export default async function createEnv(client: keyof typeof drivers, credentials: Credentials, directory: string) {
const config: Record<string, any> = {
...defaults,
database: {

View File

@@ -30,11 +30,7 @@ router.get(
const isValidUUID = validate(id, 4);
if (isValidUUID === false) throw new ForbiddenException();
const file = await database
.select('id', 'storage', 'filename_disk')
.from('directus_files')
.where({ id })
.first();
const file = await database.select('id', 'storage', 'filename_disk').from('directus_files').where({ id }).first();
if (!file) throw new ForbiddenException();
@@ -64,24 +60,17 @@ router.get(
const transformation = pick(req.query, ASSET_TRANSFORM_QUERY_KEYS);
if (transformation.hasOwnProperty('key') && Object.keys(transformation).length > 1) {
throw new InvalidQueryException(
`You can't combine the "key" query parameter with any other transformation.`
);
throw new InvalidQueryException(`You can't combine the "key" query parameter with any other transformation.`);
}
const systemKeys = SYSTEM_ASSET_ALLOW_LIST.map((transformation) => transformation.key);
const allKeys: string[] = [
...systemKeys,
...(assetSettings.storage_asset_presets || []).map(
(transformation: Transformation) => transformation.key
),
...(assetSettings.storage_asset_presets || []).map((transformation: Transformation) => transformation.key),
];
// For use in the next request handler
res.locals.shortcuts = [
...SYSTEM_ASSET_ALLOW_LIST,
...(assetSettings.storage_asset_presets || []),
];
res.locals.shortcuts = [...SYSTEM_ASSET_ALLOW_LIST, ...(assetSettings.storage_asset_presets || [])];
res.locals.transformation = transformation;
if (Object.keys(transformation).length === 0) {
@@ -93,15 +82,10 @@ router.get(
return next();
} else if (assetSettings.storage_asset_transform === 'shortcut') {
if (allKeys.includes(transformation.key as string)) return next();
throw new InvalidQueryException(
`Only configured shortcuts can be used in asset generation.`
);
throw new InvalidQueryException(`Only configured shortcuts can be used in asset generation.`);
} else {
if (transformation.key && systemKeys.includes(transformation.key as string))
return next();
throw new InvalidQueryException(
`Dynamic asset generation has been disabled for this project.`
);
if (transformation.key && systemKeys.includes(transformation.key as string)) return next();
throw new InvalidQueryException(`Dynamic asset generation has been disabled for this project.`);
}
}),
@@ -114,8 +98,7 @@ router.get(
const transformation: Transformation = res.locals.transformation.key
? res.locals.shortcuts.find(
(transformation: Transformation) =>
transformation.key === res.locals.transformation.key
(transformation: Transformation) => transformation.key === res.locals.transformation.key
)
: res.locals.transformation;

View File

@@ -68,8 +68,7 @@ router.post(
httpOnly: true,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
secure: env.REFRESH_TOKEN_COOKIE_SECURE === 'true' ? true : false,
sameSite:
(env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
sameSite: (env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
});
}
@@ -97,16 +96,12 @@ router.post(
const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;
if (!currentRefreshToken) {
throw new InvalidPayloadException(
`"refresh_token" is required in either the JSON payload or Cookie`
);
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
}
const mode: 'json' | 'cookie' = req.body.mode || req.body.refresh_token ? 'json' : 'cookie';
const { accessToken, refreshToken, expires } = await authenticationService.refresh(
currentRefreshToken
);
const { accessToken, refreshToken, expires } = await authenticationService.refresh(currentRefreshToken);
const payload = {
data: { access_token: accessToken, expires },
@@ -121,8 +116,7 @@ router.post(
httpOnly: true,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
secure: env.REFRESH_TOKEN_COOKIE_SECURE === 'true' ? true : false,
sameSite:
(env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
sameSite: (env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
});
}
@@ -150,9 +144,7 @@ router.post(
const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;
if (!currentRefreshToken) {
throw new InvalidPayloadException(
`"refresh_token" is required in either the JSON payload or Cookie`
);
throw new InvalidPayloadException(`"refresh_token" is required in either the JSON payload or Cookie`);
}
await authenticationService.logout(currentRefreshToken);
@@ -222,10 +214,7 @@ router.get(
respond
);
router.use(
'/oauth',
session({ secret: env.SECRET as string, saveUninitialized: false, resave: false })
);
router.use('/oauth', session({ secret: env.SECRET as string, saveUninitialized: false, resave: false }));
router.get(
'/oauth/:provider',
@@ -279,8 +268,7 @@ router.get(
httpOnly: true,
maxAge: ms(env.REFRESH_TOKEN_TTL as string),
secure: env.REFRESH_TOKEN_COOKIE_SECURE === 'true' ? true : false,
sameSite:
(env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
sameSite: (env.REFRESH_TOKEN_COOKIE_SAME_SITE as 'lax' | 'strict' | 'none') || 'strict',
});
return res.redirect(redirect);

View File

@@ -52,8 +52,7 @@ router.get(
schema: req.schema,
});
if (req.params.field in req.schema[req.params.collection].columns === false)
throw new ForbiddenException();
if (req.params.field in req.schema[req.params.collection].columns === false) throw new ForbiddenException();
const field = await service.readOne(req.params.collection, req.params.field);
@@ -80,8 +79,7 @@ router.post(
'/:collection',
validateCollection,
asyncHandler(async (req, res, next) => {
if (!req.body.schema && !req.body.meta)
throw new InvalidPayloadException(`"schema" or "meta" is required`);
if (!req.body.schema && !req.body.meta) throw new InvalidPayloadException(`"schema" or "meta" is required`);
const service = new FieldsService({
accountability: req.accountability,

View File

@@ -68,11 +68,7 @@ const multipartHandler = asyncHandler(async (req, res, next) => {
};
try {
const primaryKey = await service.upload(
fileStream,
payloadWithRequiredFields,
existingPrimaryKey
);
const primaryKey = await service.upload(fileStream, payloadWithRequiredFields, existingPrimaryKey);
savedFiles.push(primaryKey);
tryDone();
} catch (error) {

View File

@@ -2,11 +2,7 @@ import express from 'express';
import asyncHandler from 'express-async-handler';
import collectionExists from '../middleware/collection-exists';
import { ItemsService, MetaService } from '../services';
import {
RouteNotFoundException,
ForbiddenException,
FailedValidationException,
} from '../exceptions';
import { RouteNotFoundException, ForbiddenException, FailedValidationException } from '../exceptions';
import { respond } from '../middleware/respond';
import { InvalidPayloadException } from '../exceptions';
import { PrimaryKey } from '../types';

View File

@@ -2,11 +2,7 @@ import express from 'express';
import asyncHandler from 'express-async-handler';
import { PermissionsService, MetaService } from '../services';
import { clone } from 'lodash';
import {
InvalidCredentialsException,
ForbiddenException,
InvalidPayloadException,
} from '../exceptions';
import { InvalidCredentialsException, ForbiddenException, InvalidPayloadException } from '../exceptions';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';
import { PrimaryKey } from '../types';

View File

@@ -1,11 +1,7 @@
import express from 'express';
import asyncHandler from 'express-async-handler';
import Joi from 'joi';
import {
InvalidPayloadException,
InvalidCredentialsException,
ForbiddenException,
} from '../exceptions';
import { InvalidPayloadException, InvalidCredentialsException, ForbiddenException } from '../exceptions';
import { UsersService, MetaService, AuthenticationService } from '../services';
import useCollection from '../middleware/use-collection';
import { respond } from '../middleware/respond';
@@ -205,10 +201,7 @@ router.delete(
);
const inviteSchema = Joi.object({
email: Joi.alternatives(
Joi.string().email(),
Joi.array().items(Joi.string().email())
).required(),
email: Joi.alternatives(Joi.string().email(), Joi.array().items(Joi.string().email())).required(),
role: Joi.string().uuid({ version: 'uuidv4' }).required(),
});

View File

@@ -1206,8 +1206,7 @@ const systemFields = [
text: 'Weak Minimum 8 Characters',
},
{
value:
"/(?=^.{8,}$)(?=.*\\d)(?=.*[a-z])(?=.*[A-Z])(?=.*[!@#$%^&*()_+}{';'?>.<,])(?!.*\\s).*$/",
value: "/(?=^.{8,}$)(?=.*\\d)(?=.*[a-z])(?=.*[A-Z])(?=.*[!@#$%^&*()_+}{';'?>.<,])(?!.*\\s).*$/",
text: 'Strong Upper / Lowercase / Numbers / Special',
},
],
@@ -1643,10 +1642,7 @@ const systemFields = [
export async function up(knex: Knex) {
const fieldKeys = uniq(systemFields.map((field: any) => field.field));
await knex('directus_fields')
.delete()
.where('collection', 'like', 'directus_%')
.whereIn('field', fieldKeys);
await knex('directus_fields').delete().where('collection', 'like', 'directus_%').whereIn('field', fieldKeys);
}
export async function down(knex: Knex) {

View File

@@ -145,11 +145,7 @@ export async function down(knex: Knex) {
for (const constraint of update.constraints) {
table.dropForeign([constraint.column]);
table
.foreign(constraint.column)
.references(constraint.references)
.onUpdate('NO ACTION')
.onDelete('NO ACTION');
table.foreign(constraint.column).references(constraint.references).onUpdate('NO ACTION').onDelete('NO ACTION');
}
});
}

View File

@@ -15,18 +15,13 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
const customMigrationsPath = path.resolve(env.EXTENSIONS_PATH, 'migrations');
const customMigrationFiles =
((await fse.pathExists(customMigrationsPath)) &&
(await fse.readdir(customMigrationsPath))) ||
[];
((await fse.pathExists(customMigrationsPath)) && (await fse.readdir(customMigrationsPath))) || [];
migrationFiles = migrationFiles.filter(
(file: string) => file.startsWith('run') === false && file.endsWith('.d.ts') === false
);
const completedMigrations = await database
.select<Migration[]>('*')
.from('directus_migrations')
.orderBy('version');
const completedMigrations = await database.select<Migration[]>('*').from('directus_migrations').orderBy('version');
const migrations = [
...migrationFiles.map((path) => parseFilePath(path)),
@@ -39,9 +34,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
const completed = !!completedMigrations.find((migration) => migration.version === version);
return {
file: custom
? path.join(customMigrationsPath, filePath)
: path.join(__dirname, filePath),
file: custom ? path.join(customMigrationsPath, filePath) : path.join(__dirname, filePath),
version,
name,
completed,
@@ -71,9 +64,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
const { up } = require(nextVersion.file);
await up(database);
await database
.insert({ version: nextVersion.version, name: nextVersion.name })
.into('directus_migrations');
await database.insert({ version: nextVersion.version, name: nextVersion.name }).into('directus_migrations');
}
async function down() {
@@ -83,9 +74,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
throw Error('Nothing to downgrade');
}
const migration = migrations.find(
(migration) => migration.version === currentVersion.version
);
const migration = migrations.find((migration) => migration.version === currentVersion.version);
if (!migration) {
throw new Error('Couldnt find migration');
@@ -101,9 +90,7 @@ export default async function run(database: Knex, direction: 'up' | 'down' | 'la
if (migration.completed === false) {
const { up } = require(migration.file);
await up(database);
await database
.insert({ version: migration.version, name: migration.name })
.into('directus_migrations');
await database.insert({ version: migration.version, name: migration.name }).into('directus_migrations');
}
}
}

View File

@@ -35,11 +35,7 @@ export default async function runAST(
return await run(ast.name, ast.children, options?.query || ast.query);
}
async function run(
collection: string,
children: (NestedCollectionNode | FieldNode)[],
query: Query
) {
async function run(collection: string, children: (NestedCollectionNode | FieldNode)[], query: Query) {
// Retrieve the database columns to select in the current AST
const { columnsToSelect, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel(
collection,
@@ -48,14 +44,7 @@ export default async function runAST(
);
// The actual knex query builder instance. This is a promise that resolves with the raw items from the db
const dbQuery = await getDBQuery(
knex,
collection,
columnsToSelect,
query,
primaryKeyField,
schema
);
const dbQuery = await getDBQuery(knex, collection, columnsToSelect, query, primaryKeyField, schema);
const rawItems: Item | Item[] = await dbQuery;
@@ -170,10 +159,7 @@ async function getDBQuery(
return dbQuery;
}
function applyParentFilters(
nestedCollectionNodes: NestedCollectionNode[],
parentItem: Item | Item[]
) {
function applyParentFilters(nestedCollectionNodes: NestedCollectionNode[], parentItem: Item | Item[]) {
const parentItems = toArray(parentItem);
for (const nestedNode of nestedCollectionNodes) {
@@ -185,9 +171,7 @@ function applyParentFilters(
filter: {
...(nestedNode.query.filter || {}),
[nestedNode.relation.one_primary!]: {
_in: uniq(parentItems.map((res) => res[nestedNode.relation.many_field])).filter(
(id) => id
),
_in: uniq(parentItems.map((res) => res[nestedNode.relation.many_field])).filter((id) => id),
},
},
};
@@ -263,8 +247,7 @@ function mergeWithParentItems(
if (Array.isArray(nestedItem[nestedNode.relation.many_field])) return true;
return (
nestedItem[nestedNode.relation.many_field] ==
parentItem[nestedNode.relation.one_primary!] ||
nestedItem[nestedNode.relation.many_field] == parentItem[nestedNode.relation.one_primary!] ||
nestedItem[nestedNode.relation.many_field]?.[nestedNode.relation.one_primary!] ==
parentItem[nestedNode.relation.one_primary!]
);
@@ -282,13 +265,9 @@ function mergeWithParentItems(
for (const parentItem of parentItems) {
const relatedCollection = parentItem[nestedNode.relation.one_collection_field!];
const itemChild = (nestedItem as Record<string, any[]>)[relatedCollection].find(
(nestedItem) => {
return (
nestedItem[nestedNode.relatedKey[relatedCollection]] == parentItem[nestedNode.fieldKey]
);
}
);
const itemChild = (nestedItem as Record<string, any[]>)[relatedCollection].find((nestedItem) => {
return nestedItem[nestedNode.relatedKey[relatedCollection]] == parentItem[nestedNode.fieldKey];
});
parentItem[nestedNode.fieldKey] = itemChild || null;
}
@@ -340,10 +319,7 @@ function removeTemporaryFields(
);
}
item =
fields[relatedCollection].length > 0
? pick(rawItem, fields[relatedCollection])
: rawItem[primaryKeyField];
item = fields[relatedCollection].length > 0 ? pick(rawItem, fields[relatedCollection]) : rawItem[primaryKeyField];
items.push(item);
}
@@ -369,9 +345,7 @@ function removeTemporaryFields(
item[nestedNode.fieldKey] = removeTemporaryFields(
item[nestedNode.fieldKey],
nestedNode,
nestedNode.type === 'm2o'
? nestedNode.relation.one_primary!
: nestedNode.relation.many_primary,
nestedNode.type === 'm2o' ? nestedNode.relation.one_primary! : nestedNode.relation.many_primary,
item
);
}

View File

@@ -86,9 +86,7 @@ export default async function runSeed(database: Knex) {
}
if (columnInfo.references) {
column
.references(columnInfo.references.column)
.inTable(columnInfo.references.table);
column.references(columnInfo.references.column).inTable(columnInfo.references.table);
}
}
});

View File

@@ -4,8 +4,6 @@ import { CollectionMeta } from '../../../types';
const systemData = requireYAML(require.resolve('./collections.yaml'));
export const systemCollectionRows: CollectionMeta[] = systemData.data.map(
(row: Record<string, any>) => {
return merge({ system: true }, systemData.defaults, row);
}
);
export const systemCollectionRows: CollectionMeta[] = systemData.data.map((row: Record<string, any>) => {
return merge({ system: true }, systemData.defaults, row);
});

View File

@@ -3,6 +3,6 @@ import { EventEmitter2 } from 'eventemitter2';
const emitter = new EventEmitter2({ wildcard: true, verboseMemoryLeak: true, delimiter: '.' });
// No-op function to ensure we never end up with no data
emitter.on('*.*.before', input => input);
emitter.on('*.*.before', (input) => input);
export default emitter;

View File

@@ -62,8 +62,7 @@ function processValues(env: Record<string, any>) {
if (value === 'true') env[key] = true;
if (value === 'false') env[key] = false;
if (value === 'null') env[key] = null;
if (String(value).startsWith('0') === false && isNaN(value) === false && value.length > 0)
env[key] = Number(value);
if (String(value).startsWith('0') === false && isNaN(value) === false && value.length > 0) env[key] = Number(value);
}
return env;

View File

@@ -33,12 +33,9 @@ export async function listExtensions(type: string) {
return await listFolders(location);
} catch (err) {
if (err.code === 'ENOENT') {
throw new ServiceUnavailableException(
`Extension folder "extensions/${type}" couldn't be opened`,
{
service: 'extensions',
}
);
throw new ServiceUnavailableException(`Extension folder "extensions/${type}" couldn't be opened`, {
service: 'extensions',
});
}
throw err;
}
@@ -78,9 +75,7 @@ function registerHooks(hooks: string[]) {
function registerHook(hook: string) {
const hookPath = path.resolve(extensionsPath, 'hooks', hook, 'index.js');
const hookInstance:
| HookRegisterFunction
| { default?: HookRegisterFunction } = require(hookPath);
const hookInstance: HookRegisterFunction | { default?: HookRegisterFunction } = require(hookPath);
let register: HookRegisterFunction = hookInstance as HookRegisterFunction;
if (typeof hookInstance !== 'function') {
@@ -110,9 +105,7 @@ function registerEndpoints(endpoints: string[], router: Router) {
function registerEndpoint(endpoint: string) {
const endpointPath = path.resolve(extensionsPath, 'endpoints', endpoint, 'index.js');
const endpointInstance:
| EndpointRegisterFunction
| { default?: EndpointRegisterFunction } = require(endpointPath);
const endpointInstance: EndpointRegisterFunction | { default?: EndpointRegisterFunction } = require(endpointPath);
let register: EndpointRegisterFunction = endpointInstance as EndpointRegisterFunction;
if (typeof endpointInstance !== 'function') {

View File

@@ -74,9 +74,7 @@ const authenticate: RequestHandler = asyncHandler(async (req, res, next) => {
}
if (req.accountability?.user) {
await database('directus_users')
.update({ last_access: new Date() })
.where({ id: req.accountability.user });
await database('directus_users').update({ last_access: new Date() }).where({ id: req.accountability.user });
}
return next();

View File

@@ -12,7 +12,6 @@ export const checkIP: RequestHandler = asyncHandler(async (req, res, next) => {
const ipAllowlist = (role?.ip_access || '').split(',').filter((ip: string) => ip);
if (ipAllowlist.length > 0 && ipAllowlist.includes(req.accountability!.ip) === false)
throw new InvalidIPException();
if (ipAllowlist.length > 0 && ipAllowlist.includes(req.accountability!.ip) === false) throw new InvalidIPException();
return next();
});

View File

@@ -27,13 +27,10 @@ if (env.RATE_LIMITER_ENABLED === true) {
if (rateLimiterRes instanceof Error) throw rateLimiterRes;
res.set('Retry-After', String(rateLimiterRes.msBeforeNext / 1000));
throw new HitRateLimitException(
`Too many requests, retry after ${ms(rateLimiterRes.msBeforeNext)}.`,
{
limit: +env.RATE_LIMITER_POINTS,
reset: new Date(Date.now() + rateLimiterRes.msBeforeNext),
}
);
throw new HitRateLimitException(`Too many requests, retry after ${ms(rateLimiterRes.msBeforeNext)}.`, {
limit: +env.RATE_LIMITER_POINTS,
reset: new Date(Date.now() + rateLimiterRes.msBeforeNext),
});
}
next();
@@ -56,25 +53,18 @@ function getRateLimiter() {
function getConfig(store?: 'memory'): IRateLimiterOptions;
function getConfig(store: 'redis' | 'memcache'): IRateLimiterStoreOptions;
function getConfig(
store: 'memory' | 'redis' | 'memcache' = 'memory'
): IRateLimiterOptions | IRateLimiterStoreOptions {
function getConfig(store: 'memory' | 'redis' | 'memcache' = 'memory'): IRateLimiterOptions | IRateLimiterStoreOptions {
const config: any = getConfigFromEnv('RATE_LIMITER_', `RATE_LIMITER_${store}_`);
if (store === 'redis') {
const Redis = require('ioredis');
delete config.redis;
config.storeClient = new Redis(
env.RATE_LIMITER_REDIS || getConfigFromEnv('RATE_LIMITER_REDIS_')
);
config.storeClient = new Redis(env.RATE_LIMITER_REDIS || getConfigFromEnv('RATE_LIMITER_REDIS_'));
}
if (store === 'memcache') {
const Memcached = require('memcached');
config.storeClient = new Memcached(
env.RATE_LIMITER_MEMCACHE,
getConfigFromEnv('RATE_LIMITER_MEMCACHE_')
);
config.storeClient = new Memcached(env.RATE_LIMITER_MEMCACHE, getConfigFromEnv('RATE_LIMITER_MEMCACHE_'));
}
delete config.enabled;

View File

@@ -7,12 +7,7 @@ import { Transform, transforms } from 'json2csv';
import { PassThrough } from 'stream';
export const respond: RequestHandler = asyncHandler(async (req, res) => {
if (
req.method.toLowerCase() === 'get' &&
env.CACHE_ENABLED === true &&
cache &&
!req.sanitizedQuery.export
) {
if (req.method.toLowerCase() === 'get' && env.CACHE_ENABLED === true && cache && !req.sanitizedQuery.export) {
const key = getCacheKey(req);
await cache.set(key, res.locals.payload);
}

View File

@@ -8,9 +8,7 @@ const getSchema: RequestHandler = asyncHandler(async (req, res, next) => {
for (const [collection, info] of Object.entries(schemaOverview)) {
if (!info.primary) {
logger.warn(
`Collection "${collection}" doesn't have a primary key column and will be ignored`
);
logger.warn(`Collection "${collection}" doesn't have a primary key column and will be ignored`);
delete schemaOverview[collection];
}
}

View File

@@ -37,10 +37,7 @@ export default async function createServer() {
// Compatibility when supporting serving with certificates
const protocol = server instanceof https.Server ? 'https' : 'http';
const url = new URL(
(req.originalUrl || req.url) as string,
`${protocol}://${req.headers.host}`
);
const url = new URL((req.originalUrl || req.url) as string, `${protocol}://${req.headers.host}`);
const query = url.search.startsWith('?') ? url.search.substr(1) : url.search;
const info = {
@@ -62,10 +59,7 @@ export default async function createServer() {
size: metrics.out,
headers: res.getHeaders(),
},
ip:
req.headers['x-forwarded-for'] ||
req.connection?.remoteAddress ||
req.socket?.remoteAddress,
ip: req.headers['x-forwarded-for'] || req.connection?.remoteAddress || req.socket?.remoteAddress,
duration: elapsedMilliseconds.toFixed(),
};

View File

@@ -3,11 +3,7 @@ import jwt from 'jsonwebtoken';
import argon2 from 'argon2';
import { nanoid } from 'nanoid';
import ms from 'ms';
import {
InvalidCredentialsException,
InvalidPayloadException,
InvalidOTPException,
} from '../exceptions';
import { InvalidCredentialsException, InvalidPayloadException, InvalidOTPException } from '../exceptions';
import { Session, Accountability, AbstractServiceOptions, Action } from '../types';
import Knex from 'knex';
import { ActivityService } from '../services/activity';
@@ -158,21 +154,13 @@ export class AuthenticationService {
}
async generateOTPAuthURL(pk: string, secret: string) {
const user = await this.knex
.select('first_name', 'last_name')
.from('directus_users')
.where({ id: pk })
.first();
const user = await this.knex.select('first_name', 'last_name').from('directus_users').where({ id: pk }).first();
const name = `${user.first_name} ${user.last_name}`;
return authenticator.keyuri(name, 'Directus', secret);
}
async verifyOTP(pk: string, otp: string): Promise<boolean> {
const user = await this.knex
.select('tfa_secret')
.from('directus_users')
.where({ id: pk })
.first();
const user = await this.knex.select('tfa_secret').from('directus_users').where({ id: pk }).first();
if (!user.tfa_secret) {
throw new InvalidPayloadException(`User "${pk}" doesn't have TFA enabled.`);
@@ -183,11 +171,7 @@ export class AuthenticationService {
}
async verifyPassword(pk: string, password: string) {
const userRecord = await this.knex
.select('password')
.from('directus_users')
.where({ id: pk })
.first();
const userRecord = await this.knex.select('password').from('directus_users').where({ id: pk }).first();
if (!userRecord || !userRecord.password) {
throw new InvalidCredentialsException();

View File

@@ -56,27 +56,19 @@ export class AuthorizationService {
)) as Permission[];
// If the permissions don't match the collections, you don't have permission to read all of them
const uniqueCollectionsRequestedCount = uniq(
collectionsRequested.map(({ collection }) => collection)
).length;
const uniqueCollectionsRequestedCount = uniq(collectionsRequested.map(({ collection }) => collection)).length;
if (uniqueCollectionsRequestedCount !== permissionsForCollections.length) {
// Find the first collection that doesn't have permissions configured
const { collection, field } = collectionsRequested.find(
({ collection }) =>
permissionsForCollections.find(
(permission) => permission.collection === collection
) === undefined
permissionsForCollections.find((permission) => permission.collection === collection) === undefined
)!;
if (field) {
throw new ForbiddenException(
`You don't have permission to access the "${field}" field.`
);
throw new ForbiddenException(`You don't have permission to access the "${field}" field.`);
} else {
throw new ForbiddenException(
`You don't have permission to access the "${collection}" collection.`
);
throw new ForbiddenException(`You don't have permission to access the "${collection}" collection.`);
}
}
@@ -88,15 +80,11 @@ export class AuthorizationService {
/**
* Traverses the AST and returns an array of all collections that are being fetched
*/
function getCollectionsFromAST(
ast: AST | NestedCollectionNode
): { collection: string; field: string }[] {
function getCollectionsFromAST(ast: AST | NestedCollectionNode): { collection: string; field: string }[] {
const collections = [];
if (ast.type === 'm2a') {
collections.push(
...ast.names.map((name) => ({ collection: name, field: ast.fieldKey }))
);
collections.push(...ast.names.map((name) => ({ collection: name, field: ast.fieldKey })));
/** @TODO add nestedNode */
} else {
@@ -121,9 +109,7 @@ export class AuthorizationService {
const collection = ast.name;
// We check the availability of the permissions in the step before this is run
const permissions = permissionsForCollections.find(
(permission) => permission.collection === collection
)!;
const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!;
const allowedFields = permissions.fields || [];
@@ -138,9 +124,7 @@ export class AuthorizationService {
const fieldKey = childNode.name;
if (allowedFields.includes(fieldKey) === false) {
throw new ForbiddenException(
`You don't have permission to access the "${fieldKey}" field.`
);
throw new ForbiddenException(`You don't have permission to access the "${fieldKey}" field.`);
}
}
}
@@ -155,9 +139,7 @@ export class AuthorizationService {
const collection = ast.name;
// We check the availability of the permissions in the step before this is run
const permissions = permissionsForCollections.find(
(permission) => permission.collection === collection
)!;
const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!;
const parsedPermissions = parseFilter(permissions.permissions, accountability);
@@ -174,9 +156,7 @@ export class AuthorizationService {
if (ast.query.filter._and.length === 0) delete ast.query.filter._and;
if (permissions.limit && ast.query.limit && ast.query.limit > permissions.limit) {
throw new ForbiddenException(
`You can't read more than ${permissions.limit} items at a time.`
);
throw new ForbiddenException(`You can't read more than ${permissions.limit} items at a time.`);
}
// Default to the permissions limit if limit hasn't been set
@@ -197,16 +177,8 @@ export class AuthorizationService {
/**
* Checks if the provided payload matches the configured permissions, and adds the presets to the payload.
*/
validatePayload(
action: PermissionsAction,
collection: string,
payloads: Partial<Item>[]
): Promise<Partial<Item>[]>;
validatePayload(
action: PermissionsAction,
collection: string,
payload: Partial<Item>
): Promise<Partial<Item>>;
validatePayload(action: PermissionsAction, collection: string, payloads: Partial<Item>[]): Promise<Partial<Item>[]>;
validatePayload(action: PermissionsAction, collection: string, payload: Partial<Item>): Promise<Partial<Item>>;
async validatePayload(
action: PermissionsAction,
collection: string,
@@ -239,10 +211,7 @@ export class AuthorizationService {
if (!permission) throw new ForbiddenException();
permission = (await this.payloadService.processValues(
'read',
permission as Item
)) as Permission;
permission = (await this.payloadService.processValues('read', permission as Item)) as Permission;
// Check if you have permission to access the fields you're trying to access
@@ -251,9 +220,7 @@ export class AuthorizationService {
if (allowedFields.includes('*') === false) {
for (const payload of payloads) {
const keysInData = Object.keys(payload);
const invalidKeys = keysInData.filter(
(fieldKey) => allowedFields.includes(fieldKey) === false
);
const invalidKeys = keysInData.filter((fieldKey) => allowedFields.includes(fieldKey) === false);
if (invalidKeys.length > 0) {
throw new ForbiddenException(
@@ -280,24 +247,16 @@ export class AuthorizationService {
.where({ collection, field: column.column_name })
.first()) ||
systemFieldRows.find(
(fieldMeta) =>
fieldMeta.field === column.column_name &&
fieldMeta.collection === collection
(fieldMeta) => fieldMeta.field === column.column_name && fieldMeta.collection === collection
);
const specials = field?.special ? toArray(field.special) : [];
const hasGenerateSpecial = [
'uuid',
'date-created',
'role-created',
'user-created',
].some((name) => specials.includes(name));
const hasGenerateSpecial = ['uuid', 'date-created', 'role-created', 'user-created'].some((name) =>
specials.includes(name)
);
const isRequired =
column.is_nullable === false &&
column.default_value === null &&
hasGenerateSpecial === false;
const isRequired = column.is_nullable === false && column.default_value === null && hasGenerateSpecial === false;
if (isRequired) {
requiredColumns.push(column.column_name);
@@ -350,9 +309,7 @@ export class AuthorizationService {
if (Object.keys(validation)[0] === '_and') {
const subValidation = Object.values(validation)[0];
const nestedErrors = flatten<FailedValidationException>(
subValidation.map((subObj: Record<string, any>) =>
this.validateJoi(subObj, payloads)
)
subValidation.map((subObj: Record<string, any>) => this.validateJoi(subObj, payloads))
).filter((err?: FailedValidationException) => err);
errors.push(...nestedErrors);
}
@@ -360,9 +317,7 @@ export class AuthorizationService {
if (Object.keys(validation)[0] === '_or') {
const subValidation = Object.values(validation)[0];
const nestedErrors = flatten<FailedValidationException>(
subValidation.map((subObj: Record<string, any>) =>
this.validateJoi(subObj, payloads)
)
subValidation.map((subObj: Record<string, any>) => this.validateJoi(subObj, payloads))
);
const allErrored = nestedErrors.every((err?: FailedValidationException) => err);
@@ -377,20 +332,14 @@ export class AuthorizationService {
const { error } = schema.validate(payload, { abortEarly: false });
if (error) {
errors.push(
...error.details.map((details) => new FailedValidationException(details))
);
errors.push(...error.details.map((details) => new FailedValidationException(details)));
}
}
return errors;
}
async checkAccess(
action: PermissionsAction,
collection: string,
pk: PrimaryKey | PrimaryKey[]
) {
async checkAccess(action: PermissionsAction, collection: string, pk: PrimaryKey | PrimaryKey[]) {
if (this.accountability?.admin === true) return;
const itemsService = new ItemsService(collection, {
@@ -409,14 +358,11 @@ export class AuthorizationService {
if (!result) throw '';
if (Array.isArray(pk) && pk.length > 1 && result.length !== pk.length) throw '';
} catch {
throw new ForbiddenException(
`You're not allowed to ${action} item "${pk}" in collection "${collection}".`,
{
collection,
item: pk,
action,
}
);
throw new ForbiddenException(`You're not allowed to ${action} item "${pk}" in collection "${collection}".`, {
collection,
item: pk,
action,
});
}
}
}

View File

@@ -1,12 +1,5 @@
import database, { schemaInspector } from '../database';
import {
AbstractServiceOptions,
Accountability,
Collection,
CollectionMeta,
Relation,
SchemaOverview,
} from '../types';
import { AbstractServiceOptions, Accountability, Collection, CollectionMeta, Relation, SchemaOverview } from '../types';
import Knex from 'knex';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import { FieldsService } from '../services/fields';
@@ -78,9 +71,7 @@ export class CollectionsService {
}
if (payload.collection in this.schema) {
throw new InvalidPayloadException(
`Collection "${payload.collection}" already exists.`
);
throw new InvalidPayloadException(`Collection "${payload.collection}" already exists.`);
}
await trx.schema.createTable(payload.collection, (table) => {
@@ -94,9 +85,7 @@ export class CollectionsService {
collection: payload.collection,
});
const fieldPayloads = payload
.fields!.filter((field) => field.meta)
.map((field) => field.meta);
const fieldPayloads = payload.fields!.filter((field) => field.meta).map((field) => field.meta);
await fieldItemsService.create(fieldPayloads);
@@ -131,15 +120,11 @@ export class CollectionsService {
.whereIn('collection', collectionKeys);
if (collectionKeys.length !== permissions.length) {
const collectionsYouHavePermissionToRead = permissions.map(
({ collection }) => collection
);
const collectionsYouHavePermissionToRead = permissions.map(({ collection }) => collection);
for (const collectionKey of collectionKeys) {
if (collectionsYouHavePermissionToRead.includes(collectionKey) === false) {
throw new ForbiddenException(
`You don't have access to the "${collectionKey}" collection.`
);
throw new ForbiddenException(`You don't have access to the "${collectionKey}" collection.`);
}
}
}
@@ -218,10 +203,7 @@ export class CollectionsService {
update(data: Partial<Collection>, keys: string[]): Promise<string[]>;
update(data: Partial<Collection>, key: string): Promise<string>;
update(data: Partial<Collection>[]): Promise<string[]>;
async update(
data: Partial<Collection> | Partial<Collection>[],
key?: string | string[]
): Promise<string | string[]> {
async update(data: Partial<Collection> | Partial<Collection>[], key?: string | string[]): Promise<string | string[]> {
const collectionItemsService = new ItemsService('directus_collections', {
knex: this.knex,
accountability: this.accountability,
@@ -239,11 +221,8 @@ export class CollectionsService {
for (const key of keys) {
const exists =
(await this.knex
.select('collection')
.from('directus_collections')
.where({ collection: key })
.first()) !== undefined;
(await this.knex.select('collection').from('directus_collections').where({ collection: key }).first()) !==
undefined;
if (exists) {
await collectionItemsService.update(payload.meta, key);

View File

@@ -1,12 +1,6 @@
import database, { schemaInspector } from '../database';
import { Field } from '../types/field';
import {
Accountability,
AbstractServiceOptions,
FieldMeta,
Relation,
SchemaOverview,
} from '../types';
import { Accountability, AbstractServiceOptions, FieldMeta, Relation, SchemaOverview } from '../types';
import { ItemsService } from '../services/items';
import { ColumnBuilder } from 'knex';
import getLocalType from '../utils/get-local-type';
@@ -53,9 +47,7 @@ export class FieldsService {
limit: -1,
})) as FieldMeta[];
fields.push(
...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === collection)
);
fields.push(...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === collection));
} else {
fields = (await nonAuthorizedItemsService.readByQuery({ limit: -1 })) as FieldMeta[];
fields.push(...systemFieldRows);
@@ -92,14 +84,10 @@ export class FieldsService {
aliasQuery.andWhere('collection', collection);
}
let aliasFields = [
...((await this.payloadService.processValues('read', await aliasQuery)) as FieldMeta[]),
];
let aliasFields = [...((await this.payloadService.processValues('read', await aliasQuery)) as FieldMeta[])];
if (collection) {
aliasFields.push(
...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === collection)
);
aliasFields.push(...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === collection));
} else {
aliasFields.push(...systemFieldRows);
}
@@ -139,9 +127,7 @@ export class FieldsService {
const allowedFieldsInCollection: Record<string, string[]> = {};
permissions.forEach((permission) => {
allowedFieldsInCollection[permission.collection] = (permission.fields || '').split(
','
);
allowedFieldsInCollection[permission.collection] = (permission.fields || '').split(',');
});
if (collection && allowedFieldsInCollection.hasOwnProperty(collection) === false) {
@@ -149,8 +135,7 @@ export class FieldsService {
}
return result.filter((field) => {
if (allowedFieldsInCollection.hasOwnProperty(field.collection) === false)
return false;
if (allowedFieldsInCollection.hasOwnProperty(field.collection) === false) return false;
const allowedFields = allowedFieldsInCollection[field.collection];
if (allowedFields[0] === '*') return true;
return allowedFields.includes(field.field);
@@ -180,11 +165,7 @@ export class FieldsService {
}
let column;
let fieldInfo = await this.knex
.select('*')
.from('directus_fields')
.where({ collection, field })
.first();
let fieldInfo = await this.knex.select('*').from('directus_fields').where({ collection, field }).first();
if (fieldInfo) {
fieldInfo = (await this.payloadService.processValues('read', fieldInfo)) as FieldMeta[];
@@ -192,9 +173,7 @@ export class FieldsService {
fieldInfo =
fieldInfo ||
systemFieldRows.find(
(fieldMeta) => fieldMeta.collection === collection && fieldMeta.field === field
);
systemFieldRows.find((fieldMeta) => fieldMeta.collection === collection && fieldMeta.field === field);
try {
column = await this.schemaInspector.columnInfo(collection, field);
@@ -223,19 +202,11 @@ export class FieldsService {
// Check if field already exists, either as a column, or as a row in directus_fields
if (field.field in this.schema[collection].columns) {
throw new InvalidPayloadException(
`Field "${field.field}" already exists in collection "${collection}"`
);
throw new InvalidPayloadException(`Field "${field.field}" already exists in collection "${collection}"`);
} else if (
!!(await this.knex
.select('id')
.from('directus_fields')
.where({ collection, field: field.field })
.first())
!!(await this.knex.select('id').from('directus_fields').where({ collection, field: field.field }).first())
) {
throw new InvalidPayloadException(
`Field "${field.field}" already exists in collection "${collection}"`
);
throw new InvalidPayloadException(`Field "${field.field}" already exists in collection "${collection}"`);
}
if (field.schema) {
@@ -275,17 +246,10 @@ export class FieldsService {
if (!field.schema) return;
if (field.type === 'string') {
column = table.string(
field.field,
field.schema.max_length !== null ? field.schema.max_length : undefined
);
column = table.string(field.field, field.schema.max_length !== null ? field.schema.max_length : undefined);
} else if (['float', 'decimal'].includes(field.type)) {
const type = field.type as 'float' | 'decimal';
column = table[type](
field.field,
field.schema?.numeric_precision || 10,
field.schema?.numeric_scale || 5
);
column = table[type](field.field, field.schema?.numeric_precision || 10, field.schema?.numeric_scale || 5);
} else if (field.type === 'csv') {
column = table.string(field.field);
} else {
@@ -293,10 +257,7 @@ export class FieldsService {
}
if (field.schema.default_value !== undefined) {
if (
typeof field.schema.default_value === 'string' &&
field.schema.default_value.toLowerCase() === 'now()'
) {
if (typeof field.schema.default_value === 'string' && field.schema.default_value.toLowerCase() === 'now()') {
column.defaultTo(this.knex.fn.now());
} else {
column.defaultTo(field.schema.default_value);
@@ -371,9 +332,7 @@ export class FieldsService {
/** @TODO M2A — Handle m2a case here */
if (isM2O) {
await this.knex('directus_relations')
.delete()
.where({ many_collection: collection, many_field: field });
await this.knex('directus_relations').delete().where({ many_collection: collection, many_field: field });
await this.deleteField(relation.one_collection!, relation.one_field!);
} else {
await this.knex('directus_relations')

View File

@@ -38,8 +38,7 @@ export class FilesService extends ItemsService {
primaryKey = await this.create(payload);
}
const fileExtension =
(payload.type && extension(payload.type)) || path.extname(payload.filename_download);
const fileExtension = (payload.type && extension(payload.type)) || path.extname(payload.filename_download);
payload.filename_disk = primaryKey + '.' + fileExtension;

View File

@@ -1,14 +1,6 @@
import Knex from 'knex';
import database from '../database';
import {
AbstractServiceOptions,
Accountability,
Collection,
Field,
Relation,
Query,
SchemaOverview,
} from '../types';
import { AbstractServiceOptions, Accountability, Collection, Field, Relation, Query, SchemaOverview } from '../types';
import {
GraphQLString,
GraphQLSchema,
@@ -91,11 +83,7 @@ export class GraphQLService {
const fieldsInSystem = await this.fieldsService.readAll();
const relationsInSystem = (await this.relationsService.readByQuery({})) as Relation[];
const schema = this.getGraphQLSchema(
collectionsInSystem,
fieldsInSystem,
relationsInSystem
);
const schema = this.getGraphQLSchema(collectionsInSystem, fieldsInSystem, relationsInSystem);
return schema;
}
@@ -113,17 +101,13 @@ export class GraphQLService {
description: collection.meta?.note,
fields: () => {
const fieldsObject: GraphQLFieldConfigMap<any, any> = {};
const fieldsInCollection = fields.filter(
(field) => field.collection === collection.collection
);
const fieldsInCollection = fields.filter((field) => field.collection === collection.collection);
for (const field of fieldsInCollection) {
const relationForField = relations.find((relation) => {
return (
(relation.many_collection === collection.collection &&
relation.many_field === field.field) ||
(relation.one_collection === collection.collection &&
relation.one_field === field.field)
(relation.many_collection === collection.collection && relation.many_field === field.field) ||
(relation.one_collection === collection.collection && relation.one_field === field.field)
);
});
@@ -135,9 +119,7 @@ export class GraphQLService {
});
if (relationType === 'm2o') {
const relatedIsSystem = relationForField.one_collection!.startsWith(
'directus_'
);
const relatedIsSystem = relationForField.one_collection!.startsWith('directus_');
const relatedType = relatedIsSystem
? schema[relationForField.one_collection!.substring(9)].type
@@ -147,9 +129,7 @@ export class GraphQLService {
type: relatedType,
};
} else if (relationType === 'o2m') {
const relatedIsSystem = relationForField.many_collection.startsWith(
'directus_'
);
const relatedIsSystem = relationForField.many_collection.startsWith('directus_');
const relatedType = relatedIsSystem
? schema[relationForField.many_collection.substring(9)].type
@@ -170,9 +150,7 @@ export class GraphQLService {
const types: any = [];
for (const relatedCollection of relatedCollections) {
const relatedType = relatedCollection.startsWith(
'directus_'
)
const relatedType = relatedCollection.startsWith('directus_')
? schema[relatedCollection.substring(9)].type
: schema.items[relatedCollection].type;
@@ -195,9 +173,7 @@ export class GraphQLService {
}
} else {
fieldsObject[field.field] = {
type: field.schema?.is_primary_key
? GraphQLID
: getGraphQLType(field.type),
type: field.schema?.is_primary_key ? GraphQLID : getGraphQLType(field.type),
};
}
@@ -293,17 +269,13 @@ export class GraphQLService {
},
};
const fieldsInCollection = fields.filter(
(field) => field.collection === collection.collection
);
const fieldsInCollection = fields.filter((field) => field.collection === collection.collection);
for (const field of fieldsInCollection) {
const relationForField = relations.find((relation) => {
return (
(relation.many_collection === collection.collection &&
relation.many_field === field.field) ||
(relation.one_collection === collection.collection &&
relation.one_field === field.field)
(relation.many_collection === collection.collection && relation.many_field === field.field) ||
(relation.one_collection === collection.collection && relation.one_field === field.field)
);
});
@@ -332,9 +304,7 @@ export class GraphQLService {
* Figure out how to setup filter fields for a union type output
*/
} else {
const fieldType = field.schema?.is_primary_key
? GraphQLID
: getGraphQLType(field.type);
const fieldType = field.schema?.is_primary_key ? GraphQLID : getGraphQLType(field.type);
filterFields[field.field] = {
type: new GraphQLInputObjectType({
@@ -402,18 +372,13 @@ export class GraphQLService {
const collection = systemField ? `directus_${info.fieldName}` : info.fieldName;
const selections = info.fieldNodes[0]?.selectionSet?.selections?.filter(
(node) => node.kind === 'Field'
) as FieldNode[] | undefined;
const selections = info.fieldNodes[0]?.selectionSet?.selections?.filter((node) => node.kind === 'Field') as
| FieldNode[]
| undefined;
if (!selections) return null;
return await this.getData(
collection,
selections,
info.fieldNodes[0].arguments || [],
info.variableValues
);
return await this.getData(collection, selections, info.fieldNodes[0].arguments || [], info.variableValues);
}
async getData(
@@ -436,9 +401,7 @@ export class GraphQLService {
fields.push(current);
} else {
const children = parseFields(
selection.selectionSet.selections.filter(
(selection) => selection.kind === 'Field'
) as FieldNode[],
selection.selectionSet.selections.filter((selection) => selection.kind === 'Field') as FieldNode[],
current
);
fields.push(...children);
@@ -447,10 +410,7 @@ export class GraphQLService {
if (selection.arguments && selection.arguments.length > 0) {
if (!query.deep) query.deep = {};
const args: Record<string, any> = this.parseArgs(
selection.arguments,
variableValues
);
const args: Record<string, any> = this.parseArgs(selection.arguments, variableValues);
query.deep[current] = sanitizeQuery(args, this.accountability);
}
}
@@ -458,9 +418,7 @@ export class GraphQLService {
return fields;
};
query.fields = parseFields(
selections.filter((selection) => selection.kind === 'Field') as FieldNode[]
);
query.fields = parseFields(selections.filter((selection) => selection.kind === 'Field') as FieldNode[]);
let service: ItemsService;
@@ -550,18 +508,10 @@ export class GraphQLService {
}
const collectionInfo =
(await this.knex
.select('singleton')
.from('directus_collections')
.where({ collection: collection })
.first()) ||
systemCollectionRows.find(
(collectionMeta) => collectionMeta?.collection === collection
);
(await this.knex.select('singleton').from('directus_collections').where({ collection: collection }).first()) ||
systemCollectionRows.find((collectionMeta) => collectionMeta?.collection === collection);
const result = collectionInfo?.singleton
? await service.readSingleton(query)
: await service.readByQuery(query);
const result = collectionInfo?.singleton ? await service.readSingleton(query) : await service.readByQuery(query);
return result;
}
@@ -596,10 +546,7 @@ export class GraphQLService {
argsObject[argument.name.value] = values;
} else {
argsObject[argument.name.value] = (argument.value as
| IntValueNode
| StringValueNode
| BooleanValueNode).value;
argsObject[argument.name.value] = (argument.value as IntValueNode | StringValueNode | BooleanValueNode).value;
}
}

View File

@@ -37,9 +37,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
this.collection = collection;
this.knex = options.knex || database;
this.accountability = options.accountability || null;
this.eventScope = this.collection.startsWith('directus_')
? this.collection.substring(9)
: 'items';
this.eventScope = this.collection.startsWith('directus_') ? this.collection.substring(9) : 'items';
this.schema = options.schema;
return this;
@@ -60,19 +58,15 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
schema: this.schema,
});
const customProcessed = await emitter.emitAsync(
`${this.eventScope}.create.before`,
payloads,
{
event: `${this.eventScope}.create.before`,
accountability: this.accountability,
collection: this.collection,
item: null,
action: 'create',
payload: payloads,
schema: this.schema,
}
);
const customProcessed = await emitter.emitAsync(`${this.eventScope}.create.before`, payloads, {
event: `${this.eventScope}.create.before`,
accountability: this.accountability,
collection: this.collection,
item: null,
action: 'create',
payload: payloads,
schema: this.schema,
});
if (customProcessed) {
payloads = customProcessed[customProcessed.length - 1];
@@ -85,11 +79,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
schema: this.schema,
});
payloads = await authorizationService.validatePayload(
'create',
this.collection,
payloads
);
payloads = await authorizationService.validatePayload('create', this.collection, payloads);
}
payloads = await payloadService.processM2O(payloads);
@@ -97,10 +87,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
let payloadsWithoutAliases = payloads.map((payload) => pick(payload, columns));
payloadsWithoutAliases = await payloadService.processValues(
'create',
payloadsWithoutAliases
);
payloadsWithoutAliases = await payloadService.processValues('create', payloadsWithoutAliases);
const primaryKeys: PrimaryKey[] = [];
@@ -149,11 +136,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
let primaryKey;
const result = await trx
.select('id')
.from('directus_activity')
.orderBy('id', 'desc')
.first();
const result = await trx.select('id').from('directus_activity').orderBy('id', 'desc').first();
primaryKey = result.id;
@@ -213,16 +196,8 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
return records as Partial<Item> | Partial<Item>[] | null;
}
readByKey(
keys: PrimaryKey[],
query?: Query,
action?: PermissionsAction
): Promise<null | Partial<Item>[]>;
readByKey(
key: PrimaryKey,
query?: Query,
action?: PermissionsAction
): Promise<null | Partial<Item>>;
readByKey(keys: PrimaryKey[], query?: Query, action?: PermissionsAction): Promise<null | Partial<Item>[]>;
readByKey(key: PrimaryKey, query?: Query, action?: PermissionsAction): Promise<null | Partial<Item>>;
async readByKey(
key: PrimaryKey | PrimaryKey[],
query: Query = {},
@@ -285,19 +260,15 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
let payload: Partial<AnyItem> | Partial<AnyItem>[] = clone(data);
const customProcessed = await emitter.emitAsync(
`${this.eventScope}.update.before`,
const customProcessed = await emitter.emitAsync(`${this.eventScope}.update.before`, payload, {
event: `${this.eventScope}.update.before`,
accountability: this.accountability,
collection: this.collection,
item: key,
action: 'update',
payload,
{
event: `${this.eventScope}.update.before`,
accountability: this.accountability,
collection: this.collection,
item: key,
action: 'update',
payload,
schema: this.schema,
}
);
schema: this.schema,
});
if (customProcessed) {
payload = customProcessed[customProcessed.length - 1];
@@ -312,11 +283,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
await authorizationService.checkAccess('update', this.collection, keys);
payload = await authorizationService.validatePayload(
'update',
this.collection,
payload
);
payload = await authorizationService.validatePayload('update', this.collection, payload);
}
await this.knex.transaction(async (trx) => {
@@ -331,15 +298,10 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
let payloadWithoutAliases = pick(payload, columns);
payloadWithoutAliases = await payloadService.processValues(
'update',
payloadWithoutAliases
);
payloadWithoutAliases = await payloadService.processValues('update', payloadWithoutAliases);
if (Object.keys(payloadWithoutAliases).length > 0) {
await trx(this.collection)
.update(payloadWithoutAliases)
.whereIn(primaryKeyField, keys);
await trx(this.collection).update(payloadWithoutAliases).whereIn(primaryKeyField, keys);
}
for (const key of keys) {
@@ -362,11 +324,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
await trx.insert(activityRecord).into('directus_activity');
let primaryKey;
const result = await trx
.select('id')
.from('directus_activity')
.orderBy('id', 'desc')
.first();
const result = await trx.select('id').from('directus_activity').orderBy('id', 'desc').first();
primaryKey = result.id;
activityPrimaryKeys.push(primaryKey);
@@ -383,9 +341,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
collection: this.collection,
item: keys[index],
data:
snapshots && Array.isArray(snapshots)
? JSON.stringify(snapshots?.[index])
: JSON.stringify(snapshots),
snapshots && Array.isArray(snapshots) ? JSON.stringify(snapshots?.[index]) : JSON.stringify(snapshots),
delta: JSON.stringify(payloadWithoutAliases),
}));
@@ -454,9 +410,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
let itemsToUpdate = await itemsService.readByQuery(readQuery);
itemsToUpdate = toArray(itemsToUpdate);
const keys: PrimaryKey[] = itemsToUpdate.map(
(item: Partial<Item>) => item[primaryKeyField]
);
const keys: PrimaryKey[] = itemsToUpdate.map((item: Partial<Item>) => item[primaryKeyField]);
return await this.update(data, keys);
}
@@ -565,9 +519,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
let itemsToDelete = await itemsService.readByQuery(readQuery);
itemsToDelete = toArray(itemsToDelete);
const keys: PrimaryKey[] = itemsToDelete.map(
(item: Partial<Item>) => item[primaryKeyField]
);
const keys: PrimaryKey[] = itemsToDelete.map((item: Partial<Item>) => item[primaryKeyField]);
return await this.delete(keys);
}
@@ -600,11 +552,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
async upsertSingleton(data: Partial<Item>) {
const primaryKeyField = this.schema[this.collection].primary;
const record = await this.knex
.select(primaryKeyField)
.from(this.collection)
.limit(1)
.first();
const record = await this.knex.select(primaryKeyField).from(this.collection).limit(1).first();
if (record) {
return await this.update(data, record.id);

View File

@@ -7,14 +7,7 @@ import argon2 from 'argon2';
import { v4 as uuidv4 } from 'uuid';
import database from '../database';
import { clone, isObject, cloneDeep } from 'lodash';
import {
Relation,
Item,
AbstractServiceOptions,
Accountability,
PrimaryKey,
SchemaOverview,
} from '../types';
import { Relation, Item, AbstractServiceOptions, Accountability, PrimaryKey, SchemaOverview } from '../types';
import { ItemsService } from './items';
import { URL } from 'url';
import Knex from 'knex';
@@ -167,9 +160,7 @@ export class PayloadService {
.where({ collection: this.collection })
.whereNotNull('special');
specialFieldsInCollection.push(
...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === this.collection)
);
specialFieldsInCollection.push(...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === this.collection));
if (action === 'read') {
specialFieldsInCollection = specialFieldsInCollection.filter((fieldMeta) => {
@@ -181,12 +172,7 @@ export class PayloadService {
processedPayload.map(async (record: any) => {
await Promise.all(
specialFieldsInCollection.map(async (field) => {
const newValue = await this.processField(
field,
record,
action,
this.accountability
);
const newValue = await this.processField(field, record, action, this.accountability);
if (newValue !== undefined) record[field.field] = newValue;
})
);
@@ -200,12 +186,7 @@ export class PayloadService {
if (['create', 'update'].includes(action)) {
processedPayload.forEach((record) => {
for (const [key, value] of Object.entries(record)) {
if (
Array.isArray(value) ||
(typeof value === 'object' &&
value instanceof Date !== true &&
value !== null)
) {
if (Array.isArray(value) || (typeof value === 'object' && value instanceof Date !== true && value !== null)) {
record[key] = JSON.stringify(value);
}
}
@@ -219,12 +200,7 @@ export class PayloadService {
return processedPayload[0];
}
async processField(
field: FieldMeta,
payload: Partial<Item>,
action: Action,
accountability: Accountability | null
) {
async processField(field: FieldMeta, payload: Partial<Item>, action: Action, accountability: Accountability | null) {
if (!field.special) return payload[field.field];
const fieldSpecials = field.special ? toArray(field.special) : [];
@@ -256,9 +232,7 @@ export class PayloadService {
type: getLocalType(column),
}));
const dateColumns = columnsWithType.filter((column) =>
['dateTime', 'date', 'timestamp'].includes(column.type)
);
const dateColumns = columnsWithType.filter((column) => ['dateTime', 'date', 'timestamp'].includes(column.type));
if (dateColumns.length === 0) return payloads;
@@ -302,17 +276,13 @@ export class PayloadService {
*/
processA2O(payloads: Partial<Item>[]): Promise<Partial<Item>[]>;
processA2O(payloads: Partial<Item>): Promise<Partial<Item>>;
async processA2O(
payload: Partial<Item> | Partial<Item>[]
): Promise<Partial<Item> | Partial<Item>[]> {
async processA2O(payload: Partial<Item> | Partial<Item>[]): Promise<Partial<Item> | Partial<Item>[]> {
const relations = [
...(await this.knex
.select<Relation[]>('*')
.from('directus_relations')
.where({ many_collection: this.collection })),
...systemRelationRows.filter(
(systemRelation) => systemRelation.many_collection === this.collection
),
...systemRelationRows.filter((systemRelation) => systemRelation.many_collection === this.collection),
];
const payloads = clone(toArray(payload));
@@ -322,10 +292,7 @@ export class PayloadService {
// Only process related records that are actually in the payload
const relationsToProcess = relations.filter((relation) => {
return (
payload.hasOwnProperty(relation.many_field) &&
isObject(payload[relation.many_field])
);
return payload.hasOwnProperty(relation.many_field) && isObject(payload[relation.many_field]);
});
for (const relation of relationsToProcess) {
@@ -360,9 +327,7 @@ export class PayloadService {
const hasPrimaryKey = relatedRecord.hasOwnProperty(relatedPrimary);
let relatedPrimaryKey: PrimaryKey = relatedRecord[relatedPrimary];
const exists =
hasPrimaryKey &&
!!(await this.knex.select(relatedPrimary).from(relatedCollection).first());
const exists = hasPrimaryKey && !!(await this.knex.select(relatedPrimary).from(relatedCollection).first());
if (exists) {
await itemsService.update(relatedRecord, relatedPrimaryKey);
@@ -383,17 +348,13 @@ export class PayloadService {
*/
processM2O(payloads: Partial<Item>[]): Promise<Partial<Item>[]>;
processM2O(payloads: Partial<Item>): Promise<Partial<Item>>;
async processM2O(
payload: Partial<Item> | Partial<Item>[]
): Promise<Partial<Item> | Partial<Item>[]> {
async processM2O(payload: Partial<Item> | Partial<Item>[]): Promise<Partial<Item> | Partial<Item>[]> {
const relations = [
...(await this.knex
.select<Relation[]>('*')
.from('directus_relations')
.where({ many_collection: this.collection })),
...systemRelationRows.filter(
(systemRelation) => systemRelation.many_collection === this.collection
),
...systemRelationRows.filter((systemRelation) => systemRelation.many_collection === this.collection),
];
const payloads = clone(toArray(payload));
@@ -403,10 +364,7 @@ export class PayloadService {
// Only process related records that are actually in the payload
const relationsToProcess = relations.filter((relation) => {
return (
payload.hasOwnProperty(relation.many_field) &&
isObject(payload[relation.many_field])
);
return payload.hasOwnProperty(relation.many_field) && isObject(payload[relation.many_field]);
});
for (const relation of relationsToProcess) {
@@ -425,11 +383,7 @@ export class PayloadService {
let relatedPrimaryKey: PrimaryKey = relatedRecord[relation.one_primary];
const exists =
hasPrimaryKey &&
!!(await this.knex
.select(relation.one_primary)
.from(relation.one_collection)
.first());
hasPrimaryKey && !!(await this.knex.select(relation.one_primary).from(relation.one_collection).first());
if (exists) {
await itemsService.update(relatedRecord, relatedPrimaryKey);
@@ -454,9 +408,7 @@ export class PayloadService {
.select<Relation[]>('*')
.from('directus_relations')
.where({ one_collection: this.collection })),
...systemRelationRows.filter(
(systemRelation) => systemRelation.one_collection === this.collection
),
...systemRelationRows.filter((systemRelation) => systemRelation.one_collection === this.collection),
];
const payloads = clone(toArray(payload));
@@ -485,10 +437,7 @@ export class PayloadService {
for (const relatedRecord of payload[relation.one_field!] || []) {
let record = cloneDeep(relatedRecord);
if (
typeof relatedRecord === 'string' ||
typeof relatedRecord === 'number'
) {
if (typeof relatedRecord === 'string' || typeof relatedRecord === 'number') {
const exists = !!(await this.knex
.select(relation.many_primary)
.from(relation.many_collection)

View File

@@ -7,19 +7,13 @@ export class PermissionsService extends ItemsService {
}
async getAllowedCollections(role: string | null, action: PermissionsAction) {
const query = this.knex
.select('collection')
.from('directus_permissions')
.where({ role, action });
const query = this.knex.select('collection').from('directus_permissions').where({ role, action });
const results = await query;
return results.map((result) => result.collection);
}
async getAllowedFields(role: string | null, action: PermissionsAction, collection?: string) {
const query = this.knex
.select('collection', 'fields')
.from('directus_permissions')
.where({ role, action });
const query = this.knex.select('collection', 'fields').from('directus_permissions').where({ role, action });
if (collection) {
query.andWhere({ collection });

View File

@@ -26,10 +26,7 @@ export class RelationsService extends ItemsService {
knex: this.knex,
schema: this.schema,
});
const results = (await service.readByQuery(query)) as
| ParsedRelation
| ParsedRelation[]
| null;
const results = (await service.readByQuery(query)) as ParsedRelation | ParsedRelation[] | null;
if (results && Array.isArray(results)) {
results.push(...(systemRelationRows as ParsedRelation[]));
@@ -40,11 +37,7 @@ export class RelationsService extends ItemsService {
return filteredResults;
}
readByKey(
keys: PrimaryKey[],
query?: Query,
action?: PermissionsAction
): Promise<null | Relation[]>;
readByKey(keys: PrimaryKey[], query?: Query, action?: PermissionsAction): Promise<null | Relation[]>;
readByKey(key: PrimaryKey, query?: Query, action?: PermissionsAction): Promise<null | Relation>;
async readByKey(
key: PrimaryKey | PrimaryKey[],
@@ -55,10 +48,7 @@ export class RelationsService extends ItemsService {
knex: this.knex,
schema: this.schema,
});
const results = (await service.readByKey(key as any, query, action)) as
| ParsedRelation
| ParsedRelation[]
| null;
const results = (await service.readByKey(key as any, query, action)) as ParsedRelation | ParsedRelation[] | null;
// No need to merge system relations here. They don't have PKs so can never be directly
// targetted
@@ -76,10 +66,7 @@ export class RelationsService extends ItemsService {
'read'
);
const allowedFields = await this.permissionsService.getAllowedFields(
this.accountability?.role || null,
'read'
);
const allowedFields = await this.permissionsService.getAllowedFields(this.accountability?.role || null, 'read');
relations = toArray(relations);
@@ -91,18 +78,13 @@ export class RelationsService extends ItemsService {
collectionsAllowed = false;
}
if (
relation.one_collection &&
allowedCollections.includes(relation.one_collection) === false
) {
if (relation.one_collection && allowedCollections.includes(relation.one_collection) === false) {
collectionsAllowed = false;
}
if (
relation.one_allowed_collections &&
relation.one_allowed_collections.every((collection) =>
allowedCollections.includes(collection)
) === false
relation.one_allowed_collections.every((collection) => allowedCollections.includes(collection)) === false
) {
collectionsAllowed = false;
}
@@ -120,8 +102,7 @@ export class RelationsService extends ItemsService {
relation.one_field &&
(!allowedFields[relation.one_collection] ||
(allowedFields[relation.one_collection].includes('*') === false &&
allowedFields[relation.one_collection].includes(relation.one_field) ===
false))
allowedFields[relation.one_collection].includes(relation.one_field) === false))
) {
fieldsAllowed = false;
}

View File

@@ -15,8 +15,7 @@ export class RevisionsService extends ItemsService {
const revision = (await super.readByKey(pk)) as Revision | null;
if (!revision) throw new ForbiddenException();
if (!revision.data)
throw new InvalidPayloadException(`Revision doesn't contain data to revert to`);
if (!revision.data) throw new InvalidPayloadException(`Revision doesn't contain data to revert to`);
const service = new ItemsService(revision.collection, {
accountability: this.accountability,

View File

@@ -24,8 +24,7 @@ export class RolesService extends ItemsService {
.andWhere({ admin_access: true })
.first();
const otherAdminRolesCount = +(otherAdminRoles?.count || 0);
if (otherAdminRolesCount === 0)
throw new UnprocessableEntityException(`You can't delete the last admin role.`);
if (otherAdminRolesCount === 0) throw new UnprocessableEntityException(`You can't delete the last admin role.`);
// Remove all permissions associated with this role
const permissionsService = new PermissionsService({

View File

@@ -40,10 +40,7 @@ export class ServerService {
if (this.accountability?.admin === true) {
const osType = os.type() === 'Darwin' ? 'macOS' : os.type();
const osVersion =
osType === 'macOS'
? `${macosRelease().name} (${macosRelease().version})`
: os.release();
const osVersion = osType === 'macOS' ? `${macosRelease().name} (${macosRelease().version})` : os.release();
info.directus = {
version,

View File

@@ -14,13 +14,7 @@ import formatTitle from '@directus/format-title';
import { cloneDeep, mergeWith } from 'lodash';
import { RelationsService } from './relations';
import env from '../env';
import {
OpenAPIObject,
PathItemObject,
OperationObject,
TagObject,
SchemaObject,
} from 'openapi3-ts';
import { OpenAPIObject, PathItemObject, OperationObject, TagObject, SchemaObject } from 'openapi3-ts';
// @ts-ignore
import { version } from '../../package.json';
@@ -110,8 +104,7 @@ class OASService implements SpecificationSubService {
openapi: '3.0.1',
info: {
title: 'Dynamic API Specification',
description:
'This is a dynamicly generated API specification for all endpoints existing on the current .',
description: 'This is a dynamicly generated API specification for all endpoints existing on the current .',
version: version,
},
servers: [
@@ -164,18 +157,13 @@ class OASService implements SpecificationSubService {
return tags.filter((tag) => tag.name !== 'Items');
}
private async generatePaths(
permissions: Permission[],
tags: OpenAPIObject['tags']
): Promise<OpenAPIObject['paths']> {
private async generatePaths(permissions: Permission[], tags: OpenAPIObject['tags']): Promise<OpenAPIObject['paths']> {
const paths: OpenAPIObject['paths'] = {};
if (!tags) return paths;
for (const tag of tags) {
const isSystem =
tag.hasOwnProperty('x-collection') === false ||
tag['x-collection'].startsWith('directus_');
const isSystem = tag.hasOwnProperty('x-collection') === false || tag['x-collection'].startsWith('directus_');
if (isSystem) {
for (const [path, pathItem] of Object.entries<PathItemObject>(openapi.paths)) {
@@ -210,23 +198,18 @@ class OASService implements SpecificationSubService {
this.accountability?.admin === true ||
!!permissions.find(
(permission) =>
permission.collection === collection &&
permission.action === this.getActionForMethod(method)
permission.collection === collection && permission.action === this.getActionForMethod(method)
);
if (hasPermission) {
if (!paths[`/items/${collection}`]) paths[`/items/${collection}`] = {};
if (!paths[`/items/${collection}/{id}`])
paths[`/items/${collection}/{id}`] = {};
if (!paths[`/items/${collection}/{id}`]) paths[`/items/${collection}/{id}`] = {};
if (listBase[method]) {
paths[`/items/${collection}`][method] = mergeWith(
cloneDeep(listBase[method]),
{
description: listBase[method].description.replace(
'item',
collection + ' item'
),
description: listBase[method].description.replace('item', collection + ' item'),
tags: [tag.name],
operationId: `${this.getActionForMethod(method)}${tag.name}`,
requestBody: ['get', 'delete'].includes(method)
@@ -281,14 +264,9 @@ class OASService implements SpecificationSubService {
paths[`/items/${collection}/{id}`][method] = mergeWith(
cloneDeep(detailBase[method]),
{
description: detailBase[method].description.replace(
'item',
collection + ' item'
),
description: detailBase[method].description.replace('item', collection + ' item'),
tags: [tag.name],
operationId: `${this.getActionForMethod(method)}Single${
tag.name
}`,
operationId: `${this.getActionForMethod(method)}Single${tag.name}`,
requestBody: ['get', 'delete'].includes(method)
? undefined
: {
@@ -355,23 +333,17 @@ class OASService implements SpecificationSubService {
const isSystem = collection.collection.startsWith('directus_');
const fieldsInCollection = fields.filter(
(field) => field.collection === collection.collection
);
const fieldsInCollection = fields.filter((field) => field.collection === collection.collection);
if (isSystem) {
const schemaComponent: SchemaObject = cloneDeep(
openapi.components!.schemas![tag.name]
);
const schemaComponent: SchemaObject = cloneDeep(openapi.components!.schemas![tag.name]);
schemaComponent.properties = {};
for (const field of fieldsInCollection) {
schemaComponent.properties[field.field] =
(cloneDeep(
(openapi.components!.schemas![tag.name] as SchemaObject).properties![
field.field
]
(openapi.components!.schemas![tag.name] as SchemaObject).properties![field.field]
) as SchemaObject) || this.generateField(field, relations, tags, fields);
}
@@ -384,12 +356,7 @@ class OASService implements SpecificationSubService {
};
for (const field of fieldsInCollection) {
schemaComponent.properties![field.field] = this.generateField(
field,
relations,
tags,
fields
);
schemaComponent.properties![field.field] = this.generateField(field, relations, tags, fields);
}
components.schemas[tag.name] = schemaComponent;
@@ -413,12 +380,7 @@ class OASService implements SpecificationSubService {
}
}
private generateField(
field: Field,
relations: Relation[],
tags: TagObject[],
fields: Field[]
): SchemaObject {
private generateField(field: Field, relations: Relation[], tags: TagObject[], fields: Field[]): SchemaObject {
let propertyObject: SchemaObject = {
nullable: field.schema?.is_nullable,
description: field.meta?.note || undefined,
@@ -426,8 +388,7 @@ class OASService implements SpecificationSubService {
const relation = relations.find(
(relation) =>
(relation.many_collection === field.collection &&
relation.many_field === field.field) ||
(relation.many_collection === field.collection && relation.many_field === field.field) ||
(relation.one_collection === field.collection && relation.one_field === field.field)
);
@@ -444,12 +405,9 @@ class OASService implements SpecificationSubService {
});
if (relationType === 'm2o') {
const relatedTag = tags.find(
(tag) => tag['x-collection'] === relation.one_collection
);
const relatedTag = tags.find((tag) => tag['x-collection'] === relation.one_collection);
const relatedPrimaryKeyField = fields.find(
(field) =>
field.collection === relation.one_collection && field.schema?.is_primary_key
(field) => field.collection === relation.one_collection && field.schema?.is_primary_key
);
if (!relatedTag || !relatedPrimaryKeyField) return propertyObject;
@@ -463,13 +421,9 @@ class OASService implements SpecificationSubService {
},
];
} else if (relationType === 'o2m') {
const relatedTag = tags.find(
(tag) => tag['x-collection'] === relation.many_collection
);
const relatedTag = tags.find((tag) => tag['x-collection'] === relation.many_collection);
const relatedPrimaryKeyField = fields.find(
(field) =>
field.collection === relation.many_collection &&
field.schema?.is_primary_key
(field) => field.collection === relation.many_collection && field.schema?.is_primary_key
);
if (!relatedTag || !relatedPrimaryKeyField) return propertyObject;
@@ -486,9 +440,7 @@ class OASService implements SpecificationSubService {
],
};
} else if (relationType === 'm2a') {
const relatedTags = tags.filter((tag) =>
relation.one_allowed_collections!.includes(tag['x-collection'])
);
const relatedTags = tags.filter((tag) => relation.one_allowed_collections!.includes(tag['x-collection']));
propertyObject.type = 'array';
propertyObject.items = {
@@ -510,15 +462,7 @@ class OASService implements SpecificationSubService {
private fieldTypes: Record<
typeof types[number],
{
type:
| 'string'
| 'number'
| 'boolean'
| 'object'
| 'array'
| 'integer'
| 'null'
| undefined;
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'integer' | 'null' | undefined;
format?: string;
items?: any;
}

View File

@@ -4,11 +4,7 @@ import jwt from 'jsonwebtoken';
import { sendInviteMail, sendPasswordResetMail } from '../mail';
import database from '../database';
import argon2 from 'argon2';
import {
InvalidPayloadException,
ForbiddenException,
UnprocessableEntityException,
} from '../exceptions';
import { InvalidPayloadException, ForbiddenException, UnprocessableEntityException } from '../exceptions';
import { Accountability, PrimaryKey, Item, AbstractServiceOptions, SchemaOverview } from '../types';
import Knex from 'knex';
import env from '../env';
@@ -104,11 +100,7 @@ export class UsersService extends ItemsService {
if (scope !== 'invite') throw new ForbiddenException();
const user = await this.knex
.select('id', 'status')
.from('directus_users')
.where({ email })
.first();
const user = await this.knex.select('id', 'status').from('directus_users').where({ email }).first();
if (!user || user.status !== 'invited') {
throw new InvalidPayloadException(`Email address ${email} hasn't been invited.`);
@@ -116,9 +108,7 @@ export class UsersService extends ItemsService {
const passwordHashed = await argon2.hash(password);
await this.knex('directus_users')
.update({ password: passwordHashed, status: 'active' })
.where({ id: user.id });
await this.knex('directus_users').update({ password: passwordHashed, status: 'active' }).where({ id: user.id });
if (cache) {
await cache.clear();
@@ -144,11 +134,7 @@ export class UsersService extends ItemsService {
if (scope !== 'password-reset') throw new ForbiddenException();
const user = await this.knex
.select('id', 'status')
.from('directus_users')
.where({ email })
.first();
const user = await this.knex.select('id', 'status').from('directus_users').where({ email }).first();
if (!user || user.status !== 'active') {
throw new ForbiddenException();
@@ -156,9 +142,7 @@ export class UsersService extends ItemsService {
const passwordHashed = await argon2.hash(password);
await this.knex('directus_users')
.update({ password: passwordHashed, status: 'active' })
.where({ id: user.id });
await this.knex('directus_users').update({ password: passwordHashed, status: 'active' }).where({ id: user.id });
if (cache) {
await cache.clear();
@@ -166,11 +150,7 @@ export class UsersService extends ItemsService {
}
async enableTFA(pk: string) {
const user = await this.knex
.select('tfa_secret')
.from('directus_users')
.where({ id: pk })
.first();
const user = await this.knex.select('tfa_secret').from('directus_users').where({ id: pk }).first();
if (user?.tfa_secret !== null) {
throw new InvalidPayloadException('TFA Secret is already set for this user');

View File

@@ -17,18 +17,13 @@ export class UtilsService {
async sort(collection: string, { item, to }: { item: PrimaryKey; to: PrimaryKey }) {
const sortFieldResponse =
(await this.knex
.select('sort_field')
.from('directus_collections')
.where({ collection })
.first()) || systemCollectionRows;
(await this.knex.select('sort_field').from('directus_collections').where({ collection }).first()) ||
systemCollectionRows;
const sortField = sortFieldResponse?.sort_field;
if (!sortField) {
throw new InvalidPayloadException(
`Collection "${collection}" doesn't have a sort field.`
);
throw new InvalidPayloadException(`Collection "${collection}" doesn't have a sort field.`);
}
if (this.accountability?.admin !== true) {
@@ -56,11 +51,7 @@ export class UtilsService {
const primaryKeyField = this.schema[collection].primary;
// Make sure all rows have a sort value
const countResponse = await this.knex
.count('* as count')
.from(collection)
.whereNull(sortField)
.first();
const countResponse = await this.knex.count('* as count').from(collection).whereNull(sortField).first();
if (countResponse?.count && +countResponse.count !== 0) {
const lastSortValueResponse = await this.knex.max(sortField).from(collection).first();

View File

@@ -1,9 +1,4 @@
import {
StorageManager,
LocalFileSystemStorage,
StorageManagerConfig,
Storage,
} from '@slynova/flydrive';
import { StorageManager, LocalFileSystemStorage, StorageManagerConfig, Storage } from '@slynova/flydrive';
import env from './env';
import { validateEnv } from './utils/validate-env';
import { getConfigFromEnv } from './utils/get-config-from-env';

View File

@@ -42,9 +42,7 @@ export default async function applyQuery(
columns
/** @todo Check if this scales between SQL vendors */
.filter(
(column) =>
column.data_type.toLowerCase().includes('text') ||
column.data_type.toLowerCase().includes('char')
(column) => column.data_type.toLowerCase().includes('text') || column.data_type.toLowerCase().includes('char')
)
.forEach((column) => {
this.orWhereRaw(`LOWER(??) LIKE ?`, [column.column_name, `%${query.search!}%`]);
@@ -53,37 +51,19 @@ export default async function applyQuery(
}
}
export async function applyFilter(
knex: Knex,
rootQuery: QueryBuilder,
rootFilter: Filter,
collection: string
) {
const relations: Relation[] = [
...(await knex.select('*').from('directus_relations')),
...systemRelationRows,
];
export async function applyFilter(knex: Knex, rootQuery: QueryBuilder, rootFilter: Filter, collection: string) {
const relations: Relation[] = [...(await knex.select('*').from('directus_relations')), ...systemRelationRows];
addWhereClauses(rootQuery, rootFilter, collection);
addJoins(rootQuery, rootFilter, collection);
function addWhereClauses(
dbQuery: QueryBuilder,
filter: Filter,
collection: string,
logical: 'and' | 'or' = 'and'
) {
function addWhereClauses(dbQuery: QueryBuilder, filter: Filter, collection: string, logical: 'and' | 'or' = 'and') {
for (const [key, value] of Object.entries(filter)) {
if (key === '_or' || key === '_and') {
/** @NOTE this callback function isn't called until Knex runs the query */
dbQuery.where((subQuery) => {
value.forEach((subFilter: Record<string, any>) => {
addWhereClauses(
subQuery,
subFilter,
collection,
key === '_and' ? 'and' : 'or'
);
addWhereClauses(subQuery, subFilter, collection, key === '_and' ? 'and' : 'or');
});
});
@@ -97,21 +77,11 @@ export async function applyFilter(
const columnName = getWhereColumn(filterPath, collection);
applyFilterToQuery(columnName, filterOperator, filterValue, logical);
} else {
applyFilterToQuery(
`${collection}.${filterPath[0]}`,
filterOperator,
filterValue,
logical
);
applyFilterToQuery(`${collection}.${filterPath[0]}`, filterOperator, filterValue, logical);
}
}
function applyFilterToQuery(
key: string,
operator: string,
compareValue: any,
logical: 'and' | 'or' = 'and'
) {
function applyFilterToQuery(key: string, operator: string, compareValue: any, logical: 'and' | 'or' = 'and') {
if (operator === '_eq') {
dbQuery[logical].where({ [key]: compareValue });
}
@@ -207,18 +177,14 @@ export async function applyFilter(
function followRelation(pathParts: string[], parentCollection: string = collection) {
const relation = relations.find((relation) => {
return (
(relation.many_collection === parentCollection &&
relation.many_field === pathParts[0]) ||
(relation.one_collection === parentCollection &&
relation.one_field === pathParts[0])
(relation.many_collection === parentCollection && relation.many_field === pathParts[0]) ||
(relation.one_collection === parentCollection && relation.one_field === pathParts[0])
);
});
if (!relation) return;
const isM2O =
relation.many_collection === parentCollection &&
relation.many_field === pathParts[0];
const isM2O = relation.many_collection === parentCollection && relation.many_field === pathParts[0];
pathParts.shift();
@@ -273,18 +239,14 @@ export async function applyFilter(
function followRelation(pathParts: string[], parentCollection: string = collection) {
const relation = relations.find((relation) => {
return (
(relation.many_collection === parentCollection &&
relation.many_field === pathParts[0]) ||
(relation.one_collection === parentCollection &&
relation.one_field === pathParts[0])
(relation.many_collection === parentCollection && relation.many_field === pathParts[0]) ||
(relation.one_collection === parentCollection && relation.one_field === pathParts[0])
);
});
if (!relation) return;
const isM2O =
relation.many_collection === parentCollection &&
relation.many_field === pathParts[0];
const isM2O = relation.many_collection === parentCollection && relation.many_field === pathParts[0];
if (isM2O) {
dbQuery.leftJoin(

View File

@@ -5,9 +5,7 @@ export function deepMap(
): any {
if (Array.isArray(object)) {
return object.map(function (val, key) {
return typeof val === 'object'
? deepMap(val, iterator, context)
: iterator.call(context, val, key);
return typeof val === 'object' ? deepMap(val, iterator, context) : iterator.call(context, val, key);
});
} else if (typeof object === 'object') {
const res: Record<string, any> = {};

View File

@@ -45,10 +45,7 @@ export default async function getASTFromQuery(
* we might not need al this info at all times, but it's easier to fetch it all once, than trying to fetch it for every
* requested field. @todo look into utilizing graphql/dataloader for this purpose
*/
const relations = [
...(await knex.select<Relation[]>('*').from('directus_relations')),
...systemRelationRows,
];
const relations = [...(await knex.select<Relation[]>('*').from('directus_relations')), ...systemRelationRows];
const permissions =
accountability && accountability.admin !== true
@@ -76,11 +73,7 @@ export default async function getASTFromQuery(
return ast;
async function parseFields(
parentCollection: string,
fields: string[] | null,
deep?: Record<string, Query>
) {
async function parseFields(parentCollection: string, fields: string[] | null, deep?: Record<string, Query>) {
if (!fields) return [];
fields = await convertWildcards(parentCollection, fields);
@@ -96,10 +89,7 @@ export default async function getASTFromQuery(
field.includes('.') ||
// We'll always treat top level o2m fields as a related item. This is an alias field, otherwise it won't return
// anything
!!relations.find(
(relation) =>
relation.one_collection === parentCollection && relation.one_field === field
);
!!relations.find((relation) => relation.one_collection === parentCollection && relation.one_field === field);
if (isRelational) {
// field is relational
@@ -131,9 +121,7 @@ export default async function getASTFromQuery(
(relationalStructure[fieldKey] as anyNested)[collectionScope] = [];
}
(relationalStructure[fieldKey] as anyNested)[collectionScope].push(
childKey
);
(relationalStructure[fieldKey] as anyNested)[collectionScope].push(childKey);
} else {
(relationalStructure[fieldKey] as string[]).push(childKey);
}
@@ -160,14 +148,10 @@ export default async function getASTFromQuery(
let child: NestedCollectionNode | null = null;
if (relationType === 'm2a') {
const allowedCollections = relation
.one_allowed_collections!.split(',')
.filter((collection) => {
if (!permissions) return true;
return permissions.some(
(permission) => permission.collection === collection
);
});
const allowedCollections = relation.one_allowed_collections!.split(',').filter((collection) => {
if (!permissions) return true;
return permissions.some((permission) => permission.collection === collection);
});
child = {
type: 'm2a',
@@ -183,20 +167,13 @@ export default async function getASTFromQuery(
for (const relatedCollection of allowedCollections) {
child.children[relatedCollection] = await parseFields(
relatedCollection,
Array.isArray(nestedFields)
? nestedFields
: (nestedFields as anyNested)[relatedCollection] || ['*']
Array.isArray(nestedFields) ? nestedFields : (nestedFields as anyNested)[relatedCollection] || ['*']
);
child.query[relatedCollection] = {};
child.relatedKey[relatedCollection] = schema[relatedCollection].primary;
}
} else if (relatedCollection) {
if (
permissions &&
permissions.some(
(permission) => permission.collection === relatedCollection
) === false
) {
if (permissions && permissions.some((permission) => permission.collection === relatedCollection) === false) {
continue;
}
@@ -226,9 +203,7 @@ export default async function getASTFromQuery(
const fieldsInCollection = await getFieldsInCollection(parentCollection);
const allowedFields = permissions
? permissions
.find((permission) => parentCollection === permission.collection)
?.fields?.split(',')
? permissions.find((permission) => parentCollection === permission.collection)?.fields?.split(',')
: fieldsInCollection;
if (!allowedFields || allowedFields.length === 0) return [];
@@ -256,8 +231,7 @@ export default async function getASTFromQuery(
? relations
.filter(
(relation) =>
relation.many_collection === parentCollection ||
relation.one_collection === parentCollection
relation.many_collection === parentCollection || relation.one_collection === parentCollection
)
.map((relation) => {
const isMany = relation.many_collection === parentCollection;
@@ -265,9 +239,7 @@ export default async function getASTFromQuery(
})
: allowedFields.filter((fieldKey) => !!getRelation(parentCollection, fieldKey));
const nonRelationalFields = allowedFields.filter(
(fieldKey) => relationalFields.includes(fieldKey) === false
);
const nonRelationalFields = allowedFields.filter((fieldKey) => relationalFields.includes(fieldKey) === false);
fields.splice(
index,
@@ -315,12 +287,8 @@ export default async function getASTFromQuery(
async function getFieldsInCollection(collection: string) {
const columns = Object.keys(schema[collection].columns);
const fields = [
...(await knex.select('field').from('directus_fields').where({ collection })).map(
(field) => field.field
),
...systemFieldRows
.filter((fieldMeta) => fieldMeta.collection === collection)
.map((fieldMeta) => fieldMeta.field),
...(await knex.select('field').from('directus_fields').where({ collection })).map((field) => field.field),
...systemFieldRows.filter((fieldMeta) => fieldMeta.collection === collection).map((fieldMeta) => fieldMeta.field),
];
const fieldsInCollection = [

View File

@@ -3,8 +3,6 @@ import url from 'url';
export function getCacheKey(req: Request) {
const path = url.parse(req.originalUrl).pathname;
const key = `${req.accountability?.user || 'null'}-${path}-${JSON.stringify(
req.sanitizedQuery
)}`;
const key = `${req.accountability?.user || 'null'}-${path}-${JSON.stringify(req.sanitizedQuery)}`;
return key;
}

View File

@@ -12,9 +12,7 @@ export function getConfigFromEnv(prefix: string, omitPrefix?: string | string[])
let matches = false;
if (Array.isArray(omitPrefix)) {
matches = omitPrefix.some((prefix) =>
key.toLowerCase().startsWith(prefix.toLowerCase())
);
matches = omitPrefix.some((prefix) => key.toLowerCase().startsWith(prefix.toLowerCase()));
} else {
matches = key.toLowerCase().startsWith(omitPrefix.toLowerCase());
}
@@ -25,9 +23,7 @@ export function getConfigFromEnv(prefix: string, omitPrefix?: string | string[])
if (key.includes('__')) {
const path = key
.split('__')
.map((key, index) =>
index === 0 ? camelcase(camelcase(key.slice(prefix.length))) : camelcase(key)
);
.map((key, index) => (index === 0 ? camelcase(camelcase(key.slice(prefix.length))) : camelcase(key)));
set(config, path.join('.'), value);
} else {
config[camelcase(key.slice(prefix.length))] = value;

View File

@@ -2,9 +2,7 @@ import getLocalType from './get-local-type';
import { Column } from '@directus/schema/dist/types/column';
import { SchemaOverview } from '../types';
export default function getDefaultValue(
column: SchemaOverview[string]['columns'][string] | Column
) {
export default function getDefaultValue(column: SchemaOverview[string]['columns'][string] | Column) {
const type = getLocalType(column);
let defaultValue = column.default_value || null;

View File

@@ -16,13 +16,15 @@ const profileMap: Record<string, string> = {};
* This is used in the SSO flow to extract the users
*/
export default function getEmailFromProfile(provider: string, profile: Record<string, any>) {
const path =
profileMap[provider] || env[`OAUTH_${provider.toUpperCase()}_PROFILE_EMAIL`] || 'email';
const path = profileMap[provider] || env[`OAUTH_${provider.toUpperCase()}_PROFILE_EMAIL`] || 'email';
const email = get(profile, path);
if (!email) {
throw new ServiceUnavailableException("Couldn't extract email address from SSO provider response", { service: 'oauth', provider });
throw new ServiceUnavailableException("Couldn't extract email address from SSO provider response", {
service: 'oauth',
provider,
});
}
return email;

View File

@@ -87,11 +87,7 @@ export default function getLocalType(
const type = localTypeMap[column.data_type.toLowerCase().split('(')[0]];
/** Handle Postgres numeric decimals */
if (
column.data_type === 'numeric' &&
column.numeric_precision !== null &&
column.numeric_scale !== null
) {
if (column.data_type === 'numeric' && column.numeric_precision !== null && column.numeric_scale !== null) {
return 'decimal';
}

View File

@@ -20,10 +20,7 @@ export default function parseIPTC(buffer: Buffer) {
let lastIptcEntryPos = buffer.indexOf(IPTC_ENTRY_MARKER);
while (lastIptcEntryPos !== -1) {
lastIptcEntryPos = buffer.indexOf(
IPTC_ENTRY_MARKER,
lastIptcEntryPos + IPTC_ENTRY_MARKER.byteLength
);
lastIptcEntryPos = buffer.indexOf(IPTC_ENTRY_MARKER, lastIptcEntryPos + IPTC_ENTRY_MARKER.byteLength);
let iptcBlockTypePos = lastIptcEntryPos + IPTC_ENTRY_MARKER.byteLength;
let iptcBlockSizePos = iptcBlockTypePos + 1;

View File

@@ -3,10 +3,7 @@ import logger from '../logger';
import { parseFilter } from '../utils/parse-filter';
import { flatten } from 'lodash';
export function sanitizeQuery(
rawQuery: Record<string, any>,
accountability: Accountability | null
) {
export function sanitizeQuery(rawQuery: Record<string, any>, accountability: Accountability | null) {
const query: Query = {};
if (rawQuery.limit !== undefined) {

View File

@@ -79,13 +79,8 @@ function validateFilter(filter: Query['filter']) {
}
function validateFilterPrimitive(value: any, key: string) {
if (
(typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') ===
false
) {
throw new InvalidQueryException(
`The filter value for "${key}" has to be a string or a number`
);
if ((typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') === false) {
throw new InvalidQueryException(`The filter value for "${key}" has to be a string or a number`);
}
if (typeof value === 'number' && Number.isNaN(value)) {

View File

@@ -10,10 +10,7 @@ let registered: { event: string; handler: ListenerFn }[] = [];
export async function register() {
unregister();
const webhooks = await database
.select<Webhook[]>('*')
.from('directus_webhooks')
.where({ status: 'active' });
const webhooks = await database.select<Webhook[]>('*').from('directus_webhooks').where({ status: 'active' });
for (const webhook of webhooks) {
if (webhook.actions === '*') {
@@ -43,11 +40,7 @@ export function unregister() {
function createHandler(webhook: Webhook): ListenerFn {
return async (data) => {
const collectionAllowList = webhook.collections.split(',');
if (
collectionAllowList.includes('*') === false &&
collectionAllowList.includes(data.collection) === false
)
return;
if (collectionAllowList.includes('*') === false && collectionAllowList.includes(data.collection) === false) return;
try {
await axios({

View File

@@ -3,15 +3,9 @@ const parentConfig = require('../.eslintrc.js');
module.exports = {
...parentConfig,
extends: [
'plugin:vue/essential',
'@vue/typescript/recommended',
'@vue/prettier',
'@vue/prettier/@typescript-eslint',
],
extends: ['plugin:vue/essential', '@vue/typescript/recommended', '@vue/prettier', '@vue/prettier/@typescript-eslint'],
rules: {
...parentConfig.rules,
'prettier/prettier': ['error', { singleQuote: true }],
'vue/valid-v-slot': 0,
},
};

View File

@@ -26,7 +26,8 @@
"fix:styles": "stylelint --fix \"**/*.{vue,scss}\"",
"storybook": "start-storybook -p 6006",
"build-storybook": "build-storybook",
"prepublishOnly": "npm run build"
"prepublishOnly": "npm run build",
"prettier": "prettier --write \"src/**/*.ts\""
},
"gitHead": "4476da28dbbc2824e680137aa28b2b91b5afabec",
"dependencies": {
@@ -41,6 +42,7 @@
"@vue/cli-service": "^4.5.8",
"@vue/eslint-config-prettier": "^6.0.0",
"@vue/eslint-config-typescript": "^7.0.0",
"@vue/test-utils": "^1.1.1"
"@vue/test-utils": "^1.1.1",
"prettier": "^2.2.1"
}
}

View File

@@ -40,9 +40,7 @@ export default function (expandedParentClass = '', xAxis = false) {
void el.offsetHeight; // force reflow
el.style.transition =
initialStyle.transition !== ''
? initialStyle.transition
: `${sizeProperty} var(--medium) var(--transition)`;
initialStyle.transition !== '' ? initialStyle.transition : `${sizeProperty} var(--medium) var(--transition)`;
if (expandedParentClass && el._parent) {
el._parent.classList.add(expandedParentClass);

View File

@@ -14,4 +14,4 @@ export type ValidationError = {
valid?: number | string | (number | string)[];
invalid?: number | string | (number | string)[];
substring?: string;
}
};

View File

@@ -149,15 +149,15 @@ export function useGroupableParent(
// Register a child within the context of this group
function register(item: GroupableInstance) {
items.value = [...items.value, item];
const value = getValueForItem(item)
const value = getValueForItem(item);
// If you're required to select a value, make sure a value is selected on first render
if (selection.value.length === 0 && options?.mandatory?.value === true && items.value.length === 1) {
selection.value = [value];
}
if(item.active.value && selection.value.includes(value) === false) {
toggle(item)
if (item.active.value && selection.value.includes(value) === false) {
toggle(item);
}
}

View File

@@ -27,9 +27,7 @@ export function useCustomSelection(currentValue: Ref<string>, items: Ref<any[]>,
// Check if set value is one of the existing keys
const values = items.value.map((item) => item.value);
return (
currentValue.value !== null &&
currentValue.value.length > 0 &&
values.includes(currentValue.value) === false
currentValue.value !== null && currentValue.value.length > 0 && values.includes(currentValue.value) === false
);
});

View File

@@ -17,8 +17,7 @@ export default function useFieldTree(collection: Ref<string>, inject?: { fields:
const fieldsInLevel = cloneDeep(fieldsStore.getFieldsForCollection(collection))
.filter((field: Field) => {
const shown =
field.meta?.special?.includes('alias') !== true &&
field.meta?.special?.includes('no-data') !== true;
field.meta?.special?.includes('alias') !== true && field.meta?.special?.includes('no-data') !== true;
return shown;
})
.map((field: Field) => ({

View File

@@ -17,11 +17,7 @@ export async function registerDisplays() {
try {
const customResponse = await api.get('/extensions/displays');
if (
customResponse.data.data &&
Array.isArray(customResponse.data.data) &&
customResponse.data.data.length > 0
) {
if (customResponse.data.data && Array.isArray(customResponse.data.data) && customResponse.data.data.length > 0) {
for (const customKey of customResponse.data.data) {
try {
const module = await import(/* webpackIgnore: true */ `/extensions/displays/${customKey}/index.js`);

View File

@@ -3,4 +3,6 @@ import mitt from 'mitt';
const emitter = mitt();
export default emitter;
export enum Events { upload = 'upload' }
export enum Events {
upload = 'upload',
}

View File

@@ -16,7 +16,7 @@ export default defineInterface(({ i18n }) => ({
type: 'string',
meta: {
width: 'full',
interface: 'text-input'
interface: 'text-input',
},
schema: {
default_value: null,

View File

@@ -37,8 +37,7 @@ export default function useActions(
if (value.value === null || junctionField === null) return [];
return value.value.filter(
(item) =>
typeof get(item, junctionField) === 'object' && has(item, [junctionField, relationPkField]) === false
(item) => typeof get(item, junctionField) === 'object' && has(item, [junctionField, relationPkField]) === false
) as Record<string, any>[];
}

View File

@@ -54,12 +54,7 @@ export default function usePreview(
let responseData: Record<string, any>[] = [];
if (relatedPrimaryKeys.length > 0) {
responseData = await request(
relationCollection,
filteredFields,
relationPkField,
relatedPrimaryKeys
);
responseData = await request(relationCollection, filteredFields, relationPkField, relatedPrimaryKeys);
}
// Insert the related items into the junction items
@@ -80,9 +75,7 @@ export default function usePreview(
// Replace existing items with it's updated counterparts
responseData = responseData
.map((item) => {
const updatedItem = updatedItems.find(
(updated) => updated[junctionPkField] === item[junctionPkField]
);
const updatedItem = updatedItems.find((updated) => updated[junctionPkField] === item[junctionPkField]);
if (updatedItem !== undefined) return updatedItem;
return item;
})

View File

@@ -54,8 +54,8 @@ export default defineInterface(({ i18n }) => ({
interface: 'icon',
},
schema: {
default_value: 'radio_button_checked'
}
default_value: 'radio_button_checked',
},
},
{
field: 'iconOff',
@@ -66,8 +66,8 @@ export default defineInterface(({ i18n }) => ({
interface: 'icon',
},
schema: {
default_value: 'radio_button_unchecked'
}
default_value: 'radio_button_unchecked',
},
},
{
field: 'color',

View File

@@ -17,16 +17,10 @@ export async function registerInterfaces() {
try {
const customResponse = await api.get('/extensions/interfaces');
if (
customResponse.data.data &&
Array.isArray(customResponse.data.data) &&
customResponse.data.data.length > 0
) {
if (customResponse.data.data && Array.isArray(customResponse.data.data) && customResponse.data.data.length > 0) {
for (const customKey of customResponse.data.data) {
try {
const module = await import(
/* webpackIgnore: true */ `/extensions/interfaces/${customKey}/index.js`
);
const module = await import(/* webpackIgnore: true */ `/extensions/interfaces/${customKey}/index.js`);
modules.push(module.default);
} catch (err) {
console.warn(`Couldn't load custom interface "${customKey}"`);

View File

@@ -16,11 +16,7 @@ export async function registerLayouts() {
try {
const customResponse = await api.get('/extensions/layouts');
if (
customResponse.data.data &&
Array.isArray(customResponse.data.data) &&
customResponse.data.data.length > 0
) {
if (customResponse.data.data && Array.isArray(customResponse.data.data) && customResponse.data.data.length > 0) {
for (const customKey of customResponse.data.data) {
try {
const module = await import(/* webpackIgnore: true */ `/extensions/layouts/${customKey}/index.js`);

View File

@@ -62,9 +62,7 @@ export default function useFolders() {
}
export function nestFolders(rawFolders: FolderRaw[]) {
return rawFolders
.map((rawFolder) => nestChildren(rawFolder, rawFolders))
.filter((folder) => folder.parent === null);
return rawFolders.map((rawFolder) => nestChildren(rawFolder, rawFolders)).filter((folder) => folder.parent === null);
}
export function nestChildren(rawFolder: FolderRaw, rawFolders: FolderRaw[]) {

View File

@@ -19,11 +19,7 @@ export async function loadModules() {
try {
const customResponse = await api.get('/extensions/modules');
if (
customResponse.data.data &&
Array.isArray(customResponse.data.data) &&
customResponse.data.data.length > 0
) {
if (customResponse.data.data && Array.isArray(customResponse.data.data) && customResponse.data.data.length > 0) {
for (const customKey of customResponse.data.data) {
try {
const module = await import(/* webpackIgnore: true */ `/extensions/modules/${customKey}/index.js`);

View File

@@ -28,7 +28,7 @@ export default function useNavigation() {
const rolesResponse = await api.get(`/roles`, {
params: {
sort: 'name',
}
},
});
roles.value = rolesResponse.data.data;
loading.value = false;

View File

@@ -184,9 +184,7 @@ export const useFieldsStore = createStore({
if (currentUpdate === updateID) {
this.state.fields = this.state.fields.map((field) => {
if (field.collection === collectionKey) {
const newDataForField = response.data.data.find(
(update: Field) => update.field === field.field
);
const newDataForField = response.data.data.find((update: Field) => update.field === field.field);
if (newDataForField) return this.parseField(newDataForField);
}

View File

@@ -3,5 +3,5 @@ export type APIError = {
extensions: {
code: string;
[key: string]: any;
}
}
};
};

View File

@@ -33,20 +33,7 @@ export default function getAvailableOperatorsForType(type: string) {
case 'sort':
return {
type: 'number',
operators: [
'eq',
'neq',
'lt',
'lte',
'gt',
'gte',
'between',
'nbetween',
'empty',
'nempty',
'in',
'nin',
],
operators: ['eq', 'neq', 'lt', 'lte', 'gt', 'gte', 'between', 'nbetween', 'empty', 'nempty', 'in', 'nin'],
};
// Datetime
case 'datetime':
@@ -56,20 +43,7 @@ export default function getAvailableOperatorsForType(type: string) {
case 'datetime_updated':
return {
type: 'datetime',
operators: [
'eq',
'neq',
'lt',
'lte',
'gt',
'gte',
'between',
'nbetween',
'empty',
'nempty',
'in',
'nin',
],
operators: ['eq', 'neq', 'lt', 'lte', 'gt', 'gte', 'between', 'nbetween', 'empty', 'nempty', 'in', 'nin'],
};
default:
return {

6
package-lock.json generated
View File

@@ -26500,9 +26500,9 @@
"dev": true
},
"prettier": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.1.2.tgz",
"integrity": "sha512-16c7K+x4qVlJg9rEbXl7HEGmQyZlG4R9AgP+oHKRMsMsuk8s+ATStlf1NpDqyBI1HpVyfjLOeMhH2LvuNvV5Vg==",
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.2.1.tgz",
"integrity": "sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==",
"dev": true
},
"prettier-bytes": {

View File

@@ -7,7 +7,8 @@
"lint": "lerna run lint",
"release": "lerna publish --force-publish",
"cli": "cross-env NODE_ENV=development DOTENV_CONFIG_PATH=api/.env ts-node -r dotenv/config --script-mode --transpile-only api/src/cli/index.ts",
"postinstall": "npm run build"
"postinstall": "npm run build",
"prettier": "lerna run prettier"
},
"repository": {
"type": "git",
@@ -134,7 +135,7 @@
"nyc": "^15.1.0",
"pinia": "0.0.7",
"portal-vue": "^2.1.7",
"prettier": "^2.1.1",
"prettier": "^2.2.1",
"pretty-ms": "^7.0.1",
"qrcode": "^1.4.4",
"raw-loader": "^4.0.1",