Update Prettier v3.1.0 (#20481)

Author: Pascal Jufer
Date: 2023-11-20 16:23:22 +01:00
Committed by: GitHub
Parent: 59d40451ba
Commit: ba7fac402e
748 changed files with 3107 additions and 2884 deletions
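Nearly every hunk below is the same mechanical change: Prettier 3 switched the default `trailingComma` option from "es5" to "all", so multi-line argument and parameter lists now end with a trailing comma. A few hunks appear to pick up other newer formatting rules as well, such as printing constructor parameter properties on separate lines and reparenthesizing `as` assertions returned from arrow functions. A minimal before/after sketch of the dominant pattern (the function name is hypothetical, for illustration only):

// Before (Prettier 2.x default, trailingComma: "es5")
registerProvider(
  driver,
  options,
  config
);

// After (Prettier 3.x default, trailingComma: "all")
registerProvider(
  driver,
  options,
  config,
);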

View File

@@ -15,4 +15,5 @@ dist/
*.css
*.svg
*.liquid
*.html
Dockerfile

View File

@@ -124,7 +124,7 @@ export default async function createApp(): Promise<express.Application> {
maxMemoryHeapUsed: env['PRESSURE_LIMITER_MAX_MEMORY_HEAP_USED'],
error: new ServiceUnavailableError({ service: 'api', reason: 'Under pressure' }),
retryAfter: env['PRESSURE_LIMITER_RETRY_AFTER'],
})
}),
);
}
@@ -150,9 +150,9 @@ export default async function createApp(): Promise<express.Application> {
connectSrc: ["'self'", 'https://*'],
},
},
getConfigFromEnv('CONTENT_SECURITY_POLICY_')
)
)
getConfigFromEnv('CONTENT_SECURITY_POLICY_'),
),
),
);
if (env['HSTS_ENABLED']) {

View File

@@ -72,7 +72,7 @@ export async function registerAuthProviders(): Promise<void> {
function getProviderInstance(
driver: string,
options: AuthDriverOptions,
config: Record<string, any> = {}
config: Record<string, any> = {},
): AuthDriver | undefined {
switch (driver) {
case 'local':

View File

@@ -123,7 +123,7 @@ export class LDAPAuthDriver extends AuthDriver {
private async fetchUserInfo(
baseDn: string,
filter?: ldap.EqualityFilter,
scope?: SearchScope
scope?: SearchScope,
): Promise<UserInfo | undefined> {
let { firstNameAttribute, lastNameAttribute, mailAttribute } = this.config;
@@ -174,7 +174,7 @@ export class LDAPAuthDriver extends AuthDriver {
res.on('end', () => {
resolve(undefined);
});
}
},
);
});
}
@@ -212,7 +212,7 @@ export class LDAPAuthDriver extends AuthDriver {
res.on('end', () => {
resolve(userGroups);
});
}
},
);
});
}
@@ -242,7 +242,7 @@ export class LDAPAuthDriver extends AuthDriver {
attribute: userAttribute ?? 'cn',
value: payload['identifier'],
}),
userScope ?? 'one'
userScope ?? 'one',
);
if (!userInfo?.dn) {
@@ -258,7 +258,7 @@ export class LDAPAuthDriver extends AuthDriver {
attribute: groupAttribute ?? 'member',
value: groupAttribute?.toLowerCase() === 'memberuid' && userInfo.uid ? userInfo.uid : userInfo.dn,
}),
groupScope ?? 'one'
groupScope ?? 'one',
);
if (userGroups.length) {
@@ -282,7 +282,7 @@ export class LDAPAuthDriver extends AuthDriver {
`auth.update`,
{},
{ identifier: userInfo.dn, provider: this.config['provider'], providerPayload: { userInfo, userRole } },
{ database: getDatabase(), schema: this.schema, accountability: null }
{ database: getDatabase(), schema: this.schema, accountability: null },
);
// Only sync roles if the AD groups are configured
@@ -315,7 +315,7 @@ export class LDAPAuthDriver extends AuthDriver {
`auth.create`,
userPayload,
{ identifier: userInfo.dn, provider: this.config['provider'], providerPayload: { userInfo, userRole } },
{ database: getDatabase(), schema: this.schema, accountability: null }
{ database: getDatabase(), schema: this.schema, accountability: null },
);
try {
@@ -436,7 +436,7 @@ export function createLDAPAuthRouter(provider: string): Router {
const { accessToken, refreshToken, expires } = await authenticationService.login(
provider,
req.body,
req.body?.otp
req.body?.otp,
);
const payload = {
@@ -461,7 +461,7 @@ export function createLDAPAuthRouter(provider: string): Router {
return next();
}),
respond
respond,
);
return router;

View File

@@ -88,7 +88,7 @@ export function createLocalAuthRouter(provider: string): Router {
const { accessToken, refreshToken, expires } = await authenticationService.login(
provider,
req.body,
req.body?.otp
req.body?.otp,
);
const payload = {
@@ -107,7 +107,7 @@ export function createLocalAuthRouter(provider: string): Router {
return next();
}),
respond
respond,
);
return router;

View File

@@ -62,7 +62,7 @@ export class OAuth2AuthDriver extends LocalAuthDriver {
const clientOptionsOverrides = getConfigFromEnv(
`AUTH_${config['provider'].toUpperCase()}_CLIENT_`,
[`AUTH_${config['provider'].toUpperCase()}_CLIENT_ID`, `AUTH_${config['provider'].toUpperCase()}_CLIENT_SECRET`],
'underscore'
'underscore',
);
this.client = new issuer.Client({
@@ -129,7 +129,7 @@ export class OAuth2AuthDriver extends LocalAuthDriver {
tokenSet = await this.client.oauthCallback(
this.redirectUrl,
{ code: payload['code'], state: payload['state'] },
{ code_verifier: payload['codeVerifier'], state: codeChallenge }
{ code_verifier: payload['codeVerifier'], state: codeChallenge },
);
userInfo = await this.client.userinfo(tokenSet.access_token!);
@@ -174,7 +174,7 @@ export class OAuth2AuthDriver extends LocalAuthDriver {
provider: this.config['provider'],
providerPayload: { accessToken: tokenSet.access_token, userInfo },
},
{ database: getDatabase(), schema: this.schema, accountability: null }
{ database: getDatabase(), schema: this.schema, accountability: null },
);
// Update user to update refresh_token and other properties that might have changed
@@ -201,7 +201,7 @@ export class OAuth2AuthDriver extends LocalAuthDriver {
provider: this.config['provider'],
providerPayload: { accessToken: tokenSet.access_token, userInfo },
},
{ database: getDatabase(), schema: this.schema, accountability: null }
{ database: getDatabase(), schema: this.schema, accountability: null },
);
try {
@@ -290,7 +290,7 @@ export function createOAuth2AuthRouter(providerName: string): Router {
{
expiresIn: '5m',
issuer: 'directus',
}
},
);
res.cookie(`oauth2.${providerName}`, token, {
@@ -300,7 +300,7 @@ export function createOAuth2AuthRouter(providerName: string): Router {
return res.redirect(provider.generateAuthUrl(codeVerifier, prompt));
},
respond
respond,
);
router.post(
@@ -309,7 +309,7 @@ export function createOAuth2AuthRouter(providerName: string): Router {
(req, res) => {
res.redirect(303, `./callback?${new URLSearchParams(req.body)}`);
},
respond
respond,
);
router.get(
@@ -400,7 +400,7 @@ export function createOAuth2AuthRouter(providerName: string): Router {
next();
}),
respond
respond,
);
return router;

View File

@@ -51,7 +51,7 @@ export class OpenIDAuthDriver extends LocalAuthDriver {
const clientOptionsOverrides = getConfigFromEnv(
`AUTH_${config['provider'].toUpperCase()}_CLIENT_`,
[`AUTH_${config['provider'].toUpperCase()}_CLIENT_ID`, `AUTH_${config['provider'].toUpperCase()}_CLIENT_SECRET`],
'underscore'
'underscore',
);
this.redirectUrl = redirectUrl.toString();
@@ -69,7 +69,7 @@ export class OpenIDAuthDriver extends LocalAuthDriver {
reject(
new InvalidProviderConfigError({
provider: additionalConfig['provider'],
})
}),
);
}
@@ -80,7 +80,7 @@ export class OpenIDAuthDriver extends LocalAuthDriver {
redirect_uris: [this.redirectUrl],
response_types: ['code'],
...clientOptionsOverrides,
})
}),
);
})
.catch((e) => {
@@ -149,7 +149,7 @@ export class OpenIDAuthDriver extends LocalAuthDriver {
tokenSet = await client.callback(
this.redirectUrl,
{ code: payload['code'], state: payload['state'], iss: payload['iss'] },
{ code_verifier: payload['codeVerifier'], state: codeChallenge, nonce: codeChallenge }
{ code_verifier: payload['codeVerifier'], state: codeChallenge, nonce: codeChallenge },
);
userInfo = tokenSet.claims();
@@ -201,7 +201,7 @@ export class OpenIDAuthDriver extends LocalAuthDriver {
provider: this.config['provider'],
providerPayload: { accessToken: tokenSet.access_token, userInfo },
},
{ database: getDatabase(), schema: this.schema, accountability: null }
{ database: getDatabase(), schema: this.schema, accountability: null },
);
// Update user to update refresh_token and other properties that might have changed
@@ -230,7 +230,7 @@ export class OpenIDAuthDriver extends LocalAuthDriver {
provider: this.config['provider'],
providerPayload: { accessToken: tokenSet.access_token, userInfo },
},
{ database: getDatabase(), schema: this.schema, accountability: null }
{ database: getDatabase(), schema: this.schema, accountability: null },
);
try {
@@ -320,7 +320,7 @@ export function createOpenIDAuthRouter(providerName: string): Router {
{
expiresIn: '5m',
issuer: 'directus',
}
},
);
res.cookie(`openid.${providerName}`, token, {
@@ -330,7 +330,7 @@ export function createOpenIDAuthRouter(providerName: string): Router {
return res.redirect(await provider.generateAuthUrl(codeVerifier, prompt));
}),
respond
respond,
);
router.post(
@@ -339,7 +339,7 @@ export function createOpenIDAuthRouter(providerName: string): Router {
(req, res) => {
res.redirect(303, `./callback?${new URLSearchParams(req.body)}`);
},
respond
respond,
);
router.get(
@@ -433,7 +433,7 @@ export function createOpenIDAuthRouter(providerName: string): Router {
next();
}),
respond
respond,
);
return router;

View File

@@ -79,7 +79,7 @@ export class SAMLAuthDriver extends LocalAuthDriver {
`auth.create`,
userPayload,
{ identifier: identifier.toLowerCase(), provider: this.config['provider'], providerPayload: { ...payload } },
{ database: getDatabase(), schema: this.schema, accountability: null }
{ database: getDatabase(), schema: this.schema, accountability: null },
);
try {
@@ -108,7 +108,7 @@ export function createSAMLAuthRouter(providerName: string) {
asyncHandler(async (_req, res) => {
const { sp } = getAuthProvider(providerName) as SAMLAuthDriver;
return res.header('Content-Type', 'text/xml').send(sp.getMetadata());
})
}),
);
router.get(
@@ -123,7 +123,7 @@ export function createSAMLAuthRouter(providerName: string) {
}
return res.redirect(parsedUrl.toString());
})
}),
);
router.post(
@@ -144,7 +144,7 @@ export function createSAMLAuthRouter(providerName: string) {
}
return res.redirect(context);
})
}),
);
router.post(
@@ -191,7 +191,7 @@ export function createSAMLAuthRouter(providerName: string) {
throw error;
}
}),
respond
respond,
);
return router;

View File

@@ -140,7 +140,7 @@ export async function setCacheValue(
cache: Keyv,
key: string,
value: Record<string, any> | Record<string, any>[],
ttl?: number
ttl?: number,
) {
const compressed = await compress(value);
await cache.set(key, compressed, ttl);

View File

@@ -40,8 +40,8 @@ export default async function init(): Promise<void> {
async function trySeed(): Promise<{ credentials: Credentials; db: Knex }> {
const credentials: Credentials = await inquirer.prompt(
(databaseQuestions[dbClient] as any[]).map((question: ({ client, filepath }: any) => any) =>
question({ client: dbClient, filepath: rootPath })
)
question({ client: dbClient, filepath: rootPath }),
),
);
const db = createDBConnection(dbClient, credentials!);

View File

@@ -8,7 +8,7 @@ import { dump as toYaml } from 'js-yaml';
export async function snapshot(
snapshotPath?: string,
options?: { yes: boolean; format: 'json' | 'yaml' }
options?: { yes: boolean; format: 'json' | 'yaml' },
): Promise<void> {
const database = getDatabase();

View File

@@ -22,7 +22,7 @@ const liquidEngine = new Liquid({
export default async function createEnv(
client: keyof typeof drivers,
credentials: Credentials,
directory: string
directory: string,
): Promise<void> {
const { nanoid } = await import('nanoid');

View File

@@ -65,7 +65,7 @@ router.get(
return next();
}),
respond
respond,
);
const createCommentSchema = Joi.object({
@@ -113,7 +113,7 @@ router.post(
return next();
}),
respond
respond,
);
const updateCommentSchema = Joi.object({
@@ -152,7 +152,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -177,7 +177,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -89,7 +89,7 @@ router.get(
const allKeys: string[] = [
...systemKeys,
...(assetSettings.storage_asset_presets || []).map(
(transformation: TransformationParams) => transformation['key']
(transformation: TransformationParams) => transformation['key'],
),
];
@@ -140,8 +140,8 @@ router.get(
defaultSrc: ['none'],
},
},
getConfigFromEnv('ASSETS_CONTENT_SECURITY_POLICY')
)
getConfigFromEnv('ASSETS_CONTENT_SECURITY_POLICY'),
),
)(req, res, next);
}),
@@ -158,7 +158,7 @@ router.get(
const transformationParams: TransformationParams = {
...(res.locals['shortcuts'] as TransformationParams[]).find(
(transformation) => transformation['key'] === res.locals['transformation']?.key
(transformation) => transformation['key'] === res.locals['transformation']?.key,
),
...res.locals['transformation'],
};
@@ -267,7 +267,7 @@ router.get(
});
return undefined;
})
}),
);
export default router;

View File

@@ -104,7 +104,7 @@ router.post(
res.locals['payload'] = payload;
return next();
}),
respond
respond,
);
router.post(
@@ -145,7 +145,7 @@ router.post(
return next();
}),
respond
respond,
);
router.post(
@@ -180,7 +180,7 @@ router.post(
}
}
}),
respond
respond,
);
router.post(
@@ -209,7 +209,7 @@ router.post(
await service.resetPassword(req.body.token, req.body.password);
return next();
}),
respond
respond,
);
router.get(
@@ -222,7 +222,7 @@ router.get(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -30,7 +30,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -74,7 +74,7 @@ router.get(
return next();
}),
respond
respond,
);
router.patch(
@@ -100,7 +100,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -126,7 +126,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -141,7 +141,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -50,7 +50,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -87,7 +87,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -123,7 +123,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -149,7 +149,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -171,7 +171,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -186,7 +186,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -25,7 +25,7 @@ router.get(
res.locals['payload'] = { data: extensions || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -51,7 +51,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.get(
@@ -76,12 +76,12 @@ router.get(
res.setHeader(
'Cache-Control',
getCacheControlHeader(req, getMilliseconds(env['EXTENSIONS_CACHE_TTL']), false, false)
getCacheControlHeader(req, getMilliseconds(env['EXTENSIONS_CACHE_TTL']), false, false),
);
res.setHeader('Vary', 'Origin, Cache-Control');
res.end(source);
})
}),
);
export default router;

View File

@@ -28,7 +28,7 @@ router.get(
res.locals['payload'] = { data: fields || null };
return next();
}),
respond
respond,
);
router.get(
@@ -45,7 +45,7 @@ router.get(
res.locals['payload'] = { data: fields || null };
return next();
}),
respond
respond,
);
router.get(
@@ -62,7 +62,7 @@ router.get(
res.locals['payload'] = { data: field || null };
return next();
}),
respond
respond,
);
const newFieldSchema = Joi.object({
@@ -114,7 +114,7 @@ router.post(
return next();
}),
respond
respond,
);
router.patch(
@@ -152,7 +152,7 @@ router.patch(
return next();
}),
respond
respond,
);
const updateSchema = Joi.object({
@@ -207,7 +207,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -222,7 +222,7 @@ router.delete(
await service.deleteField(req.params['collection']!, req.params['field']!);
return next();
}),
respond
respond,
);
export default router;

View File

@@ -46,8 +46,8 @@ describe('multipartHandler', () => {
fakeForm.append(
'file',
Buffer.from(
'<?xml version="1.0" encoding="UTF-8" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg width="100%" height="100%" viewBox="0 0 243 266" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:1.41421;"><g id="Calligraphy"><path d="M67.097,135.868c0,3.151 0.598,14.121 -11.586,14.121c-17.076,0 -15.95,-12.947 -15.95,-12.947c0,-2.521 4.597,-5.638 4.597,-7.318c0,-0.63 0.041,-3.519 -2.27,-3.519c-5.671,0 -5.671,10.083 -5.671,10.083c0,0 0.419,15.336 19.116,15.336c20.205,0 30.04,-23.712 30.04,-30.88c0,-18.197 -51.112,-27.701 -51.112,-57.949c0,1.575 -2.205,-13.864 14.18,-13.864c28.358,0 44.426,42.536 44.426,71.524c0,28.988 -16.699,55.455 -16.699,55.455c0,0 33.4,-25.837 33.4,-76.25c0,-70.264 -46.003,-69.634 -46.003,-69.634c-4.792,0 -7.602,-0.241 -28.398,20.555c-20.797,20.797 -17.646,29.83 -17.646,29.83c0,31.93 49.576,32.35 49.576,55.457Z" style="fill-rule:nonzero;"/><path d="M241.886,174.861c-1.602,-9.142 -15.448,-9.916 -22.675,-9.682c-0.7,-0.003 -1.172,0.02 -1.327,0.03c-8.804,0.01 -19.314,4.179 -33.072,13.115c-3.554,2.308 -7.19,4.847 -10.902,7.562c-6.979,-31.39 -13.852,-63.521 -28.033,-63.521c20.415,-20.119 22.19,-16.272 22.19,-39.054c0,-11.244 14.498,-21.35 14.498,-21.35l-0.296,-2.024c-19.193,5.304 -37.307,-8.577 -42.2,-12.755c5.375,-9.663 9.584,-12.565 9.584,-12.565c1.891,-20.377 15.965,-27.31 15.965,-27.31c1.681,-4.201 6.092,-7.142 6.092,-7.142c-70.162,22.267 -54.247,189.298 -54.247,189.298c-0.475,-55.91 5.238,-92.242 11.977,-115.55c9.094,8.248 24.425,11.765 24.425,11.765c-7.396,3.55 -5.324,12.13 -5.324,19.527c0,7.397 -3.848,10.651 -3.848,10.651l-21.893,22.782c17.043,0.294 23.638,31.657 30.689,63.056c-2.548,2.042 -5.125,4.12 -7.728,6.219c-16.396,13.223 -33.351,26.897 -50.266,37.354c-19.086,11.797 -35.151,17.533 -49.116,17.533c-25.25,0 -44.118,-24.368 -44.118,-46.154c0,-9.838 3.227,-17.831 5.935,-22.805c2.935,-5.39 5.911,-8.503 5.967,-8.561c0.001,0 0.001,0 0.001,-0.001l-0.013,-0.012c1.803,-1.885 4.841,-5.181 10.423,-5.181c20.715,0 27.475,40.776 55.603,40.776c24.857,0 31.834,-20.497 37.286,-31.399c0,0 -8.94,11.12 -21.587,11.12c-27.038,0 -35.323,-40.557 -55.166,-40.557c-13.41,0 -22.743,15.506 -31.029,27.281c0,0 0.018,-0.001 0.048,-0.003c-1.02,1.415 -2.214,3.233 -3.41,5.425c-2.847,5.21 -6.239,13.587 -6.239,23.917c0,22.816 19.801,48.334 46.299,48.334c14.381,0 30.822,-5.84 50.262,-17.858c17.033,-10.529 34.04,-24.246 50.489,-37.511c2.309,-1.862 4.607,-3.715 6.891,-5.549c6.952,30.814 14.606,60.912 33.278,60.912c14.794,0 26.923,-25.445 26.923,-25.445c-7.987,7.101 -13.313,5.621 -13.313,5.621c-13.139,0.379 -19.937,-27.594 -26.48,-56.931c16.455,-12.099 31.46,-20.829 43.488,-20.829l0.072,-0.003c0.082,-0.005 5.246,-0.305 9.957,1.471c-2.95,1.636 -4.947,4.782 -4.947,8.394c0,5.299 4.296,9.594 9.594,9.594c5.298,0 9.594,-4.295 9.594,-9.594c0,-0.826 -0.104,-1.627 -0.301,-2.391Z" style="fill-rule:nonzero;"/></g></svg>'
)
'<?xml version="1.0" encoding="UTF-8" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg width="100%" height="100%" viewBox="0 0 243 266" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:1.41421;"><g id="Calligraphy"><path d="M67.097,135.868c0,3.151 0.598,14.121 -11.586,14.121c-17.076,0 -15.95,-12.947 -15.95,-12.947c0,-2.521 4.597,-5.638 4.597,-7.318c0,-0.63 0.041,-3.519 -2.27,-3.519c-5.671,0 -5.671,10.083 -5.671,10.083c0,0 0.419,15.336 19.116,15.336c20.205,0 30.04,-23.712 30.04,-30.88c0,-18.197 -51.112,-27.701 -51.112,-57.949c0,1.575 -2.205,-13.864 14.18,-13.864c28.358,0 44.426,42.536 44.426,71.524c0,28.988 -16.699,55.455 -16.699,55.455c0,0 33.4,-25.837 33.4,-76.25c0,-70.264 -46.003,-69.634 -46.003,-69.634c-4.792,0 -7.602,-0.241 -28.398,20.555c-20.797,20.797 -17.646,29.83 -17.646,29.83c0,31.93 49.576,32.35 49.576,55.457Z" style="fill-rule:nonzero;"/><path d="M241.886,174.861c-1.602,-9.142 -15.448,-9.916 -22.675,-9.682c-0.7,-0.003 -1.172,0.02 -1.327,0.03c-8.804,0.01 -19.314,4.179 -33.072,13.115c-3.554,2.308 -7.19,4.847 -10.902,7.562c-6.979,-31.39 -13.852,-63.521 -28.033,-63.521c20.415,-20.119 22.19,-16.272 22.19,-39.054c0,-11.244 14.498,-21.35 14.498,-21.35l-0.296,-2.024c-19.193,5.304 -37.307,-8.577 -42.2,-12.755c5.375,-9.663 9.584,-12.565 9.584,-12.565c1.891,-20.377 15.965,-27.31 15.965,-27.31c1.681,-4.201 6.092,-7.142 6.092,-7.142c-70.162,22.267 -54.247,189.298 -54.247,189.298c-0.475,-55.91 5.238,-92.242 11.977,-115.55c9.094,8.248 24.425,11.765 24.425,11.765c-7.396,3.55 -5.324,12.13 -5.324,19.527c0,7.397 -3.848,10.651 -3.848,10.651l-21.893,22.782c17.043,0.294 23.638,31.657 30.689,63.056c-2.548,2.042 -5.125,4.12 -7.728,6.219c-16.396,13.223 -33.351,26.897 -50.266,37.354c-19.086,11.797 -35.151,17.533 -49.116,17.533c-25.25,0 -44.118,-24.368 -44.118,-46.154c0,-9.838 3.227,-17.831 5.935,-22.805c2.935,-5.39 5.911,-8.503 5.967,-8.561c0.001,0 0.001,0 0.001,-0.001l-0.013,-0.012c1.803,-1.885 4.841,-5.181 10.423,-5.181c20.715,0 27.475,40.776 55.603,40.776c24.857,0 31.834,-20.497 37.286,-31.399c0,0 -8.94,11.12 -21.587,11.12c-27.038,0 -35.323,-40.557 -55.166,-40.557c-13.41,0 -22.743,15.506 -31.029,27.281c0,0 0.018,-0.001 0.048,-0.003c-1.02,1.415 -2.214,3.233 -3.41,5.425c-2.847,5.21 -6.239,13.587 -6.239,23.917c0,22.816 19.801,48.334 46.299,48.334c14.381,0 30.822,-5.84 50.262,-17.858c17.033,-10.529 34.04,-24.246 50.489,-37.511c2.309,-1.862 4.607,-3.715 6.891,-5.549c6.952,30.814 14.606,60.912 33.278,60.912c14.794,0 26.923,-25.445 26.923,-25.445c-7.987,7.101 -13.313,5.621 -13.313,5.621c-13.139,0.379 -19.937,-27.594 -26.48,-56.931c16.455,-12.099 31.46,-20.829 43.488,-20.829l0.072,-0.003c0.082,-0.005 5.246,-0.305 9.957,1.471c-2.95,1.636 -4.947,4.782 -4.947,8.394c0,5.299 4.296,9.594 9.594,9.594c5.298,0 9.594,-4.295 9.594,-9.594c0,-0.826 -0.104,-1.627 -0.301,-2.391Z" style="fill-rule:nonzero;"/></g></svg>',
),
);
const req = {

View File

@@ -181,7 +181,7 @@ router.post(
return next();
}),
respond
respond,
);
const importSchema = Joi.object({
@@ -218,7 +218,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -263,7 +263,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -299,7 +299,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -326,7 +326,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -349,7 +349,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -364,7 +364,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -31,7 +31,7 @@ const webhookFlowHandler = asyncHandler(async (req, res, next) => {
{
accountability: req.accountability,
schema: req.schema,
}
},
);
if (!cacheEnabled) {
@@ -81,7 +81,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -118,7 +118,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -154,7 +154,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -180,7 +180,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -202,7 +202,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -217,7 +217,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -50,7 +50,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -96,7 +96,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -132,7 +132,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -158,7 +158,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -181,7 +181,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -196,7 +196,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -24,7 +24,7 @@ router.use(
return next();
}),
respond
respond,
);
router.use(
@@ -45,7 +45,7 @@ router.use(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -55,7 +55,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -113,7 +113,7 @@ router.get(
return next();
}),
respond
respond,
);
router.patch(
@@ -160,7 +160,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -193,7 +193,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -219,7 +219,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -236,7 +236,7 @@ router.delete(
await service.deleteOne(req.params['pk']!);
return next();
}),
respond
respond,
);
export default router;

View File

@@ -24,7 +24,7 @@ const notFound: RequestHandler = async (req, res, next) => {
database: getDatabase(),
schema: req.schema,
accountability: req.accountability ?? null,
}
},
);
if (hooksResult) {

View File

@@ -50,7 +50,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -96,7 +96,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -132,7 +132,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -158,7 +158,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -181,7 +181,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -196,7 +196,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -50,7 +50,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -87,7 +87,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -123,7 +123,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -149,7 +149,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -171,7 +171,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -186,7 +186,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -50,7 +50,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -87,7 +87,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -123,7 +123,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -149,7 +149,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -171,7 +171,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -186,7 +186,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -50,7 +50,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -102,7 +102,7 @@ router.get(
res.locals['payload'] = { data: record };
return next();
}),
respond
respond,
);
router.patch(
@@ -138,7 +138,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -164,7 +164,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -187,7 +187,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -202,7 +202,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -50,7 +50,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -96,7 +96,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -132,7 +132,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -158,7 +158,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -181,7 +181,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -196,7 +196,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -24,7 +24,7 @@ router.get(
res.locals['payload'] = { data: relations || null };
return next();
}),
respond
respond,
);
router.get(
@@ -41,7 +41,7 @@ router.get(
res.locals['payload'] = { data: relations || null };
return next();
}),
respond
respond,
);
router.get(
@@ -58,7 +58,7 @@ router.get(
res.locals['payload'] = { data: relation || null };
return next();
}),
respond
respond,
);
const newRelationSchema = Joi.object({
@@ -102,7 +102,7 @@ router.post(
return next();
}),
respond
respond,
);
const updateRelationSchema = Joi.object({
@@ -147,7 +147,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -162,7 +162,7 @@ router.delete(
await service.deleteOne(req.params['collection']!, req.params['field']!);
return next();
}),
respond
respond,
);
export default router;

View File

@@ -44,7 +44,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
export default router;

View File

@@ -50,7 +50,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -87,7 +87,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -123,7 +123,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -149,7 +149,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -172,7 +172,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -187,7 +187,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -21,7 +21,7 @@ router.get(
res.locals['payload'] = { data: currentSnapshot };
return next();
}),
respond
respond,
);
const schemaMultipartHandler: RequestHandler = (req, res, next) => {
@@ -111,7 +111,7 @@ router.post(
res.locals['payload'] = { data: { hash: currentSnapshotHash, diff: snapshotDiff } };
return next();
}),
respond
respond,
);
router.post(
@@ -123,7 +123,7 @@ router.post(
await service.apply(diff);
return next();
}),
respond
respond,
);
export default router;

View File

@@ -19,7 +19,7 @@ router.get(
res.locals['payload'] = await service.oas.generate(req.headers.host);
return next();
}),
respond
respond,
);
router.get(
@@ -45,7 +45,7 @@ router.get(
res.attachment(filename);
res.send(result);
})
}),
);
router.get(
@@ -60,7 +60,7 @@ router.get(
res.locals['payload'] = { data };
return next();
}),
respond
respond,
);
router.get(
@@ -80,7 +80,7 @@ router.get(
res.locals['cache'] = false;
return next();
}),
respond
respond,
);
export default router;

View File

@@ -22,7 +22,7 @@ router.get(
res.locals['payload'] = { data: records || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -48,7 +48,7 @@ router.patch(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -43,7 +43,7 @@ router.post(
return next();
}),
respond
respond,
);
const sharedInviteSchema = Joi.object({
@@ -69,7 +69,7 @@ router.post(
return next();
}),
respond
respond,
);
router.post(
@@ -108,7 +108,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -172,7 +172,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.get(
@@ -188,7 +188,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -224,7 +224,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -250,7 +250,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -272,7 +272,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -287,7 +287,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -50,7 +50,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -96,7 +96,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -132,7 +132,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -158,7 +158,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -181,7 +181,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -196,7 +196,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -55,7 +55,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -119,7 +119,7 @@ router.get(
return next();
}),
respond
respond,
);
router.get(
@@ -137,7 +137,7 @@ router.get(
res.locals['payload'] = { data: items || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -158,7 +158,7 @@ router.patch(
res.locals['payload'] = { data: item || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -177,7 +177,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -213,7 +213,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -239,7 +239,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -262,7 +262,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -277,7 +277,7 @@ router.delete(
return next();
}),
respond
respond,
);
const inviteSchema = Joi.object({
@@ -300,7 +300,7 @@ router.post(
await service.inviteUser(req.body.email, req.body.role, req.body.invite_url || null);
return next();
}),
respond
respond,
);
const acceptInviteSchema = Joi.object({
@@ -322,7 +322,7 @@ router.post(
await service.acceptInvite(req.body.token, req.body.password);
return next();
}),
respond
respond,
);
router.post(
@@ -353,7 +353,7 @@ router.post(
res.locals['payload'] = { data: { secret, otpauth_url: url } };
return next();
}),
respond
respond,
);
router.post(
@@ -381,7 +381,7 @@ router.post(
if (role && role.enforce_tfa) {
const existingPermission = await req.accountability.permissions?.find(
(p) => p.collection === 'directus_users' && p.action === 'update'
(p) => p.collection === 'directus_users' && p.action === 'update',
);
if (existingPermission) {
@@ -412,7 +412,7 @@ router.post(
return next();
}),
respond
respond,
);
router.post(
@@ -436,7 +436,7 @@ router.post(
if (role && role.enforce_tfa) {
const existingPermission = await req.accountability.permissions?.find(
(p) => p.collection === 'directus_users' && p.action === 'update'
(p) => p.collection === 'directus_users' && p.action === 'update',
);
if (existingPermission) {
@@ -472,7 +472,7 @@ router.post(
await service.disableTFA(req.accountability.user);
return next();
}),
respond
respond,
);
router.post(
@@ -494,7 +494,7 @@ router.post(
await service.disableTFA(req.params['pk']);
return next();
}),
respond
respond,
);
export default router;

View File

@@ -29,7 +29,7 @@ router.get(
const string = nanoid(req.query?.['length'] ? Number(req.query['length']) : 32);
return res.json({ data: string });
})
}),
);
router.post(
@@ -42,7 +42,7 @@ router.post(
const hash = await generateHash(req.body.string);
return res.json({ data: hash });
})
}),
);
router.post(
@@ -59,7 +59,7 @@ router.post(
const result = await argon2.verify(req.body.hash, req.body.string);
return res.json({ data: result });
})
}),
);
const SortSchema = Joi.object({
@@ -82,7 +82,7 @@ router.post(
await service.sort(req.collection, req.body);
return res.status(200).end();
})
}),
);
router.post(
@@ -96,7 +96,7 @@ router.post(
await service.revert(req.params['revision']!);
next();
}),
respond
respond,
);
router.post(
@@ -158,7 +158,7 @@ router.post(
busboy.on('error', (err: Error) => next(err));
req.pipe(busboy);
})
}),
);
router.post(
@@ -187,7 +187,7 @@ router.post(
return next();
}),
respond
respond,
);
router.post(
@@ -201,7 +201,7 @@ router.post(
await service.clearCache();
res.status(200).end();
})
}),
);
export default router;

View File

@@ -50,7 +50,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -96,7 +96,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -132,7 +132,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -158,7 +158,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -181,7 +181,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -196,7 +196,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.get(
@@ -221,7 +221,7 @@ router.get(
return next();
}),
respond
respond,
);
router.post(
@@ -246,7 +246,7 @@ router.post(
return next();
}),
respond
respond,
);
router.post(
@@ -267,7 +267,7 @@ router.post(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -50,7 +50,7 @@ router.post(
return next();
}),
respond
respond,
);
const readHandler = asyncHandler(async (req, res, next) => {
@@ -87,7 +87,7 @@ router.get(
res.locals['payload'] = { data: record || null };
return next();
}),
respond
respond,
);
router.patch(
@@ -121,7 +121,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.patch(
@@ -147,7 +147,7 @@ router.patch(
return next();
}),
respond
respond,
);
router.delete(
@@ -169,7 +169,7 @@ router.delete(
return next();
}),
respond
respond,
);
router.delete(
@@ -184,7 +184,7 @@ router.delete(
return next();
}),
respond
respond,
);
export default router;

View File

@@ -47,7 +47,7 @@ export async function translateDatabaseError(error: SQLError): Promise<any> {
database: getDatabase(),
schema: null,
accountability: null,
}
},
);
return hookError;

View File

@@ -10,7 +10,10 @@ export type FnHelperOptions = {
};
export abstract class FnHelper extends DatabaseHelper {
constructor(knex: Knex, protected schema: SchemaOverview) {
constructor(
knex: Knex,
protected schema: SchemaOverview,
) {
super(knex);
this.schema = schema;
}
@@ -29,7 +32,7 @@ export abstract class FnHelper extends DatabaseHelper {
const collectionName = options?.originalCollectionName || table;
const relation = this.schema.relations.find(
(relation) => relation.related_collection === collectionName && relation?.meta?.one_field === column
(relation) => relation.related_collection === collectionName && relation?.meta?.one_field === column,
);
const currentPrimary = this.schema.collections[collectionName]!.primary;

View File

@@ -5,7 +5,7 @@ export class GeometryHelperMySQL extends GeometryHelper {
override collect(table: string, column: string): Knex.Raw {
return this.knex.raw(
`concat('geometrycollection(', group_concat(? separator ', '), ')'`,
this.asText(table, column)
this.asText(table, column),
);
}

View File

@@ -7,7 +7,7 @@ export class SchemaHelperCockroachDb extends SchemaHelper {
table: string,
column: string,
type: (typeof KNEX_TYPES)[number],
options: Options = {}
options: Options = {},
): Promise<void> {
await this.changeToTypeByCopy(table, column, type, options);
}

View File

@@ -9,7 +9,7 @@ export class SchemaHelperMySQL extends SchemaHelper {
table: string,
primaryKey: string,
orderByString: string,
orderByFields: Knex.Raw[]
orderByFields: Knex.Raw[],
): Knex.QueryBuilder {
if (getDatabaseVersion()?.startsWith('5.7')) {
dbQuery.orderByRaw(`?? asc, ${orderByString}`, [`${table}.${primaryKey}`, ...orderByFields]);
@@ -18,8 +18,8 @@ export class SchemaHelperMySQL extends SchemaHelper {
.select(
knex.raw(
`??, ( @rank := IF ( @cur_id = deep.${primaryKey}, @rank + 1, 1 ) ) AS directus_row_number, ( @cur_id := deep.${primaryKey} ) AS current_id`,
'deep.*'
)
'deep.*',
),
)
.from(knex.raw('? as ??, (SELECT @rank := 0, @cur_id := null) vars', [dbQuery, 'deep']));

View File

@@ -8,7 +8,7 @@ export class SchemaHelperOracle extends SchemaHelper {
table: string,
column: string,
type: (typeof KNEX_TYPES)[number],
options: Options = {}
options: Options = {},
): Promise<void> {
await this.changeToTypeByCopy(table, column, type, options);
}

View File

@@ -26,7 +26,7 @@ export abstract class SchemaHelper extends DatabaseHelper {
table: string,
column: string,
type: (typeof KNEX_TYPES)[number],
options: Options = {}
options: Options = {},
): Promise<void> {
await this.knex.schema.alterTable(table, (builder) => {
const b = type === 'string' ? builder.string(column, options.length) : builder[type](column);
@@ -51,7 +51,7 @@ export abstract class SchemaHelper extends DatabaseHelper {
table: string,
column: string,
type: (typeof KNEX_TYPES)[number],
options: Options
options: Options,
): Promise<void> {
const tempName = `${column}__temp`;
@@ -126,11 +126,11 @@ export abstract class SchemaHelper extends DatabaseHelper {
table: string,
primaryKey: string,
orderByString: string,
orderByFields: Knex.Raw[]
orderByFields: Knex.Raw[],
): Knex.QueryBuilder {
dbQuery.rowNumber(
knex.ref('directus_row_number').toQuery(),
knex.raw(`partition by ?? order by ${orderByString}`, [`${table}.${primaryKey}`, ...orderByFields])
knex.raw(`partition by ?? order by ${orderByString}`, [`${table}.${primaryKey}`, ...orderByFields]),
);
return dbQuery;

View File

@@ -12,7 +12,7 @@ export class AutoIncrementHelperPostgres extends AutoSequenceHelper {
override async resetAutoIncrementSequence(table: string, column: string): Promise<Knex.Raw | void> {
return await this.knex.raw(
`WITH sequence_infos AS (SELECT pg_get_serial_sequence(?, ?) AS seq_name, MAX(??) as max_val FROM ??) SELECT SETVAL(seq_name, max_val) FROM sequence_infos;`,
[`"${table}"`, column, column, table]
[`"${table}"`, column, column, table],
);
}
}

View File

@@ -265,7 +265,7 @@ export async function validateMigrations(): Promise<boolean> {
((await fse.pathExists(customMigrationsPath)) && (await fse.readdir(customMigrationsPath))) || [];
migrationFiles = migrationFiles.filter(
(file: string) => file.startsWith('run') === false && file.endsWith('.d.ts') === false
(file: string) => file.startsWith('run') === false && file.endsWith('.d.ts') === false,
);
customMigrationFiles = customMigrationFiles.filter((file: string) => file.endsWith('.js'));
@@ -275,7 +275,7 @@ export async function validateMigrations(): Promise<boolean> {
const requiredVersions = migrationFiles.map((filePath) => filePath.split('-')[0]);
const completedVersions = (await database.select('version').from('directus_migrations')).map(
({ version }) => version
({ version }) => version,
);
return requiredVersions.every((version) => completedVersions.includes(version));
@@ -341,7 +341,7 @@ async function validateDatabaseCharset(database?: Knex): Promise<void> {
if (inconsistencies) {
logger.warn(
`Some tables and columns do not match your database's default collation (${collation}):\n${inconsistencies}`
`Some tables and columns do not match your database's default collation (${collation}):\n${inconsistencies}`,
);
}
}

View File

@@ -15,7 +15,7 @@ export async function up(knex: Knex): Promise<void> {
const constraintsToAdd = relations.filter((relation) => {
const exists = !!foreignKeys.find(
(fk) => fk.table === relation?.many_collection && fk.column === relation?.many_field
(fk) => fk.table === relation?.many_collection && fk.column === relation?.many_field,
);
return exists === false;
@@ -31,7 +31,7 @@ export async function up(knex: Knex): Promise<void> {
(await inspector.hasTable(constraint.one_collection)) === false
) {
logger.warn(
`Ignoring ${constraint.many_collection}.${constraint.many_field}<->${constraint.one_collection}. Tables don't exist.`
`Ignoring ${constraint.many_collection}.${constraint.many_field}<->${constraint.one_collection}. Tables don't exist.`,
);
corruptedRelations.push(constraint.id);
@@ -43,7 +43,7 @@ export async function up(knex: Knex): Promise<void> {
if (constraint.many_field === currentPrimaryKeyField) {
logger.warn(
`Illegal relationship ${constraint.many_collection}.${constraint.many_field}<->${constraint.one_collection} encountered. Many field equals collections primary key.`
`Illegal relationship ${constraint.many_collection}.${constraint.many_field}<->${constraint.one_collection} encountered. Many field equals collections primary key.`,
);
corruptedRelations.push(constraint.id);
@@ -58,13 +58,13 @@ export async function up(knex: Knex): Promise<void> {
.leftJoin(
{ related: constraint.one_collection },
`main.${constraint.many_field}`,
`related.${relatedPrimaryKeyField}`
`related.${relatedPrimaryKeyField}`,
)
.whereNull(`related.${relatedPrimaryKeyField}`);
if (rowsWithIllegalFKValues.length > 0) {
const ids: (string | number)[] = rowsWithIllegalFKValues.map<string | number>(
(row) => row[currentPrimaryKeyField]
(row) => row[currentPrimaryKeyField],
);
try {
@@ -73,7 +73,7 @@ export async function up(knex: Knex): Promise<void> {
.whereIn(currentPrimaryKeyField, ids);
} catch (err: any) {
logger.error(
`${constraint.many_collection}.${constraint.many_field} contains illegal foreign keys which couldn't be set to NULL. Please fix these references and rerun this migration to complete the upgrade.`
`${constraint.many_collection}.${constraint.many_field} contains illegal foreign keys which couldn't be set to NULL. Please fix these references and rerun this migration to complete the upgrade.`,
);
if (ids.length < 25) {
@@ -117,7 +117,7 @@ export async function up(knex: Knex): Promise<void> {
});
} catch (err: any) {
logger.warn(
`Couldn't add foreign key constraint for ${constraint.many_collection}.${constraint.many_field}<->${constraint.one_collection}`
`Couldn't add foreign key constraint for ${constraint.many_collection}.${constraint.many_field}<->${constraint.one_collection}`,
);
logger.warn(err);
@@ -127,8 +127,8 @@ export async function up(knex: Knex): Promise<void> {
if (corruptedRelations.length > 0) {
logger.warn(
`Encountered one or more corrupted relationships. Please check the following rows in "directus_relations": ${corruptedRelations.join(
', '
)}`
', ',
)}`,
);
}
}
@@ -147,7 +147,7 @@ export async function down(knex: Knex): Promise<void> {
});
} catch (err: any) {
logger.warn(
`Couldn't drop foreign key constraint for ${relation.many_collection}.${relation.many_field}<->${relation.one_collection}`
`Couldn't drop foreign key constraint for ${relation.many_collection}.${relation.many_field}<->${relation.one_collection}`,
);
logger.warn(err);

View File

@@ -92,7 +92,7 @@ export async function up(knex: Knex): Promise<void> {
fk.table === update.table &&
fk.column === constraint.column &&
fk.foreign_key_table === constraint.references.split('.')[0] &&
fk.foreign_key_column === constraint.references.split('.')[1]
fk.foreign_key_column === constraint.references.split('.')[1],
);
try {
@@ -116,7 +116,7 @@ export async function up(knex: Knex): Promise<void> {
});
} catch (err: any) {
logger.warn(
`Couldn't clean up index for foreign key ${update.table}.${constraint.column}->${constraint.references}`
`Couldn't clean up index for foreign key ${update.table}.${constraint.column}->${constraint.references}`,
);
logger.warn(err);
@@ -159,7 +159,7 @@ export async function down(knex: Knex): Promise<void> {
});
} catch (err: any) {
logger.warn(
`Couldn't clean up index for foreign key ${update.table}.${constraint.column}->${constraint.references}`
`Couldn't clean up index for foreign key ${update.table}.${constraint.column}->${constraint.references}`,
);
logger.warn(err);

View File

@@ -18,7 +18,7 @@ export async function up(knex: Knex): Promise<void> {
.select<{ id: number; filters: string | OldFilter[]; layout_query: string | Record<string, any> }[]>(
'id',
'filters',
'layout_query'
'layout_query',
)
.from('directus_presets');
@@ -84,7 +84,7 @@ export async function down(knex: Knex): Promise<void> {
.select<{ id: number; filter: string | OldFilter[]; layout_query: string | Record<string, any> }[]>(
'id',
'filter',
'layout_query'
'layout_query',
)
.from('directus_presets');

View File

@@ -33,7 +33,7 @@ function transformStringsOldFormat(newStrings: NewTranslationString[]): OldTrans
set(keyCache, [key, language], value);
}
return Object.entries(keyCache).map(([key, translations]) => ({ key, translations } as OldTranslationString));
return Object.entries(keyCache).map(([key, translations]) => ({ key, translations }) as OldTranslationString);
}
export async function up(knex: Knex): Promise<void> {

View File

@@ -18,7 +18,7 @@ export async function up(knex: Knex) {
default: column.default_value,
nullable: column.is_nullable,
});
})
}),
);
}
@@ -39,7 +39,7 @@ export async function down(knex: Knex) {
default: column.default_value,
nullable: column.is_nullable,
});
})
}),
);
}

View File

@@ -43,7 +43,7 @@ type RunASTOptions = {
export default async function runAST(
originalAST: AST | NestedCollectionNode,
schema: SchemaOverview,
options?: RunASTOptions
options?: RunASTOptions,
): Promise<null | Item | Item[]> {
const ast = cloneDeep(originalAST);
@@ -64,14 +64,14 @@ export default async function runAST(
async function run(
collection: string,
children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[],
query: Query
query: Query,
) {
// Retrieve the database columns to select in the current AST
const { fieldNodes, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel(
schema,
collection,
children,
query
query,
);
// The actual knex query builder instance. This is a promise that resolves with the raw items from the db
@@ -149,7 +149,7 @@ async function parseCurrentLevel(
schema: SchemaOverview,
collection: string,
children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[],
query: Query
query: Query,
) {
const primaryKeyField = schema.collections[collection]!.primary;
const columnsInCollection = Object.keys(schema.collections[collection]!.fields);
@@ -198,12 +198,12 @@ async function parseCurrentLevel(
(column: string) =>
children.find(
(childNode) =>
(childNode.type === 'field' || childNode.type === 'functionField') && childNode.fieldKey === column
(childNode.type === 'field' || childNode.type === 'functionField') && childNode.fieldKey === column,
) ?? {
type: 'field',
name: column,
fieldKey: column,
}
},
) as FieldNode[];
return { fieldNodes, nestedCollectionNodes, primaryKeyField };
@@ -244,7 +244,7 @@ async function getDBQuery(
knex: Knex,
table: string,
fieldNodes: (FieldNode | FunctionFieldNode)[],
query: Query
query: Query,
): Promise<Knex.QueryBuilder> {
const preProcess = getColumnPreprocessor(knex, schema, table);
const queryCopy = clone(query);
@@ -329,7 +329,7 @@ async function getDBQuery(
table,
primaryKey,
orderByString,
orderByFields
orderByFields,
);
}
} else {
@@ -373,7 +373,7 @@ async function getDBQuery(
function applyParentFilters(
schema: SchemaOverview,
nestedCollectionNodes: NestedCollectionNode[],
parentItem: Item | Item[]
parentItem: Item | Item[],
) {
const parentItems = toArray(parentItem);
@@ -437,7 +437,7 @@ function mergeWithParentItems(
schema: SchemaOverview,
nestedItem: Item | Item[],
parentItem: Item | Item[],
nestedNode: NestedCollectionNode
nestedNode: NestedCollectionNode,
) {
const nestedItems = toArray(nestedItem);
const parentItems = clone(toArray(parentItem));
@@ -474,7 +474,7 @@ function mergeWithParentItems(
if (nestedNode.query.page && nestedNode.query.page > 1) {
parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(
(nestedNode.query.limit ?? Number(env['QUERY_LIMIT_DEFAULT'])) * (nestedNode.query.page - 1)
(nestedNode.query.limit ?? Number(env['QUERY_LIMIT_DEFAULT'])) * (nestedNode.query.page - 1),
);
}
@@ -485,7 +485,7 @@ function mergeWithParentItems(
if (nestedNode.query.limit !== -1) {
parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(
0,
nestedNode.query.limit ?? Number(env['QUERY_LIMIT_DEFAULT'])
nestedNode.query.limit ?? Number(env['QUERY_LIMIT_DEFAULT']),
);
}
@@ -541,7 +541,7 @@ function removeTemporaryFields(
rawItem: Item | Item[],
ast: AST | NestedCollectionNode,
primaryKeyField: string,
parentItem?: Item
parentItem?: Item,
): null | Item | Item[] {
const rawItems = cloneDeep(toArray(rawItem));
const items: Item[] = [];
@@ -577,7 +577,7 @@ function removeTemporaryFields(
item[nestedNode.fieldKey],
nestedNode,
schema.collections[nestedNode.relation.collection]!.primary,
item
item,
);
}
@@ -624,7 +624,7 @@ function removeTemporaryFields(
nestedNode.type === 'm2o'
? schema.collections[nestedNode.relation.related_collection!]!.primary
: schema.collections[nestedNode.relation.collection]!.primary,
item
item,
);
}

View File

@@ -20,5 +20,5 @@ const permissions = requireYAML(path.resolve(__dirname, './app-access-permission
export const schemaPermissions: Permission[] = schemaPermissionsRaw.map((row) => merge({}, defaults, row));
export const appAccessMinimalPermissions: Permission[] = [...schemaPermissions, ...permissions].map((row) =>
merge({}, defaults, row)
merge({}, defaults, row),
);

View File

@@ -35,7 +35,7 @@ export class Emitter {
event: string | string[],
payload: T,
meta: Record<string, any>,
context: EventContext | null = null
context: EventContext | null = null,
): Promise<T> {
const events = Array.isArray(event) ? event : [event];

View File

@@ -421,7 +421,7 @@ async function processConfiguration() {
}
throw new Error(
`Invalid JS configuration file export type. Requires one of "function", "object", received: "${typeof config}"`
`Invalid JS configuration file export type. Requires one of "function", "object", received: "${typeof config}"`,
);
}
@@ -497,7 +497,7 @@ export function processValues(env: Record<string, any>) {
if (allowedEnvironmentVars.some((pattern) => pattern.test(newKey as string))) {
if (newKey in env && !(newKey in defaults && env[newKey] === defaults[newKey])) {
throw new Error(
`Duplicate environment variable encountered: you can't use "${newKey}" and "${key}" simultaneously.`
`Duplicate environment variable encountered: you can't use "${newKey}" and "${key}" simultaneously.`,
);
}

View File

@@ -11,13 +11,13 @@ export const getExtensions = async () => {
const filterDuplicates = ({ name }: Extension) => loadedNames.includes(name) === false;
const localPackageExtensions = (await resolvePackageExtensions(getExtensionsPath())).filter((extension) =>
filterDuplicates(extension)
filterDuplicates(extension),
);
loadedNames.push(...localPackageExtensions.map(({ name }) => name));
const packageExtensions = (await getPackageExtensions(env['PACKAGE_FILE_LOCATION'])).filter((extension) =>
filterDuplicates(extension)
filterDuplicates(extension),
);
return [...packageExtensions, ...localPackageExtensions, ...localExtensions];

View File

@@ -19,7 +19,7 @@ import {
export function generateApiExtensionsSandboxEntrypoint(
type: ApiExtensionType | HybridExtensionType,
name: string,
endpointRouter: Router
endpointRouter: Router,
) {
const index = numberGenerator();

View File

@@ -9,7 +9,7 @@
export function generateHostFunctionReference(
index: Generator<number, number, number>,
args: string[],
options: { async: boolean }
options: { async: boolean },
): string {
const argsList = args.join(', ');
const i = index.next().value;

View File

@@ -7,7 +7,7 @@ type Result<T> = T extends (...args: any) => infer Result ? Result : unknown;
export async function callReference<T extends (...args: any[]) => unknown | Promise<unknown>>(
fn: Reference<T>,
args: Args<T>
args: Args<T>,
): Promise<Reference<Result<T>>> {
const sandboxTimeout = Number(env['EXTENSIONS_SANDBOX_TIMEOUT']);

View File

@@ -8,7 +8,7 @@ export function registerFilterGenerator() {
const registerFilter = (
event: Reference<string>,
cb: Reference<(payload: unknown) => unknown | Promise<unknown>>
cb: Reference<(payload: unknown) => unknown | Promise<unknown>>,
) => {
if (event.typeof !== 'string') throw new TypeError('Filter event has to be of type string');
if (cb.typeof !== 'function') throw new TypeError('Filter handler has to be of type function');

View File

@@ -11,7 +11,7 @@ export function registerOperationGenerator() {
const registerOperation = (
id: Reference<string>,
cb: Reference<(data: Record<string, unknown>) => unknown | Promise<unknown> | void>
cb: Reference<(data: Record<string, unknown>) => unknown | Promise<unknown> | void>,
) => {
if (id.typeof !== 'string') throw new TypeError('Operation config id has to be of type string');
if (cb.typeof !== 'function') throw new TypeError('Operation config handler has to be of type function');

View File

@@ -19,7 +19,7 @@ export function registerRouteGenerator(endpointName: string, endpointRouter: Rou
headers: IncomingHttpHeaders;
body: string;
}) => { status: number; body: string } | Promise<{ status: number; body: string }>
>
>,
) => {
if (path.typeof !== 'string') throw new TypeError('Route path has to be of type string');
if (method.typeof !== 'string') throw new TypeError('Route method has to be of type string');

View File

@@ -10,7 +10,7 @@ export function requestGenerator(requestedScopes: ExtensionSandboxRequestedScope
method?: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE';
body?: Record<string, any> | string;
headers?: Record<string, string>;
}>
}>,
) => Promise<{
status: number;
statusText: string;


@@ -3,7 +3,7 @@ import type { Reference } from 'isolated-vm';
import { setTimeout } from 'node:timers/promises';
export function sleepGenerator(
requestedScopes: ExtensionSandboxRequestedScopes
requestedScopes: ExtensionSandboxRequestedScopes,
): (milliseconds: Reference<number>) => Promise<void> {
return async (milliseconds) => {
if (requestedScopes.sleep === undefined) throw new Error('No permission to access "sleep"');


@@ -15,7 +15,7 @@ import { wrap } from './utils/wrap.js';
*/
export async function instantiateSandboxSdk(
isolate: Isolate,
requestedScopes: ExtensionSandboxRequestedScopes
requestedScopes: ExtensionSandboxRequestedScopes,
): Promise<Module> {
const apiContext = await isolate.createContext();
@@ -31,7 +31,7 @@ export async function instantiateSandboxSdk(
await apiContext.evalClosure(
handlerCode,
sdk.map(({ generator, async }) => (async ? wrap(generator(requestedScopes)) : generator(requestedScopes))),
{ filename: '<extensions-sdk>', arguments: { reference: true } }
{ filename: '<extensions-sdk>', arguments: { reference: true } },
);
const exportCode = sdk.map(({ name }) => `export const ${name} = sdk.${name};`).join('\n');


@@ -229,11 +229,11 @@ export class ExtensionManager {
await this.load();
const added = this.extensions.filter(
(extension) => !prevExtensions.some((prevExtension) => extension.path === prevExtension.path)
(extension) => !prevExtensions.some((prevExtension) => extension.path === prevExtension.path),
);
const removed = prevExtensions.filter(
(prevExtension) => !this.extensions.some((extension) => prevExtension.path === extension.path)
(prevExtension) => !this.extensions.some((extension) => prevExtension.path === extension.path),
);
this.updateWatchedExtensions(added, removed);
@@ -317,7 +317,7 @@ export class ExtensionManager {
[path.resolve('package.json'), path.posix.join(extensionDirUrl, '*', 'package.json'), ...localExtensionUrls],
{
ignoreInitial: true,
}
},
);
this.watcher
@@ -352,7 +352,7 @@ export class ExtensionManager {
path.resolve(extension.path, extension.entrypoint.app),
path.resolve(extension.path, extension.entrypoint.api),
]
: path.resolve(extension.path, extension.entrypoint)
: path.resolve(extension.path, extension.entrypoint),
);
const addedPackageExtensionPaths = toPackageExtensionPaths(added);
@@ -410,7 +410,7 @@ export class ExtensionManager {
const entrypointPath = path.resolve(
extension.path,
isTypeIn(extension, HYBRID_EXTENSION_TYPES) ? extension.entrypoint.api : extension.entrypoint
isTypeIn(extension, HYBRID_EXTENSION_TYPES) ? extension.entrypoint.api : extension.entrypoint,
);
const extensionCode = await readFile(entrypointPath, 'utf-8');
@@ -446,7 +446,7 @@ export class ExtensionManager {
const { code, hostFunctions, unregisterFunction } = generateApiExtensionsSandboxEntrypoint(
extension.type,
extension.name,
this.endpointRouter
this.endpointRouter,
);
await context.evalClosure(code, [cb, ...hostFunctions.map((fn) => new ivm.Reference(fn))], {
@@ -523,7 +523,7 @@ export class ExtensionManager {
import.meta.url,
{
fresh: true,
}
},
);
const config = getModuleDefault(endpointInstance);
@@ -561,7 +561,7 @@ export class ExtensionManager {
}
const operations = this.extensions.filter(
(extension): extension is HybridExtension => extension.type === 'operation'
(extension): extension is HybridExtension => extension.type === 'operation',
);
for (const operation of operations) {
@@ -580,7 +580,7 @@ export class ExtensionManager {
import.meta.url,
{
fresh: true,
}
},
);
const config = getModuleDefault(operationInstance);
@@ -616,7 +616,7 @@ export class ExtensionManager {
import.meta.url,
{
fresh: true,
}
},
);
const configs = getModuleDefault(bundleInstances);


@@ -112,7 +112,7 @@ class FlowManager {
public async runWebhookFlow(
id: string,
data: unknown,
context: Record<string, unknown>
context: Record<string, unknown>,
): Promise<{ result: unknown; cacheEnabled?: boolean }> {
if (!(id in this.webhookFlowHandlers)) {
logger.warn(`Couldn't find webhook or manual triggered flow with id "${id}"`);
@@ -169,7 +169,7 @@ class FlowManager {
accountability: context['accountability'],
database: context['database'],
getSchema: context['schema'] ? () => context['schema'] : getSchema,
}
},
);
events.forEach((event) => emitter.onFilter(event, handler));
@@ -368,7 +368,7 @@ class FlowManager {
],
values: this.envs,
},
getRedactedString
getRedactedString,
),
},
});
@@ -391,7 +391,7 @@ class FlowManager {
private async executeOperation(
operation: Operation,
keyedData: Record<string, unknown>,
context: Record<string, unknown> = {}
context: Record<string, unknown> = {},
): Promise<{
successor: Operation | null;
status: 'resolve' | 'reject' | 'unknown';


@@ -63,7 +63,7 @@ export default function getMailer(): Transporter {
domain: env['EMAIL_MAILGUN_DOMAIN'],
},
host: env['EMAIL_MAILGUN_HOST'] || 'api.mailgun.net',
}) as any
}) as any,
);
} else if (transportName === 'sendgrid') {
const sg = require('nodemailer-sendgrid');
@@ -71,7 +71,7 @@ export default function getMailer(): Transporter {
transporter = nodemailer.createTransport(
sg({
apiKey: env['EMAIL_SENDGRID_API_KEY'],
}) as any
}) as any,
);
} else {
logger.warn('Illegal transport given for email. Check the EMAIL_TRANSPORT env var.');


@@ -102,7 +102,7 @@ test('Sets accountability to payload contents if valid token is passed', async (
share_scope: shareScope,
},
env['SECRET'],
{ issuer: 'directus' }
{ issuer: 'directus' },
);
const req = {
@@ -152,7 +152,7 @@ test('Sets accountability to payload contents if valid token is passed', async (
share_scope: shareScope,
},
env['SECRET'],
{ issuer: 'directus' }
{ issuer: 'directus' },
);
await handler(req, res, next);


@@ -37,7 +37,7 @@ export const handler = async (req: Request, _res: Response, next: NextFunction)
database,
schema: null,
accountability: null,
}
},
);
if (customAccountability && isEqual(customAccountability, defaultAccountability) === false) {


@@ -89,7 +89,7 @@ const errorHandler: ErrorRequestHandler = (err, req, res, _next) => {
database: getDatabase(),
schema: req.schema,
accountability: req.accountability ?? null,
}
},
)
.then((updatedErrors) => {
return res.json({ ...payload, errors: updatedErrors });


@@ -16,7 +16,7 @@ const sanitizeQueryMiddleware: RequestHandler = (req, _res, next) => {
fields: req.query['fields'] || '*',
...req.query,
},
req.accountability || null
req.accountability || null,
);
Object.freeze(req.sanitizedQuery);


@@ -21,7 +21,7 @@ test('Rejects when Isolate uses more than allowed memory', async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 8,
FLOWS_RUN_SCRIPT_TIMEOUT: 10000,
},
} as any)
} as any),
).rejects.toThrow('Array buffer allocation failed');
});
@@ -43,7 +43,7 @@ test('Rejects when operation runs for longer than allowed ', async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 8,
FLOWS_RUN_SCRIPT_TIMEOUT: 250,
},
} as any)
} as any),
).rejects.toThrow('Script execution timed out.');
await expect(
@@ -53,7 +53,7 @@ test('Rejects when operation runs for longer than allowed ', async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 8,
FLOWS_RUN_SCRIPT_TIMEOUT: 250,
},
} as any)
} as any),
).rejects.toThrow('Script execution timed out.');
});
@@ -69,7 +69,7 @@ test('Rejects when cjs modules are used', async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 8,
FLOWS_RUN_SCRIPT_TIMEOUT: 10000,
},
} as any)
} as any),
).rejects.toThrow('require is not defined');
});
@@ -85,7 +85,7 @@ test('Rejects when esm modules are used', async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 8,
FLOWS_RUN_SCRIPT_TIMEOUT: 10000,
},
} as any)
} as any),
).rejects.toThrow('Cannot use import statement outside a module [<isolated-vm>:2:3]');
});
@@ -101,7 +101,7 @@ test('Rejects when code contains syntax errors', async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 8,
FLOWS_RUN_SCRIPT_TIMEOUT: 10000,
},
} as any)
} as any),
).rejects.toThrow('Unexpected end of input [<isolated-vm>:3:2]');
});
@@ -119,7 +119,7 @@ test('Rejects when code does something illegal', async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 8,
FLOWS_RUN_SCRIPT_TIMEOUT: 10000,
},
} as any)
} as any),
).rejects.toThrow('a is not defined');
});
@@ -135,7 +135,7 @@ test("Rejects when code doesn't return valid function", async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 8,
FLOWS_RUN_SCRIPT_TIMEOUT: 10000,
},
} as any)
} as any),
).rejects.toThrow('module.exports is not a function');
});
@@ -153,7 +153,7 @@ test('Rejects when returned function throws', async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 8,
FLOWS_RUN_SCRIPT_TIMEOUT: 10000,
},
} as any)
} as any),
).rejects.toThrow('yup, this failed');
});
@@ -171,7 +171,7 @@ test('Resolves when synchronous function is valid', async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 8,
FLOWS_RUN_SCRIPT_TIMEOUT: 10000,
},
} as any)
} as any),
).resolves.toEqual({ result: 'Hello, I ran synchronously' });
});
@@ -189,7 +189,7 @@ test('Resolves when asynchronous function is valid', async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 8,
FLOWS_RUN_SCRIPT_TIMEOUT: 10000,
},
} as any)
} as any),
).resolves.toEqual({ result: 'Hello, I ran asynchronously' });
});
@@ -207,7 +207,7 @@ test('Rejects when wrong unit is passed to max memory config', async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 'thisShouldFail',
FLOWS_RUN_SCRIPT_TIMEOUT: 10000,
},
} as any)
} as any),
).rejects.toThrow('`memoryLimit` must be a number');
});
@@ -225,6 +225,6 @@ test('Rejects when wrong unit is passed to timeout config', async () => {
FLOWS_RUN_SCRIPT_MAX_MEMORY: 8,
FLOWS_RUN_SCRIPT_TIMEOUT: 'thisShouldFail',
},
} as any)
} as any),
).rejects.toThrow('`timeout` must be a 32-bit number');
});


@@ -40,7 +40,7 @@ export default defineOperationApi<Options>({
trace: new ivm.Callback((...args: any[]) => logger.trace(unpackArgs(args)), { sync: true }),
debug: new ivm.Callback((...args: any[]) => logger.debug(unpackArgs(args)), { sync: true }),
},
{ copy: true }
{ copy: true },
);
// Run the operation once to define the module.exports function


@@ -32,12 +32,12 @@ test.each([
])('accountability for permissions "$permissions" should be $expected', async ({ permissions, expected }) => {
await config.handler(
{ collection: testCollection, permissions } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService)).toHaveBeenCalledWith(
testCollection,
expect.objectContaining({ schema: {}, accountability: expected, knex: undefined })
expect.objectContaining({ schema: {}, accountability: expected, knex: undefined }),
);
});
@@ -47,7 +47,7 @@ test.each([
])('payload $payload should be passed as $expected', async ({ payload, expected }) => {
await config.handler(
{ collection: testCollection, payload } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
if (expected) {
@@ -60,7 +60,7 @@ test.each([
test('should emit events when true', async () => {
await config.handler(
{ collection: testCollection, payload: {}, emitEvents: true } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService).prototype.createMany).toHaveBeenCalledWith([{}], { emitEvents: true });
@@ -69,7 +69,7 @@ test('should emit events when true', async () => {
test.each([undefined, false])('should not emit events when %s', async (emitEvents) => {
await config.handler(
{ collection: testCollection, payload: {}, emitEvents } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService).prototype.createMany).toHaveBeenCalledWith([{}], { emitEvents: false });


@@ -36,19 +36,19 @@ describe('Operations / Item Delete', () => {
])('accountability for permissions "$permissions" should be $expected', async ({ permissions, expected }) => {
await config.handler(
{ collection: testCollection, query: testQuery, permissions } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService).toHaveBeenCalledWith(
testCollection,
expect.objectContaining({ schema: {}, accountability: expected, knex: undefined })
expect.objectContaining({ schema: {}, accountability: expected, knex: undefined }),
);
});
test('should have fallback when query is not defined', async () => {
await config.handler(
{ collection: testCollection } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.deleteByQuery).toHaveBeenCalledWith({}, expect.anything());
@@ -57,7 +57,7 @@ describe('Operations / Item Delete', () => {
test.each([undefined, []])('should call deleteByQuery with correct query when key is $payload', async (key) => {
await config.handler(
{ collection: testCollection, query: testQuery, key } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.deleteByQuery).toHaveBeenCalledWith(testQuery, expect.anything());
@@ -68,7 +68,7 @@ describe('Operations / Item Delete', () => {
test('should emit events for deleteByQuery when true', async () => {
await config.handler(
{ collection: testCollection, query: testQuery, emitEvents: true } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.deleteByQuery).toHaveBeenCalledWith(testQuery, { emitEvents: true });
@@ -77,7 +77,7 @@ describe('Operations / Item Delete', () => {
test.each([undefined, false])('should not emit events for deleteByQuery when %s', async (emitEvents) => {
await config.handler(
{ collection: testCollection, query: testQuery, emitEvents } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.deleteByQuery).toHaveBeenCalledWith(testQuery, { emitEvents: false });
@@ -86,7 +86,7 @@ describe('Operations / Item Delete', () => {
test.each([1, [1]])('should call deleteOne when key is $payload', async (key) => {
await config.handler(
{ collection: testCollection, key } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.deleteByQuery).not.toHaveBeenCalled();
@@ -99,7 +99,7 @@ describe('Operations / Item Delete', () => {
await config.handler(
{ collection: testCollection, key, emitEvents: true } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.deleteOne).toHaveBeenCalledWith(key, { emitEvents: true });
@@ -110,7 +110,7 @@ describe('Operations / Item Delete', () => {
await config.handler(
{ collection: testCollection, key, emitEvents } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.deleteOne).toHaveBeenCalledWith(key, { emitEvents: false });
@@ -119,7 +119,7 @@ describe('Operations / Item Delete', () => {
test('should call deleteMany when key is an array with more than one item', async () => {
await config.handler(
{ collection: testCollection, key: [1, 2, 3] } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.deleteByQuery).not.toHaveBeenCalled();
@@ -132,7 +132,7 @@ describe('Operations / Item Delete', () => {
await config.handler(
{ collection: testCollection, key: keys, emitEvents: true } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.deleteMany).toHaveBeenCalledWith(keys, { emitEvents: true });
@@ -143,7 +143,7 @@ describe('Operations / Item Delete', () => {
await config.handler(
{ collection: testCollection, key: keys, emitEvents } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.deleteMany).toHaveBeenCalledWith(keys, { emitEvents: false });


@@ -36,19 +36,19 @@ describe('Operations / Item Read', () => {
])('accountability for permissions "$permissions" should be $expected', async ({ permissions, expected }) => {
await config.handler(
{ collection: testCollection, query: testQuery, permissions } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService).toHaveBeenCalledWith(
testCollection,
expect.objectContaining({ schema: {}, accountability: expected, knex: undefined })
expect.objectContaining({ schema: {}, accountability: expected, knex: undefined }),
);
});
test('should have fallback when query is not defined', async () => {
await config.handler(
{ collection: testCollection } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.readByQuery).toHaveBeenCalledWith({}, expect.anything());
@@ -57,7 +57,7 @@ describe('Operations / Item Read', () => {
test.each([undefined, []])('should call readByQuery with correct query when key is $payload', async (key) => {
await config.handler(
{ collection: testCollection, query: testQuery, key } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.readByQuery).toHaveBeenCalledWith(testQuery, expect.anything());
@@ -68,7 +68,7 @@ describe('Operations / Item Read', () => {
test('should emit events for readByQuery when true', async () => {
await config.handler(
{ collection: testCollection, query: testQuery, emitEvents: true } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.readByQuery).toHaveBeenCalledWith(testQuery, { emitEvents: true });
@@ -77,7 +77,7 @@ describe('Operations / Item Read', () => {
test.each([undefined, false])('should not emit events for readByQuery when %s', async (emitEvents) => {
await config.handler(
{ collection: testCollection, query: testQuery, emitEvents } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.readByQuery).toHaveBeenCalledWith(testQuery, { emitEvents: false });
@@ -86,7 +86,7 @@ describe('Operations / Item Read', () => {
test.each([1, [1]])('should call readOne when key is $payload', async (key) => {
await config.handler(
{ collection: testCollection, key } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.readByQuery).not.toHaveBeenCalled();
@@ -99,7 +99,7 @@ describe('Operations / Item Read', () => {
await config.handler(
{ collection: testCollection, key, emitEvents: true } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.readOne).toHaveBeenCalledWith(key, {}, { emitEvents: true });
@@ -110,7 +110,7 @@ describe('Operations / Item Read', () => {
await config.handler(
{ collection: testCollection, key, emitEvents } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.readOne).toHaveBeenCalledWith(key, {}, { emitEvents: false });
@@ -119,7 +119,7 @@ describe('Operations / Item Read', () => {
test('should call readMany when key is an array with more than one item', async () => {
await config.handler(
{ collection: testCollection, key: [1, 2, 3] } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.readByQuery).not.toHaveBeenCalled();
@@ -132,7 +132,7 @@ describe('Operations / Item Read', () => {
await config.handler(
{ collection: testCollection, key: keys, emitEvents: true } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.readMany).toHaveBeenCalledWith(keys, {}, { emitEvents: true });
@@ -143,7 +143,7 @@ describe('Operations / Item Read', () => {
await config.handler(
{ collection: testCollection, key: keys, emitEvents } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(ItemsService.prototype.readMany).toHaveBeenCalledWith(keys, {}, { emitEvents: false });


@@ -36,19 +36,19 @@ describe('Operations / Item Update', () => {
])('accountability for permissions "$permissions" should be $expected', async ({ permissions, expected }) => {
await config.handler(
{ collection: testCollection, payload: testPayload, permissions } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService)).toHaveBeenCalledWith(
testCollection,
expect.objectContaining({ schema: {}, accountability: expected, knex: undefined })
expect.objectContaining({ schema: {}, accountability: expected, knex: undefined }),
);
});
test('should return null when payload is not defined', async () => {
const result = await config.handler(
{ collection: testCollection } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(result).toBe(null);
@@ -62,7 +62,7 @@ describe('Operations / Item Update', () => {
await config.handler(
{ collection: testCollection, payload: testPayload, query, key } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService).prototype.updateByQuery).toHaveBeenCalledWith(query, testPayload, expect.anything());
@@ -75,7 +75,7 @@ describe('Operations / Item Update', () => {
await config.handler(
{ collection: testCollection, payload: testPayload, query, emitEvents: true } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService).prototype.updateByQuery).toHaveBeenCalledWith(query, testPayload, {
@@ -88,7 +88,7 @@ describe('Operations / Item Update', () => {
await config.handler(
{ collection: testCollection, payload: testPayload, query, emitEvents } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService).prototype.updateByQuery).toHaveBeenCalledWith(query, testPayload, {
@@ -99,7 +99,7 @@ describe('Operations / Item Update', () => {
test.each([1, [1]])('should call updateOne when key is $payload', async (key) => {
await config.handler(
{ collection: testCollection, payload: testPayload, key } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService).prototype.updateByQuery).not.toHaveBeenCalled();
@@ -112,7 +112,7 @@ describe('Operations / Item Update', () => {
await config.handler(
{ collection: testCollection, payload: testPayload, key, emitEvents: true } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService).prototype.updateOne).toHaveBeenCalledWith(key, testPayload, { emitEvents: true });
@@ -123,7 +123,7 @@ describe('Operations / Item Update', () => {
await config.handler(
{ collection: testCollection, payload: testPayload, key: key, emitEvents } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService).prototype.updateOne).toHaveBeenCalledWith(key, testPayload, { emitEvents: false });
@@ -132,7 +132,7 @@ describe('Operations / Item Update', () => {
test('should call updateMany when key is an array with more than one item', async () => {
await config.handler(
{ collection: testCollection, payload: testPayload, key: [1, 2, 3] } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService).prototype.updateByQuery).not.toHaveBeenCalled();
@@ -145,7 +145,7 @@ describe('Operations / Item Update', () => {
await config.handler(
{ collection: testCollection, payload: testPayload, key: keys, emitEvents: true } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService).prototype.updateMany).toHaveBeenCalledWith(keys, testPayload, { emitEvents: true });
@@ -156,7 +156,7 @@ describe('Operations / Item Update', () => {
await config.handler(
{ collection: testCollection, payload: testPayload, key: keys, emitEvents } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(ItemsService).prototype.updateMany).toHaveBeenCalledWith(keys, testPayload, { emitEvents: false });


@@ -20,7 +20,7 @@ export default defineOperationApi<Options>({
handler: async (
{ collection, key, payload, query, emitEvents, permissions },
{ accountability, database, getSchema }
{ accountability, database, getSchema },
) => {
const schema = await getSchema({ database });
let customAccountability: Accountability | null;


@@ -33,7 +33,7 @@ describe('Operations / Mail', () => {
await config.handler(options, mockOperationContext);
expect(mailServiceSendSpy).toHaveBeenCalledWith(
expect.objectContaining({ to: options.to, subject: options.subject, template: { name: 'base', data: {} } })
expect.objectContaining({ to: options.to, subject: options.subject, template: { name: 'base', data: {} } }),
);
expect(mailServiceSendSpy).toHaveBeenCalledWith(expect.not.objectContaining({ html: expect.any(String) }));
@@ -50,7 +50,7 @@ describe('Operations / Mail', () => {
await config.handler(options, mockOperationContext);
expect(mailServiceSendSpy).toHaveBeenCalledWith(
expect.objectContaining({ to: options.to, subject: options.subject, template: { name: 'custom', data: {} } })
expect.objectContaining({ to: options.to, subject: options.subject, template: { name: 'custom', data: {} } }),
);
expect(mailServiceSendSpy).toHaveBeenCalledWith(expect.not.objectContaining({ html: expect.any(String) }));
@@ -71,7 +71,7 @@ describe('Operations / Mail', () => {
to: options.to,
subject: options.subject,
template: { name: 'base', data: { key: 'value' } },
})
}),
);
expect(mailServiceSendSpy).toHaveBeenCalledWith(expect.not.objectContaining({ html: expect.any(String) }));
@@ -93,7 +93,7 @@ describe('Operations / Mail', () => {
to: options.to,
subject: options.subject,
template: { name: 'custom', data: { key: 'value' } },
})
}),
);
expect(mailServiceSendSpy).toHaveBeenCalledWith(expect.not.objectContaining({ html: expect.any(String) }));
@@ -114,7 +114,7 @@ describe('Operations / Mail', () => {
to: options.to,
subject: options.subject,
html: options.body,
})
}),
);
expect(mdSpy).not.toHaveBeenCalled();
@@ -135,7 +135,7 @@ describe('Operations / Mail', () => {
to: options.to,
subject: options.subject,
html: '<p>test body</p>\n',
})
}),
);
expect(mdSpy).toHaveBeenCalled();


@@ -44,10 +44,10 @@ test.each([
])('message $message should be sent as string $expected', async ({ message, expected }) => {
await config.handler(
{ recipient: testRecipient, message } as any,
{ accountability: testAccountability, getSchema } as any
{ accountability: testAccountability, getSchema } as any,
);
expect(vi.mocked(NotificationsService).prototype.createMany).toHaveBeenCalledWith(
expect.arrayContaining([expect.objectContaining({ message: expected })])
expect.arrayContaining([expect.objectContaining({ message: expected })]),
);
});


@@ -18,7 +18,7 @@ export default defineOperationApi<Options>({
handler: async (
{ recipient, subject, message, permissions, collection, item },
{ accountability, database, getSchema }
{ accountability, database, getSchema },
) => {
const schema = await getSchema({ database });
let customAccountability: Accountability | null;


@@ -31,7 +31,7 @@ test('no headers configured', async () => {
method,
data: body,
headers: {},
})
}),
);
});
@@ -54,7 +54,7 @@ test('headers array is converted to object', async () => {
header1: 'value1',
header2: 'value2',
}),
})
}),
);
});
@@ -71,7 +71,7 @@ test('should not automatically set Content-Type header when it is already define
headers: expect.objectContaining({
'Content-Type': expect.not.stringContaining('application/json'),
}),
})
}),
);
});
@@ -88,7 +88,7 @@ test('should not automatically set Content-Type header to "application/json" whe
headers: expect.not.objectContaining({
'Content-Type': 'application/json',
}),
})
}),
);
});
@@ -106,6 +106,6 @@ test('should automatically set Content-Type header to "application/json" when th
header1: 'value1',
'Content-Type': 'application/json',
}),
})
}),
);
});


@@ -16,10 +16,13 @@ export default defineOperationApi<Options>({
handler: async ({ url, method, body, headers }) => {
const customHeaders =
headers?.reduce((acc, { header, value }) => {
acc[header] = value;
return acc;
}, {} as Record<string, string>) ?? {};
headers?.reduce(
(acc, { header, value }) => {
acc[header] = value;
return acc;
},
{} as Record<string, string>,
) ?? {};
if (!customHeaders['Content-Type'] && (typeof body === 'object' || isValidJSON(body))) {
customHeaders['Content-Type'] = 'application/json';


@@ -40,7 +40,7 @@ export default defineOperationApi<Options>({
const batchResults = await Promise.all(
batch.map((payload) => {
return flowManager.runOperationFlow(flow, payload, omit(context, 'data'));
})
}),
);
result.push(...batchResults);
@@ -53,7 +53,7 @@ export default defineOperationApi<Options>({
return await Promise.all(
payloadObject.map((payload) => {
return flowManager.runOperationFlow(flow, payload, omit(context, 'data'));
})
}),
);
}
}


@@ -12,7 +12,7 @@ type IRateLimiterOptionsOverrides = Partial<IRateLimiterOptions> | Partial<IRate
export function createRateLimiter(
configPrefix = 'RATE_LIMITER',
configOverrides?: IRateLimiterOptionsOverrides
configOverrides?: IRateLimiterOptionsOverrides,
): RateLimiterAbstract {
switch (env['RATE_LIMITER_STORE']) {
case 'redis':
@@ -26,17 +26,17 @@ export function createRateLimiter(
function getConfig(
store: 'memory',
configPrefix: string,
overrides?: IRateLimiterOptionsOverrides
overrides?: IRateLimiterOptionsOverrides,
): IRateLimiterOptions;
function getConfig(
store: 'redis',
configPrefix: string,
overrides?: IRateLimiterOptionsOverrides
overrides?: IRateLimiterOptionsOverrides,
): IRateLimiterStoreOptions;
function getConfig(
store: 'memory' | 'redis' = 'memory',
configPrefix = 'RATE_LIMITER',
overrides?: IRateLimiterOptionsOverrides
overrides?: IRateLimiterOptionsOverrides,
): IRateLimiterOptions | IRateLimiterStoreOptions {
const config: any = getConfigFromEnv(`${configPrefix}_`, `${configPrefix}_${store}_`);


@@ -137,7 +137,7 @@ export async function createServer(): Promise<http.Server> {
database: getDatabase(),
schema: null,
accountability: null,
}
},
);
if (env['NODE_ENV'] !== 'development') {
@@ -165,7 +165,7 @@ export async function startServer(): Promise<void> {
database: getDatabase(),
schema: null,
accountability: null,
}
},
);
})
.once('error', (err: any) => {


@@ -62,10 +62,13 @@ export class ActivityService extends ItemsService {
filter: { id: { _in: mentions.map((mention) => mention.substring(1)) } },
});
const userPreviews = templateData.reduce((acc, user) => {
acc[user['id']] = `<em>${userName(user)}</em>`;
return acc;
}, {} as Record<string, string>);
const userPreviews = templateData.reduce(
(acc, user) => {
acc[user['id']] = `<em>${userName(user)}</em>`;
return acc;
},
{} as Record<string, string>,
);
let comment = data['comment'];


@@ -38,7 +38,7 @@ export class AssetsService {
async getAsset(
id: string,
transformation?: TransformationSet,
range?: Range
range?: Range,
): Promise<{ stream: Readable; file: any; stat: Stat }> {
const storage = await getStorage();


@@ -43,7 +43,7 @@ export class AuthenticationService {
async login(
providerName: string = DEFAULT_AUTH_PROVIDER,
payload: Record<string, any>,
otp?: string
otp?: string,
): Promise<LoginResult> {
const { nanoid } = await import('nanoid');
@@ -75,7 +75,7 @@ export class AuthenticationService {
'u.tfa_secret',
'u.provider',
'u.external_identifier',
'u.auth_data'
'u.auth_data',
)
.from('directus_users as u')
.leftJoin('directus_roles as r', 'u.role', 'r.id')
@@ -94,7 +94,7 @@ export class AuthenticationService {
database: this.knex,
schema: this.schema,
accountability: this.accountability,
}
},
);
const emitStatus = (status: 'fail' | 'success') => {
@@ -110,7 +110,7 @@ export class AuthenticationService {
database: this.knex,
schema: this.schema,
accountability: this.accountability,
}
},
);
};
@@ -197,7 +197,7 @@ export class AuthenticationService {
database: this.knex,
schema: this.schema,
accountability: this.accountability,
}
},
);
const accessToken = jwt.sign(customClaims, env['SECRET'] as string, {
@@ -368,7 +368,7 @@ export class AuthenticationService {
database: this.knex,
schema: this.schema,
accountability: this.accountability,
}
},
);
const accessToken = jwt.sign(customClaims, env['SECRET'] as string, {
@@ -410,7 +410,7 @@ export class AuthenticationService {
'u.role',
'u.provider',
'u.external_identifier',
'u.auth_data'
'u.auth_data',
)
.from('directus_sessions as s')
.innerJoin('directus_users as u', 's.user', 'u.id')
@@ -439,7 +439,7 @@ export class AuthenticationService {
'role',
'provider',
'external_identifier',
'auth_data'
'auth_data',
)
.from('directus_users')
.where('id', userID)

View File

@@ -56,7 +56,7 @@ export class AuthorizationService {
collectionsRequested.map(({ collection }) => collection).includes(permission.collection)
);
}),
(curr, prev) => curr.collection === prev.collection && curr.action === prev.action && curr.role === prev.role
(curr, prev) => curr.collection === prev.collection && curr.action === prev.action && curr.role === prev.role,
) ?? [];
// If the permissions don't match the collections, you don't have permission to read all of them
@@ -123,7 +123,7 @@ export class AuthorizationService {
function checkFields(
collection: string,
children: (NestedCollectionNode | FieldNode | FunctionFieldNode)[],
aggregate?: Aggregate | null
aggregate?: Aggregate | null,
) {
// We check the availability of the permissions in the step before this is run
const permissions = permissionsForCollections.find((permission) => permission.collection === collection)!;
@@ -161,7 +161,7 @@ export class AuthorizationService {
ast: AST | NestedCollectionNode | FieldNode | FunctionFieldNode,
schema: SchemaOverview,
action: PermissionsAction,
accountability: Accountability | null
accountability: Accountability | null,
) {
let requiredFieldPermissions: Record<string, Set<string>> = {};
@@ -170,7 +170,7 @@ export class AuthorizationService {
for (const collection of Object.keys(ast.children)) {
requiredFieldPermissions = mergeRequiredFieldPermissions(
requiredFieldPermissions,
extractRequiredFieldPermissions(collection, ast.query?.[collection]?.filter ?? {})
extractRequiredFieldPermissions(collection, ast.query?.[collection]?.filter ?? {}),
);
for (const child of ast.children[collection]!) {
@@ -180,7 +180,7 @@ export class AuthorizationService {
//Only add relational field if deep child has a filter
if (child.type !== 'field') {
(requiredFieldPermissions[collection] || (requiredFieldPermissions[collection] = new Set())).add(
child.fieldKey
child.fieldKey,
);
}
@@ -191,7 +191,7 @@ export class AuthorizationService {
} else {
requiredFieldPermissions = mergeRequiredFieldPermissions(
requiredFieldPermissions,
extractRequiredFieldPermissions(ast.name, ast.query?.filter ?? {})
extractRequiredFieldPermissions(ast.name, ast.query?.filter ?? {}),
);
for (const child of ast.children) {
@@ -201,7 +201,7 @@ export class AuthorizationService {
// Only add relational field if deep child has a filter
if (child.type !== 'field') {
(requiredFieldPermissions[ast.name] || (requiredFieldPermissions[ast.name] = new Set())).add(
child.fieldKey
child.fieldKey,
);
}
@@ -222,7 +222,7 @@ export class AuthorizationService {
collection: string,
filter: Filter,
parentCollection?: string,
parentField?: string
parentField?: string,
) {
return reduce(
filter,
@@ -235,7 +235,7 @@ export class AuthorizationService {
collection,
filter,
parentCollection,
parentField
parentField,
);
result = mergeRequiredFieldPermissions(result, requiredPermissions);
@@ -325,7 +325,7 @@ export class AuthorizationService {
'',
filter,
parentCollection,
filterKey
filterKey,
);
result = mergeRequiredFieldPermissions(result, requiredPermissions);
@@ -337,7 +337,7 @@ export class AuthorizationService {
'',
filterValue,
parentCollection,
filterKey
filterKey,
);
result = mergeRequiredFieldPermissions(result, requiredPermissions);
@@ -348,7 +348,7 @@ export class AuthorizationService {
return result;
},
{}
{},
);
}
@@ -369,13 +369,13 @@ export class AuthorizationService {
schema: SchemaOverview,
action: PermissionsAction,
requiredPermissions: Record<string, Set<string>>,
aliasMap?: Record<string, string> | null
aliasMap?: Record<string, string> | null,
) {
if (accountability?.admin === true) return;
for (const collection of Object.keys(requiredPermissions)) {
const permission = accountability?.permissions?.find(
(permission) => permission.collection === collection && permission.action === 'read'
(permission) => permission.collection === collection && permission.action === 'read',
);
let allowedFields: string[];
@@ -383,7 +383,7 @@ export class AuthorizationService {
// Allow the filtering of top level ID for actions such as update and delete
if (action !== 'read' && collection === rootCollection) {
const actionPermission = accountability?.permissions?.find(
(permission) => permission.collection === collection && permission.action === action
(permission) => permission.collection === collection && permission.action === action,
);
if (!actionPermission || !actionPermission.fields) {
@@ -422,7 +422,7 @@ export class AuthorizationService {
function applyFilters(
ast: AST | NestedCollectionNode | FieldNode | FunctionFieldNode,
accountability: Accountability | null
accountability: Accountability | null,
): AST | NestedCollectionNode | FieldNode | FunctionFieldNode {
if (ast.type === 'functionField') {
const collection = ast.relatedCollection;
@@ -579,9 +579,9 @@ export class AuthorizationService {
validationErrors.push(
...flatten(
validatePayload(permission.validation!, payloadWithPresets).map((error) =>
error.details.map((details) => new FailedValidationError(joiValidationErrorItemToErrorExtensions(details)))
)
)
error.details.map((details) => new FailedValidationError(joiValidationErrorItemToErrorExtensions(details))),
),
),
);
if (validationErrors.length > 0) throw validationErrors;


@@ -156,14 +156,14 @@ export class CollectionsService {
if (sortedFieldPayloads.length < fieldPayloads.length) {
const fieldsWithGroups = groupBy(
fieldPayloads.filter((field) => field?.group),
(field) => field?.group
(field) => field?.group,
);
// The sort order is restarted from 1 for fields in each group and appended to sortedFieldPayloads.
// Lodash merge is used so that the "sort" can be overridden if defined.
for (const [_group, fields] of Object.entries(fieldsWithGroups)) {
sortedFieldPayloads = sortedFieldPayloads.concat(
fields.map((field, index) => merge({ sort: index + 1 }, field))
fields.map((field, index) => merge({ sort: index + 1 }, field)),
);
}
}
@@ -190,7 +190,7 @@ export class CollectionsService {
{
bypassEmitAction: (params) =>
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
}
},
);
}
@@ -292,7 +292,7 @@ export class CollectionsService {
...meta,
[item.collection]: item.group,
}),
{}
{},
);
let collectionsYouHavePermissionToRead: string[] = this.accountability
@@ -426,7 +426,7 @@ export class CollectionsService {
...opts,
bypassEmitAction: (params) =>
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
}
},
);
}
@@ -619,7 +619,7 @@ export class CollectionsService {
if (revisionsToDelete.length > 0) {
const chunks = chunk(
revisionsToDelete.map((record) => record.id),
10000
10000,
);
for (const keys of chunks) {


@@ -85,7 +85,7 @@ export class FieldsService {
...column,
default_value: getDefaultValue(
column,
fields.find((field) => field.collection === column.table && field.field === column.name)
fields.find((field) => field.collection === column.table && field.field === column.name),
),
}));
@@ -148,7 +148,7 @@ export class FieldsService {
const knownCollections = Object.keys(this.schema.collections);
const result = [...columnsWithSystem, ...aliasFieldsAsField].filter((field) =>
knownCollections.includes(field.collection)
knownCollections.includes(field.collection),
);
// Filter the result so we only return the fields you have read access to
@@ -250,7 +250,7 @@ export class FieldsService {
collection: string,
field: Partial<Field> & { field: string; type: Type | null },
table?: Knex.CreateTableBuilder, // allows collection creation to
opts?: MutationOptions
opts?: MutationOptions,
): Promise<void> {
if (this.accountability && this.accountability.admin !== true) {
throw new ForbiddenError();
@@ -263,7 +263,7 @@ export class FieldsService {
const exists =
field.field in this.schema.collections[collection]!.fields ||
isNil(
await this.knex.select('id').from('directus_fields').where({ collection, field: field.field }).first()
await this.knex.select('id').from('directus_fields').where({ collection, field: field.field }).first(),
) === false;
// Check if field already exists, either as a column, or as a row in directus_fields
@@ -297,7 +297,7 @@ export class FieldsService {
database: trx,
schema: this.schema,
accountability: this.accountability,
}
},
);
if (hookAdjustedField.type && ALIAS_TYPES.includes(hookAdjustedField.type) === false) {
@@ -325,7 +325,7 @@ export class FieldsService {
collection: collection,
field: hookAdjustedField.field,
},
{ emitEvents: false }
{ emitEvents: false },
);
}
@@ -393,7 +393,7 @@ export class FieldsService {
database: this.knex,
schema: this.schema,
accountability: this.accountability,
}
},
);
const record = field.meta
@@ -437,7 +437,7 @@ export class FieldsService {
collection: collection,
field: hookAdjustedField.field,
},
{ emitEvents: false }
{ emitEvents: false },
);
} else {
await this.itemsService.createOne(
@@ -446,7 +446,7 @@ export class FieldsService {
collection: collection,
field: hookAdjustedField.field,
},
{ emitEvents: false }
{ emitEvents: false },
);
}
}
@@ -515,7 +515,7 @@ export class FieldsService {
database: this.knex,
schema: this.schema,
accountability: this.accountability,
}
},
);
await this.knex.transaction(async (trx) => {


@@ -40,7 +40,7 @@ export class FilesService extends ItemsService {
stream: BusboyFileStream | Readable,
data: Partial<File> & { storage: string },
primaryKey?: PrimaryKey,
opts?: MutationOptions
opts?: MutationOptions,
): Promise<PrimaryKey> {
const storage = await getStorage();
@@ -94,9 +94,8 @@ export class FilesService extends ItemsService {
// Used to clean up if something goes wrong
const cleanUp = async () => {
try {
if (isReplacement === true ){
if (isReplacement === true) {
// If this is a replacement that failed, we need to delete the temp file
await disk.delete(tempFilenameDisk);
} else {
@@ -107,9 +106,8 @@ export class FilesService extends ItemsService {
// delete the final file
await disk.delete(payload.filename_disk!);
}
} catch (err: any) {
if (isReplacement === true ){
if (isReplacement === true) {
logger.warn(`Couldn't delete temp file ${tempFilenameDisk}`);
} else {
logger.warn(`Couldn't delete file ${payload.filename_disk}`);
@@ -213,7 +211,7 @@ export class FilesService extends ItemsService {
database: this.knex,
schema: this.schema,
accountability: this.accountability,
}
},
);
}
@@ -334,7 +332,7 @@ export class FilesService extends ItemsService {
}
resolve(metadata);
})
}),
);
});
}
@@ -344,7 +342,7 @@ export class FilesService extends ItemsService {
*/
async importOne(importURL: string, body: Partial<File>): Promise<PrimaryKey> {
const fileCreatePermissions = this.accountability?.permissions?.find(
(permission) => permission.collection === 'directus_files' && permission.action === 'create'
(permission) => permission.collection === 'directus_files' && permission.action === 'create',
);
if (this.accountability && this.accountability?.admin !== true && !fileCreatePermissions) {

Some files were not shown because too many files have changed in this diff.
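
Nearly every hunk above is the same mechanical change: Prettier 3 switched its default trailingComma option from "es5" to "all", so any argument or parameter list that breaks across multiple lines now ends with a trailing comma. The handful of reflowed reduce(...) calls reflect a related Prettier 3 layout change for callbacks whose seed value carries an `as` cast. Below is a minimal sketch of the trailing-comma behavior; it uses the real Prettier 3 API (format() is async in v3), while registerOperation and transform are hypothetical stand-ins for any call site in this diff:

import * as prettier from 'prettier';

// Hypothetical call site: a multi-argument call too long to fit on one line.
const source =
  'registerOperation(id, (data) => { return transform(data); }, { timeout: 10000 });';

async function main() {
  // trailingComma defaults to "all" in Prettier 3; no option needed.
  const formatted = await prettier.format(source, {
    parser: 'typescript',
    printWidth: 40, // force the call to break across lines
  });

  console.log(formatted);
  // Expected output, roughly:
  // registerOperation(
  //   id,
  //   (data) => {
  //     return transform(data);
  //   },
  //   { timeout: 10000 },  <- the comma this diff adds throughout
  // );
}

main();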