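/**
 * Blackbox tests for log redaction on `POST /auth/refresh`.
 *
 * For every configured database vendor, a Directus instance is spawned with
 * `LOG_STYLE=raw` and `LOG_LEVEL=info`, the session is refreshed over REST and
 * GraphQL (with the refresh token passed in the body or in a cookie), and the
 * captured log output is checked to confirm that `cookie` / `set-cookie`
 * headers are replaced with `--redact--`.
 */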
import config, { getUrl } from '@common/config';
import vendors from '@common/get-dbs-to-test';
import * as common from '@common/index';
import { TestLogger } from '@common/test-logger';
import { awaitDirectusConnection } from '@utils/await-connection';
import { ChildProcess, spawn } from 'child_process';
import { EnumType } from 'json-to-graphql-query';
import knex from 'knex';
import type { Knex } from 'knex';
import { cloneDeep } from 'lodash';
import request from 'supertest';

describe('Logger Redact Tests', () => {
	const databases = new Map<string, Knex>();
	const directusInstances = {} as { [vendor: string]: ChildProcess };
	const env = cloneDeep(config.envs);
	const authModes = ['json', 'cookie'];

	for (const vendor of vendors) {
		env[vendor].LOG_STYLE = 'raw';
		env[vendor].LOG_LEVEL = 'info';
		env[vendor].PORT = String(Number(env[vendor]!.PORT) + 500);
	}

	beforeAll(async () => {
		const promises = [];

		for (const vendor of vendors) {
			databases.set(vendor, knex(config.knexConfig[vendor]!));

			const server = spawn('node', ['api/cli', 'start'], { env: env[vendor] });
			directusInstances[vendor] = server;

			promises.push(awaitDirectusConnection(Number(env[vendor].PORT)));
		}

		// Give the server some time to start
		await Promise.all(promises);
	}, 180000);

	afterAll(async () => {
		for (const [vendor, connection] of databases) {
			directusInstances[vendor]!.kill();

			await connection.destroy();
		}
	});

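	// Each scenario logs in over REST and GraphQL, refreshes the resulting session, and then
	// inspects the captured log output for redacted `cookie` / `set-cookie` headers.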
	describe('POST /refresh', () => {
		describe('refreshes with refresh_token in the body', () => {
			describe.each(authModes)('for %s mode', (mode) => {
				common.TEST_USERS.forEach((userKey) => {
					describe(common.USER[userKey].NAME, () => {
						it.each(vendors)('%s', async (vendor) => {
							// Setup
							const refreshToken = (
								await request(getUrl(vendor, env))
									.post(`/auth/login`)
									.send({ email: common.USER[userKey].EMAIL, password: common.USER[userKey].PASSWORD })
									.expect('Content-Type', /application\/json/)
							).body.data.refresh_token;

							const refreshToken2 = (
								await common.requestGraphQL(getUrl(vendor, env), true, null, {
									mutation: {
										auth_login: {
											__args: {
												email: common.USER[userKey].EMAIL,
												password: common.USER[userKey].PASSWORD,
											},
											refresh_token: true,
										},
									},
								})
							).body.data.auth_login.refresh_token;

							// Action
							const logger = new TestLogger(directusInstances[vendor], '/auth/refresh', true);

							const response = await request(getUrl(vendor, env))
								.post(`/auth/refresh`)
								.send({ refresh_token: refreshToken, mode })
								.expect('Content-Type', /application\/json/);

							const logs = await logger.getLogs();

							const loggerGql = new TestLogger(directusInstances[vendor], '/graphql/system', true);

							const mutationKey = 'auth_refresh';

							const gqlResponse = await common.requestGraphQL(getUrl(vendor, env), true, null, {
								mutation: {
									[mutationKey]: {
										__args: {
											refresh_token: refreshToken2,
											mode: new EnumType(mode),
										},
										access_token: true,
										expires: true,
										refresh_token: true,
									},
								},
							});

							const logsGql = await loggerGql.getLogs();

							// Assert
							expect(response.statusCode).toBe(200);
							if (mode === 'cookie') {
								expect(response.body).toMatchObject({
									data: {
										access_token: expect.any(String),
										expires: expect.any(Number),
									},
								});

								for (const log of [logs, logsGql]) {
									expect((log.match(/"cookie":"--redact--"/g) || []).length).toBe(0);
									expect((log.match(/"set-cookie":"--redact--"/g) || []).length).toBe(1);
								}
							} else {
								expect(response.body).toMatchObject({
									data: {
										access_token: expect.any(String),
										expires: expect.any(Number),
										refresh_token: expect.any(String),
									},
								});

								for (const log of [logs, logsGql]) {
									expect((log.match(/"cookie":"--redact--"/g) || []).length).toBe(0);
									expect((log.match(/"set-cookie":"--redact--"/g) || []).length).toBe(0);
								}
							}

							expect(gqlResponse.statusCode).toBe(200);
							expect(gqlResponse.body).toMatchObject({
								data: {
									[mutationKey]: {
										access_token: expect.any(String),
										expires: expect.any(String),
										refresh_token: expect.any(String),
									},
								},
							});
						});
					});
				});
			});
		});

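		// Same flow as above, but the refresh token is sent via the `directus_refresh_token`
		// cookie, so the incoming `cookie` header must be redacted in the logs as well.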
		describe('refreshes with refresh_token in the cookie', () => {
			describe.each(authModes)('for %s mode', (mode) => {
				common.TEST_USERS.forEach((userKey) => {
					describe(common.USER[userKey].NAME, () => {
						it.each(vendors)('%s', async (vendor) => {
							// Setup
							const cookieName = 'directus_refresh_token';

							const refreshToken = (
								await request(getUrl(vendor, env))
									.post(`/auth/login`)
									.send({ email: common.USER[userKey].EMAIL, password: common.USER[userKey].PASSWORD })
									.expect('Content-Type', /application\/json/)
							).body.data.refresh_token;

							const refreshToken2 = (
								await common.requestGraphQL(getUrl(vendor, env), true, null, {
									mutation: {
										auth_login: {
											__args: {
												email: common.USER[userKey].EMAIL,
												password: common.USER[userKey].PASSWORD,
											},
											refresh_token: true,
										},
									},
								})
							).body.data.auth_login.refresh_token;

							// Action
							const logger = new TestLogger(directusInstances[vendor], '/auth/refresh', true);

							const response = await request(getUrl(vendor, env))
								.post(`/auth/refresh`)
								.set('Cookie', `${cookieName}=${refreshToken}`)
								.send({ mode })
								.expect('Content-Type', /application\/json/);

							const logs = await logger.getLogs();

							const loggerGql = new TestLogger(directusInstances[vendor], '/graphql/system', true);

							const mutationKey = 'auth_refresh';

							const gqlResponse = await common.requestGraphQL(
								getUrl(vendor, env),
								true,
								null,
								{
									mutation: {
										[mutationKey]: {
											__args: {
												refresh_token: refreshToken2,
												mode: new EnumType(mode),
											},
											access_token: true,
											expires: true,
											refresh_token: true,
										},
									},
								},
								{ cookies: [`${cookieName}=${refreshToken2}`] }
							);

							const logsGql = await loggerGql.getLogs();

							// Assert
							expect(response.statusCode).toBe(200);
							if (mode === 'cookie') {
								expect(response.body).toMatchObject({
									data: {
										access_token: expect.any(String),
										expires: expect.any(Number),
									},
								});

								for (const log of [logs, logsGql]) {
									expect((log.match(/"cookie":"--redact--"/g) || []).length).toBe(1);
									expect((log.match(/"set-cookie":"--redact--"/g) || []).length).toBe(1);
								}
							} else {
								expect(response.body).toMatchObject({
									data: {
										access_token: expect.any(String),
										expires: expect.any(Number),
										refresh_token: expect.any(String),
									},
								});

								for (const log of [logs, logsGql]) {
									expect((log.match(/"cookie":"--redact--"/g) || []).length).toBe(1);
									expect((log.match(/"set-cookie":"--redact--"/g) || []).length).toBe(0);
								}
							}

							expect(gqlResponse.statusCode).toBe(200);
							expect(gqlResponse.body).toMatchObject({
								data: {
									[mutationKey]: {
										access_token: expect.any(String),
										expires: expect.any(String),
										refresh_token: expect.any(String),
									},
								},
							});
						});
					});
				});
			});
		});
	});
});