mirror of
https://github.com/directus/directus.git
synced 2026-04-25 03:00:53 -04:00
Collection export limit 0 leads to 500 error (#16722)
* prevent 500 error when limit=0
* short circuit parser instead
* removed unneeded comment
* setting a sensible minimum and step for export limit
* fixed notif message when exporting limit=-1
* re-added !exportSettings.limit fallback behavior
* guard against negative limit values lower than -1
* add unit test for sanitizeQuery
* remove unnecessary test cases because offset and page are validated in validateQuery
* delegate validation to validateQuery instead
* use sanitizeQuery util for export
* Added check for integers

Co-authored-by: ian <licitdev@gmail.com>

* validate instead of sanitize
* removed obsolete test
* added integer checks for limit, offset, page and sensible minimums

Co-authored-by: Azri Kahar <42867097+azrikahar@users.noreply.github.com>
Co-authored-by: ian <licitdev@gmail.com>
Co-authored-by: Rijk van Zanten <rijkvanzanten@me.com>
This commit is contained in:
@@ -14,6 +14,7 @@ import asyncHandler from '../utils/async-handler';
|
||||
import Busboy from 'busboy';
|
||||
import { flushCaches } from '../cache';
|
||||
import { generateHash } from '../utils/generate-hash';
|
||||
import { sanitizeQuery } from '../utils/sanitize-query';
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -154,8 +155,10 @@ router.post(
|
||||
schema: req.schema,
|
||||
});
|
||||
|
||||
const sanitizedQuery = sanitizeQuery(req.body.query, req.accountability ?? null);
|
||||
|
||||
// We're not awaiting this, as it's supposed to run async in the background
|
||||
service.exportToFile(req.params.collection, req.body.query, req.body.format, {
|
||||
service.exportToFile(req.params.collection, sanitizedQuery, req.body.format, {
|
||||
file: req.body.file,
|
||||
});
|
||||
|
||||
|
||||
@@ -339,6 +339,7 @@ export class ExportService {
|
||||
}
|
||||
|
||||
if (format === 'csv') {
|
||||
if (input.length === 0) return '';
|
||||
const parser = new CSVParser({
|
||||
transforms: [CSVTransforms.flatten({ separator: '.' })],
|
||||
header: options?.includeHeader !== false,
|
||||
|
||||
287
api/src/utils/sanitize-query.test.ts
Normal file
287
api/src/utils/sanitize-query.test.ts
Normal file
@@ -0,0 +1,287 @@
|
||||
import { describe, expect, test, vi } from 'vitest';
|
||||
|
||||
import { sanitizeQuery } from './sanitize-query';
|
||||
|
||||
// Mock only parseFilter from @directus/shared/utils (identity pass-through) so these
// tests exercise sanitizeQuery's own normalization logic rather than filter parsing.
vi.mock('@directus/shared/utils', async () => {
	const actual = (await vi.importActual('@directus/shared/utils')) as any;

	return {
		...actual,
		parseFilter: vi.fn().mockImplementation((value) => value),
	};
});
|
||||
|
||||
describe('limit', () => {
|
||||
test.each([-1, 0, 100])('should accept number %i', (limit) => {
|
||||
const sanitizedQuery = sanitizeQuery({ limit });
|
||||
|
||||
expect(sanitizedQuery.limit).toBe(limit);
|
||||
});
|
||||
|
||||
test('should accept string 1', () => {
|
||||
const limit = '1';
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ limit });
|
||||
|
||||
expect(sanitizedQuery.limit).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('fields', () => {
|
||||
test('should accept valid value', () => {
|
||||
const fields = ['field_a', 'field_b'];
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ fields });
|
||||
|
||||
expect(sanitizedQuery.fields).toEqual(['field_a', 'field_b']);
|
||||
});
|
||||
|
||||
test('should split as csv when it is a string', () => {
|
||||
const fields = 'field_a,field_b';
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ fields });
|
||||
|
||||
expect(sanitizedQuery.fields).toEqual(['field_a', 'field_b']);
|
||||
});
|
||||
|
||||
test('should split as nested csv when it is an array', () => {
|
||||
const fields = ['field_a,field_b', 'field_c'];
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ fields });
|
||||
|
||||
expect(sanitizedQuery.fields).toEqual(['field_a', 'field_b', 'field_c']);
|
||||
});
|
||||
|
||||
test('should trim', () => {
|
||||
const fields = [' field_a '];
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ fields });
|
||||
|
||||
expect(sanitizedQuery.fields).toEqual(['field_a']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('group', () => {
|
||||
test('should accept valid value', () => {
|
||||
const groupBy = ['group_a', 'group_b'];
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ groupBy });
|
||||
|
||||
expect(sanitizedQuery.group).toEqual(['group_a', 'group_b']);
|
||||
});
|
||||
|
||||
test('should split as csv when it is a string', () => {
|
||||
const groupBy = 'group_a,group_b';
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ groupBy });
|
||||
|
||||
expect(sanitizedQuery.group).toEqual(['group_a', 'group_b']);
|
||||
});
|
||||
|
||||
test('should split as nested csv when it is an array', () => {
|
||||
const groupBy = ['group_a,group_b', 'group_c'];
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ groupBy });
|
||||
|
||||
expect(sanitizedQuery.group).toEqual(['group_a', 'group_b', 'group_c']);
|
||||
});
|
||||
|
||||
test('should trim', () => {
|
||||
const groupBy = [' group_a '];
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ groupBy });
|
||||
|
||||
expect(sanitizedQuery.group).toEqual(['group_a']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('aggregate', () => {
|
||||
test('should accept valid value', () => {
|
||||
const aggregate = { count: '*' };
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ aggregate });
|
||||
|
||||
expect(sanitizedQuery.aggregate).toEqual({ count: ['*'] });
|
||||
});
|
||||
|
||||
test('should parse as json when it is a string', () => {
|
||||
const aggregate = '{ "count": "*" }';
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ aggregate });
|
||||
|
||||
expect(sanitizedQuery.aggregate).toEqual({ count: ['*'] });
|
||||
});
|
||||
});
|
||||
|
||||
describe('sort', () => {
|
||||
test('should accept valid value', () => {
|
||||
const sort = ['field_a', 'field_b'];
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ sort });
|
||||
|
||||
expect(sanitizedQuery.sort).toEqual(['field_a', 'field_b']);
|
||||
});
|
||||
|
||||
test('should split as csv when it is a string', () => {
|
||||
const sort = 'field_a,field_b';
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ sort });
|
||||
|
||||
expect(sanitizedQuery.sort).toEqual(['field_a', 'field_b']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('filter', () => {
|
||||
test('should accept valid value', () => {
|
||||
const filter = { field_a: { _eq: 'test' } };
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ filter });
|
||||
|
||||
expect(sanitizedQuery.filter).toEqual({ field_a: { _eq: 'test' } });
|
||||
});
|
||||
|
||||
test('should parse as json when it is a string', () => {
|
||||
const filter = '{ "field_a": { "_eq": "test" } }';
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ filter });
|
||||
|
||||
expect(sanitizedQuery.filter).toEqual({ field_a: { _eq: 'test' } });
|
||||
});
|
||||
});
|
||||
|
||||
describe('offset', () => {
|
||||
test('should accept number 1', () => {
|
||||
const offset = 1;
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ offset });
|
||||
|
||||
expect(sanitizedQuery.offset).toBe(1);
|
||||
});
|
||||
|
||||
test('should accept string 1', () => {
|
||||
const offset = '1';
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ offset });
|
||||
|
||||
expect(sanitizedQuery.offset).toBe(1);
|
||||
});
|
||||
|
||||
test('should ignore zero', () => {
|
||||
const offset = 0;
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ offset });
|
||||
|
||||
expect(sanitizedQuery.offset).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('page', () => {
|
||||
test('should accept number 1', () => {
|
||||
const page = 1;
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ page });
|
||||
|
||||
expect(sanitizedQuery.page).toBe(1);
|
||||
});
|
||||
|
||||
test('should accept string 1', () => {
|
||||
const page = '1';
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ page });
|
||||
|
||||
expect(sanitizedQuery.page).toBe(1);
|
||||
});
|
||||
|
||||
test('should ignore zero', () => {
|
||||
const page = 0;
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ page });
|
||||
|
||||
expect(sanitizedQuery.page).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('meta', () => {
|
||||
test.each([
|
||||
{ input: '*', expected: ['total_count', 'filter_count'] },
|
||||
{ input: 'total_count', expected: ['total_count'] },
|
||||
{ input: 'total_count,filter_count', expected: ['total_count', 'filter_count'] },
|
||||
{ input: ['total_count', 'filter_count'], expected: ['total_count', 'filter_count'] },
|
||||
])('should accept $input', ({ input, expected }) => {
|
||||
const sanitizedQuery = sanitizeQuery({ meta: input }) as any;
|
||||
|
||||
expect(sanitizedQuery.meta).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('search', () => {
|
||||
test('should accept valid value', () => {
|
||||
const search = 'test';
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ search });
|
||||
|
||||
expect(sanitizedQuery.search).toBe('test');
|
||||
});
|
||||
|
||||
test('should ignore non-string', () => {
|
||||
const search = ['test'];
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ search });
|
||||
|
||||
expect(sanitizedQuery.search).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('export', () => {
|
||||
test('should accept valid value', () => {
|
||||
const format = 'json';
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ export: format });
|
||||
|
||||
expect(sanitizedQuery.export).toBe('json');
|
||||
});
|
||||
});
|
||||
|
||||
describe('deep', () => {
|
||||
test('should accept valid value', () => {
|
||||
const deep = { deep: { relational_field: { _sort: ['name'] } } };
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ deep });
|
||||
|
||||
expect(sanitizedQuery.deep).toEqual({ deep: { relational_field: { _sort: ['name'] } } });
|
||||
});
|
||||
|
||||
test('should parse as json when it is a string', () => {
|
||||
const deep = { deep: { relational_field: { _sort: ['name'] } } };
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ deep });
|
||||
|
||||
expect(sanitizedQuery.deep).toEqual({ deep: { relational_field: { _sort: ['name'] } } });
|
||||
});
|
||||
|
||||
test('should ignore non-underscore-prefixed queries', () => {
|
||||
const deep = { deep: { relational_field_a: { _sort: ['name'] }, relational_field_b: { sort: ['name'] } } };
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ deep });
|
||||
|
||||
expect(sanitizedQuery.deep).toEqual({ deep: { relational_field_a: { _sort: ['name'] } } });
|
||||
});
|
||||
});
|
||||
|
||||
describe('alias', () => {
|
||||
test('should accept valid value', () => {
|
||||
const alias = { field_a: 'testField' };
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ alias });
|
||||
|
||||
expect(sanitizedQuery.alias).toEqual({ field_a: 'testField' });
|
||||
});
|
||||
|
||||
test('should parse as json when it is a string', () => {
|
||||
const alias = '{ "field_a": "testField" }';
|
||||
|
||||
const sanitizedQuery = sanitizeQuery({ alias });
|
||||
|
||||
expect(sanitizedQuery.alias).toEqual({ field_a: 'testField' });
|
||||
});
|
||||
});
|
||||
@@ -11,9 +11,9 @@ const querySchema = Joi.object({
|
||||
group: Joi.array().items(Joi.string()),
|
||||
sort: Joi.array().items(Joi.string()),
|
||||
filter: Joi.object({}).unknown(),
|
||||
limit: Joi.number(),
|
||||
offset: Joi.number(),
|
||||
page: Joi.number(),
|
||||
limit: Joi.number().integer().min(-1),
|
||||
offset: Joi.number().integer().min(1),
|
||||
page: Joi.number().integer().min(0),
|
||||
meta: Joi.array().items(Joi.string().valid('total_count', 'filter_count')),
|
||||
search: Joi.string(),
|
||||
export: Joi.string().valid('json', 'csv', 'xml'),
|
||||
|
||||
Reference in New Issue
Block a user