Add max batch mutation (#17535)
Co-authored-by: Brainslug <br41nslug@users.noreply.github.com>
Co-authored-by: Pascal Jufer <pascal-jufer@bluewin.ch>
@@ -43,6 +43,9 @@ PUBLIC_URL="/"
# Whether or not to enable GraphQL Introspection [true]
# GRAPHQL_INTROSPECTION=true

# The maximum number of items for batch mutations when creating, updating and deleting. ["Infinity"]
# MAX_BATCH_MUTATION="Infinity"

####################################################################################################
### Database
@@ -26,6 +26,7 @@ const allowedEnvironmentVars = [
  'ROOT_REDIRECT',
  'SERVE_APP',
  'GRAPHQL_INTROSPECTION',
  'MAX_BATCH_MUTATION',
  'LOGGER_.+',
  'ROBOTS_TXT',
  // server
@@ -201,6 +202,7 @@ const defaults: Record<string, any> = {
  PUBLIC_URL: '/',
  MAX_PAYLOAD_SIZE: '1mb',
  MAX_RELATIONAL_DEPTH: 10,
  MAX_BATCH_MUTATION: Infinity,
  ROBOTS_TXT: 'User-agent: *\nDisallow: /',

  DB_EXCLUDE_TABLES: 'spatial_ref_sys,sysdiagrams',
@@ -312,6 +314,8 @@ const typeMap: Record<string, string> = {

  GRAPHQL_INTROSPECTION: 'boolean',

  MAX_BATCH_MUTATION: 'number',

  SERVER_SHUTDOWN_TIMEOUT: 'number',
};
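Worth noting why the `Infinity` default coexists cleanly with the `'number'` entry in the type map: `Number('Infinity')` evaluates to the numeric `Infinity`, so whether the variable is unset (falling back to the numeric default), set to the documented string "Infinity", or set to a finite value like "100", the later `Number(env['MAX_BATCH_MUTATION'])` lookup always yields a usable upper bound. A quick illustrative check, not part of the diff:

// Illustrative only: how MAX_BATCH_MUTATION values coerce to a numeric limit.
console.log(Number('Infinity')); // Infinity — the documented default string still behaves as a number
console.log(Number('100')); // 100
console.log(150 > Number('100')); // true — a batch of 150 would exceed this limit
console.log(150 > Number('Infinity')); // false — the default never throttles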
@@ -143,6 +143,7 @@ export class CollectionsService {
  await fieldItemsService.createMany(fieldPayloads, {
    bypassEmitAction: (params) =>
      opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
    bypassLimits: true,
  });
}
@@ -29,6 +29,11 @@ export type QueryOptions = {
  emitEvents?: boolean;
};

export type MutationTracker = {
  trackMutations: (count: number) => void;
  getCount: () => number;
};

export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractService {
  collection: string;
  knex: Knex;
@@ -48,6 +53,22 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
    return this;
  }

  createMutationTracker(initialCount = 0): MutationTracker {
    const maxCount = Number(env['MAX_BATCH_MUTATION']);
    let mutationCount = initialCount;
    return {
      trackMutations(count: number) {
        mutationCount += count;
        if (mutationCount > maxCount) {
          throw new InvalidPayloadException(`Exceeded max batch mutation limit of ${maxCount}.`);
        }
      },
      getCount() {
        return mutationCount;
      },
    };
  }

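Every public mutation entry point lazily attaches one of these trackers to its options and then threads the same instance into nested service calls, so all relational writes triggered by one request count against a single shared budget. A minimal standalone sketch of that sharing, using a plain Error in place of InvalidPayloadException and a hard-coded limit in place of the env lookup:

// Standalone sketch of the shared-counter pattern; limit stands in for Number(env['MAX_BATCH_MUTATION']).
type MutationTracker = {
  trackMutations: (count: number) => void;
  getCount: () => number;
};

function createMutationTracker(maxCount = 100, initialCount = 0): MutationTracker {
  let mutationCount = initialCount;
  return {
    trackMutations(count) {
      mutationCount += count;
      if (mutationCount > maxCount) {
        // Directus throws InvalidPayloadException here; a plain Error keeps the sketch self-contained.
        throw new Error(`Exceeded max batch mutation limit of ${maxCount}.`);
      }
    },
    getCount: () => mutationCount,
  };
}

// One request, one tracker: the root mutation and its nested writes share the count.
const opts = { mutationTracker: createMutationTracker() };
opts.mutationTracker.trackMutations(60); // root updateMany over 60 keys
opts.mutationTracker.trackMutations(40); // nested o2m/m2m writes made with the same opts
console.log(opts.mutationTracker.getCount()); // 100 — one more tracked mutation would throw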
  async getKeysByQuery(query: Query): Promise<PrimaryKey[]> {
    const primaryKeyField = this.schema.collections[this.collection]!.primary;
    const readQuery = cloneDeep(query);
@@ -68,7 +89,12 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
  /**
   * Create a single new item.
   */
  async createOne(data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey> {
  async createOne(data: Partial<Item>, opts: MutationOptions = {}): Promise<PrimaryKey> {
    if (!opts.mutationTracker) opts.mutationTracker = this.createMutationTracker();
    if (!opts.bypassLimits) {
      opts.mutationTracker.trackMutations(1);
    }

    const { ActivityService } = await import('./activity.js');
    const { RevisionsService } = await import('./revisions.js');

@@ -102,7 +128,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
    // Run all hooks that are attached to this event so the end user has the chance to augment the
    // item that is about to be saved
    const payloadAfterHooks =
      opts?.emitEvents !== false
      opts.emitEvents !== false
        ? await emitter.emitFilter(
            this.eventScope === 'items'
              ? ['items.create', `${this.collection}.items.create`]
@@ -123,7 +149,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
        ? authorizationService.validatePayload('create', this.collection, payloadAfterHooks)
        : payloadAfterHooks;

    if (opts?.preMutationException) {
    if (opts.preMutationException) {
      throw opts.preMutationException;
    }

@@ -222,10 +248,10 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
const childrenRevisions = [...revisionsM2O, ...revisionsA2O, ...revisionsO2M];
|
||||
|
||||
if (childrenRevisions.length > 0) {
|
||||
await revisionsService.updateMany(childrenRevisions, { parent: revision });
|
||||
await revisionsService.updateMany(childrenRevisions, { parent: revision }, { bypassLimits: true });
|
||||
}
|
||||
|
||||
if (opts?.onRevisionCreate) {
|
||||
if (opts.onRevisionCreate) {
|
||||
opts.onRevisionCreate(revision);
|
||||
}
|
||||
}
|
||||
@@ -234,7 +260,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
return primaryKey;
|
||||
});
|
||||
|
||||
if (opts?.emitEvents !== false) {
|
||||
if (opts.emitEvents !== false) {
|
||||
const actionEvent = {
|
||||
event:
|
||||
this.eventScope === 'items'
|
||||
@@ -252,14 +278,14 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
},
|
||||
};
|
||||
|
||||
if (opts?.bypassEmitAction) {
|
||||
if (opts.bypassEmitAction) {
|
||||
opts.bypassEmitAction(actionEvent);
|
||||
} else {
|
||||
emitter.emitAction(actionEvent.event, actionEvent.meta, actionEvent.context);
|
||||
}
|
||||
|
||||
for (const nestedActionEvent of nestedActionEvents) {
|
||||
if (opts?.bypassEmitAction) {
|
||||
if (opts.bypassEmitAction) {
|
||||
opts.bypassEmitAction(nestedActionEvent);
|
||||
} else {
|
||||
emitter.emitAction(nestedActionEvent.event, nestedActionEvent.meta, nestedActionEvent.context);
|
||||
@@ -267,7 +293,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
}
|
||||
}
|
||||
|
||||
if (this.cache && env['CACHE_AUTO_PURGE'] && opts?.autoPurgeCache !== false) {
|
||||
if (this.cache && env['CACHE_AUTO_PURGE'] && opts.autoPurgeCache !== false) {
|
||||
await this.cache.clear();
|
||||
}
|
||||
|
||||
@@ -277,7 +303,9 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
  /**
   * Create multiple new items at once. Inserts all provided records sequentially wrapped in a transaction.
   */
  async createMany(data: Partial<Item>[], opts?: MutationOptions): Promise<PrimaryKey[]> {
  async createMany(data: Partial<Item>[], opts: MutationOptions = {}): Promise<PrimaryKey[]> {
    if (!opts.mutationTracker) opts.mutationTracker = this.createMutationTracker();

    const { primaryKeys, nestedActionEvents } = await this.knex.transaction(async (trx) => {
      const service = new ItemsService(this.collection, {
        accountability: this.accountability,
@@ -293,6 +321,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
        ...(opts || {}),
        autoPurgeCache: false,
        bypassEmitAction: (params) => nestedActionEvents.push(params),
        mutationTracker: opts.mutationTracker,
      });
      primaryKeys.push(primaryKey);
    }
@@ -300,9 +329,9 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
return { primaryKeys, nestedActionEvents };
|
||||
});
|
||||
|
||||
if (opts?.emitEvents !== false) {
|
||||
if (opts.emitEvents !== false) {
|
||||
for (const nestedActionEvent of nestedActionEvents) {
|
||||
if (opts?.bypassEmitAction) {
|
||||
if (opts.bypassEmitAction) {
|
||||
opts.bypassEmitAction(nestedActionEvent);
|
||||
} else {
|
||||
emitter.emitAction(nestedActionEvent.event, nestedActionEvent.meta, nestedActionEvent.context);
|
||||
@@ -310,7 +339,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
}
|
||||
}
|
||||
|
||||
if (this.cache && env['CACHE_AUTO_PURGE'] && opts?.autoPurgeCache !== false) {
|
||||
if (this.cache && env['CACHE_AUTO_PURGE'] && opts.autoPurgeCache !== false) {
|
||||
await this.cache.clear();
|
||||
}
|
||||
|
||||
@@ -469,11 +498,13 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
  /**
   * Update multiple items in a single transaction
   */
  async updateBatch(data: Partial<Item>[], opts?: MutationOptions): Promise<PrimaryKey[]> {
  async updateBatch(data: Partial<Item>[], opts: MutationOptions = {}): Promise<PrimaryKey[]> {
    if (!Array.isArray(data)) {
      throw new InvalidPayloadException('Input should be an array of items.');
    }

    if (!opts.mutationTracker) opts.mutationTracker = this.createMutationTracker();

    const primaryKeyField = this.schema.collections[this.collection]!.primary;

    const keys: PrimaryKey[] = [];
@@ -493,7 +524,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
      }
    });
  } finally {
    if (this.cache && env['CACHE_AUTO_PURGE'] && opts?.autoPurgeCache !== false) {
    if (this.cache && env['CACHE_AUTO_PURGE'] && opts.autoPurgeCache !== false) {
      await this.cache.clear();
    }
  }
@@ -504,7 +535,12 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
  /**
   * Update many items by primary key, setting all items to the same change
   */
  async updateMany(keys: PrimaryKey[], data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey[]> {
  async updateMany(keys: PrimaryKey[], data: Partial<Item>, opts: MutationOptions = {}): Promise<PrimaryKey[]> {
    if (!opts.mutationTracker) opts.mutationTracker = this.createMutationTracker();
    if (!opts.bypassLimits) {
      opts.mutationTracker.trackMutations(keys.length);
    }

    const { ActivityService } = await import('./activity.js');
    const { RevisionsService } = await import('./revisions.js');

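The counting rule differs per entry point: createOne tracks a single mutation, while updateMany and deleteMany track one mutation per primary key, and internal bookkeeping writes (activity rows, revisions, role cleanup) pass `bypassLimits: true` so they never consume the budget. A self-contained back-of-the-envelope check against a limit of 100 (the helper below is purely illustrative):

// Hypothetical illustration of how one request consumes the budget when MAX_BATCH_MUTATION=100.
let budgetUsed = 0;
const limit = 100;

function track(count: number) {
  budgetUsed += count;
  if (budgetUsed > limit) throw new Error(`Exceeded max batch mutation limit of ${limit}.`);
}

track(1); // createOne: one item
track(25); // updateMany over 25 keys
track(74); // deleteMany over 74 keys -> budgetUsed === 100, still allowed
// Internal revision/activity writes use { bypassLimits: true } and never call track().
// track(1); // the next user-facing mutation would throw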
@@ -528,7 +564,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
// Run all hooks that are attached to this event so the end user has the chance to augment the
|
||||
// item that is about to be saved
|
||||
const payloadAfterHooks =
|
||||
opts?.emitEvents !== false
|
||||
opts.emitEvents !== false
|
||||
? await emitter.emitFilter(
|
||||
this.eventScope === 'items'
|
||||
? ['items.update', `${this.collection}.items.update`]
|
||||
@@ -557,7 +593,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
? authorizationService.validatePayload('update', this.collection, payloadAfterHooks)
|
||||
: payloadAfterHooks;
|
||||
|
||||
if (opts?.preMutationException) {
|
||||
if (opts.preMutationException) {
|
||||
throw opts.preMutationException;
|
||||
}
|
||||
|
||||
@@ -621,7 +657,8 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
user_agent: this.accountability!.userAgent,
|
||||
origin: this.accountability!.origin,
|
||||
item: key,
|
||||
}))
|
||||
})),
|
||||
{ bypassLimits: true }
|
||||
);
|
||||
|
||||
if (this.schema.collections[this.collection]!.accountability === 'all') {
|
||||
@@ -650,12 +687,12 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
)
|
||||
).filter((revision) => revision.delta);
|
||||
|
||||
const revisionIDs = await revisionsService.createMany(revisions);
|
||||
const revisionIDs = await revisionsService.createMany(revisions, { bypassLimits: true });
|
||||
|
||||
for (let i = 0; i < revisionIDs.length; i++) {
|
||||
const revisionID = revisionIDs[i]!;
|
||||
|
||||
if (opts?.onRevisionCreate) {
|
||||
if (opts.onRevisionCreate) {
|
||||
opts.onRevisionCreate(revisionID);
|
||||
}
|
||||
|
||||
@@ -665,7 +702,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
// with all other revisions on the current level as regular "flat" updates, and
|
||||
// nested revisions as children of this first "root" item.
|
||||
if (childrenRevisions.length > 0) {
|
||||
await revisionsService.updateMany(childrenRevisions, { parent: revisionID });
|
||||
await revisionsService.updateMany(childrenRevisions, { parent: revisionID }, { bypassLimits: true });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -673,11 +710,11 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
}
|
||||
});
|
||||
|
||||
if (this.cache && env['CACHE_AUTO_PURGE'] && opts?.autoPurgeCache !== false) {
|
||||
if (this.cache && env['CACHE_AUTO_PURGE'] && opts.autoPurgeCache !== false) {
|
||||
await this.cache.clear();
|
||||
}
|
||||
|
||||
if (opts?.emitEvents !== false) {
|
||||
if (opts.emitEvents !== false) {
|
||||
const actionEvent = {
|
||||
event:
|
||||
this.eventScope === 'items'
|
||||
@@ -695,14 +732,14 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
},
|
||||
};
|
||||
|
||||
if (opts?.bypassEmitAction) {
|
||||
if (opts.bypassEmitAction) {
|
||||
opts.bypassEmitAction(actionEvent);
|
||||
} else {
|
||||
emitter.emitAction(actionEvent.event, actionEvent.meta, actionEvent.context);
|
||||
}
|
||||
|
||||
for (const nestedActionEvent of nestedActionEvents) {
|
||||
if (opts?.bypassEmitAction) {
|
||||
if (opts.bypassEmitAction) {
|
||||
opts.bypassEmitAction(nestedActionEvent);
|
||||
} else {
|
||||
emitter.emitAction(nestedActionEvent.event, nestedActionEvent.meta, nestedActionEvent.context);
|
||||
@@ -742,7 +779,9 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
/**
|
||||
* Upsert many items
|
||||
*/
|
||||
async upsertMany(payloads: Partial<Item>[], opts?: MutationOptions): Promise<PrimaryKey[]> {
|
||||
async upsertMany(payloads: Partial<Item>[], opts: MutationOptions = {}): Promise<PrimaryKey[]> {
|
||||
if (!opts.mutationTracker) opts.mutationTracker = this.createMutationTracker();
|
||||
|
||||
const primaryKeys = await this.knex.transaction(async (trx) => {
|
||||
const service = new ItemsService(this.collection, {
|
||||
accountability: this.accountability,
|
||||
@@ -760,7 +799,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
return primaryKeys;
|
||||
});
|
||||
|
||||
if (this.cache && env['CACHE_AUTO_PURGE'] && opts?.autoPurgeCache !== false) {
|
||||
if (this.cache && env['CACHE_AUTO_PURGE'] && opts.autoPurgeCache !== false) {
|
||||
await this.cache.clear();
|
||||
}
|
||||
|
||||
@@ -793,7 +832,12 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
/**
|
||||
* Delete multiple items by primary key
|
||||
*/
|
||||
async deleteMany(keys: PrimaryKey[], opts?: MutationOptions): Promise<PrimaryKey[]> {
|
||||
async deleteMany(keys: PrimaryKey[], opts: MutationOptions = {}): Promise<PrimaryKey[]> {
|
||||
if (!opts.mutationTracker) opts.mutationTracker = this.createMutationTracker();
|
||||
if (!opts.bypassLimits) {
|
||||
opts.mutationTracker.trackMutations(keys.length);
|
||||
}
|
||||
|
||||
const { ActivityService } = await import('./activity.js');
|
||||
|
||||
const primaryKeyField = this.schema.collections[this.collection]!.primary;
|
||||
@@ -809,11 +853,11 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
await authorizationService.checkAccess('delete', this.collection, keys);
|
||||
}
|
||||
|
||||
if (opts?.preMutationException) {
|
||||
if (opts.preMutationException) {
|
||||
throw opts.preMutationException;
|
||||
}
|
||||
|
||||
if (opts?.emitEvents !== false) {
|
||||
if (opts.emitEvents !== false) {
|
||||
await emitter.emitFilter(
|
||||
this.eventScope === 'items' ? ['items.delete', `${this.collection}.items.delete`] : `${this.eventScope}.delete`,
|
||||
keys,
|
||||
@@ -846,7 +890,8 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
user_agent: this.accountability!.userAgent,
|
||||
origin: this.accountability!.origin,
|
||||
item: key,
|
||||
}))
|
||||
})),
|
||||
{ bypassLimits: true }
|
||||
);
|
||||
}
|
||||
});
|
||||
@@ -855,7 +900,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
await this.cache.clear();
|
||||
}
|
||||
|
||||
if (opts?.emitEvents !== false) {
|
||||
if (opts.emitEvents !== false) {
|
||||
const actionEvent = {
|
||||
event:
|
||||
this.eventScope === 'items'
|
||||
@@ -873,7 +918,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
|
||||
},
|
||||
};
|
||||
|
||||
if (opts?.bypassEmitAction) {
|
||||
if (opts.bypassEmitAction) {
|
||||
opts.bypassEmitAction(actionEvent);
|
||||
} else {
|
||||
emitter.emitAction(actionEvent.event, actionEvent.meta, actionEvent.context);
|
||||
|
||||
@@ -444,6 +444,7 @@ export class PayloadService {
|
||||
bypassEmitAction: (params) =>
|
||||
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
|
||||
emitEvents: opts?.emitEvents,
|
||||
mutationTracker: opts?.mutationTracker,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
@@ -452,6 +453,7 @@ export class PayloadService {
|
||||
bypassEmitAction: (params) =>
|
||||
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
|
||||
emitEvents: opts?.emitEvents,
|
||||
mutationTracker: opts?.mutationTracker,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -523,6 +525,7 @@ export class PayloadService {
|
||||
bypassEmitAction: (params) =>
|
||||
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
|
||||
emitEvents: opts?.emitEvents,
|
||||
mutationTracker: opts?.mutationTracker,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
@@ -531,6 +534,7 @@ export class PayloadService {
|
||||
bypassEmitAction: (params) =>
|
||||
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
|
||||
emitEvents: opts?.emitEvents,
|
||||
mutationTracker: opts?.mutationTracker,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -638,6 +642,7 @@ export class PayloadService {
|
||||
bypassEmitAction: (params) =>
|
||||
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
|
||||
emitEvents: opts?.emitEvents,
|
||||
mutationTracker: opts?.mutationTracker,
|
||||
}))
|
||||
);
|
||||
|
||||
@@ -665,6 +670,7 @@ export class PayloadService {
|
||||
bypassEmitAction: (params) =>
|
||||
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
|
||||
emitEvents: opts?.emitEvents,
|
||||
mutationTracker: opts?.mutationTracker,
|
||||
});
|
||||
} else {
|
||||
await itemsService.updateByQuery(
|
||||
@@ -675,6 +681,7 @@ export class PayloadService {
|
||||
bypassEmitAction: (params) =>
|
||||
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
|
||||
emitEvents: opts?.emitEvents,
|
||||
mutationTracker: opts?.mutationTracker,
|
||||
}
|
||||
);
|
||||
}
|
||||
@@ -723,6 +730,7 @@ export class PayloadService {
|
||||
bypassEmitAction: (params) =>
|
||||
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
|
||||
emitEvents: opts?.emitEvents,
|
||||
mutationTracker: opts?.mutationTracker,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -741,6 +749,7 @@ export class PayloadService {
|
||||
bypassEmitAction: (params) =>
|
||||
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
|
||||
emitEvents: opts?.emitEvents,
|
||||
mutationTracker: opts?.mutationTracker,
|
||||
}
|
||||
);
|
||||
}
|
||||
@@ -769,6 +778,7 @@ export class PayloadService {
|
||||
bypassEmitAction: (params) =>
|
||||
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
|
||||
emitEvents: opts?.emitEvents,
|
||||
mutationTracker: opts?.mutationTracker,
|
||||
});
|
||||
} else {
|
||||
await itemsService.updateByQuery(
|
||||
@@ -779,6 +789,7 @@ export class PayloadService {
|
||||
bypassEmitAction: (params) =>
|
||||
opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
|
||||
emitEvents: opts?.emitEvents,
|
||||
mutationTracker: opts?.mutationTracker,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
@@ -159,14 +159,14 @@ export class RolesService extends ItemsService {
      {
        filter: { role: { _in: keys } },
      },
      opts
      { ...opts, bypassLimits: true }
    );

    await presetsService.deleteByQuery(
      {
        filter: { role: { _in: keys } },
      },
      opts
      { ...opts, bypassLimits: true }
    );

    await usersService.updateByQuery(
@@ -177,7 +177,7 @@ export class RolesService extends ItemsService {
        status: 'suspended',
        role: null,
      },
      opts
      { ...opts, bypassLimits: true }
    );

    await itemsService.deleteMany(keys, opts);

@@ -180,7 +180,9 @@ export class UsersService extends ItemsService {
    return key;
  }

  override async updateBatch(data: Partial<Item>[], opts?: MutationOptions): Promise<PrimaryKey[]> {
  override async updateBatch(data: Partial<Item>[], opts: MutationOptions = {}): Promise<PrimaryKey[]> {
    if (!opts.mutationTracker) opts.mutationTracker = this.createMutationTracker();

    const primaryKeyField = this.schema.collections[this.collection]!.primary;

    const keys: PrimaryKey[] = [];

@@ -5,6 +5,7 @@

import type { BaseException } from '@directus/exceptions';
import type { EventContext } from '@directus/types';
import type { MutationTracker } from '../services/items.js';

export type Item = Record<string, any>;

@@ -24,17 +25,17 @@ export type MutationOptions = {
  /**
   * Callback function that's fired whenever a revision is made in the mutation
   */
  onRevisionCreate?: (pk: PrimaryKey) => void;
  onRevisionCreate?: ((pk: PrimaryKey) => void) | undefined;

  /**
   * Flag to disable the auto purging of the cache. Is ignored when CACHE_AUTO_PURGE isn't enabled.
   */
  autoPurgeCache?: false;
  autoPurgeCache?: false | undefined;

  /**
   * Flag to disable the auto purging of the system cache.
   */
  autoPurgeSystemCache?: false;
  autoPurgeSystemCache?: false | undefined;

  /**
   * Allow disabling the emitting of hooks. Useful if a custom hook is fired (like files.upload)
@@ -45,12 +46,22 @@ export type MutationOptions = {
   * To bypass the emitting of action events if emitEvents is enabled
   * Can be used to queue up the nested events from item service's create, update and delete
   */
  bypassEmitAction?: (params: ActionEventParams) => void;
  bypassEmitAction?: ((params: ActionEventParams) => void) | undefined;

  /**
   * To bypass limits so that functions would work as intended
   */
  bypassLimits?: boolean | undefined;

  /**
   * To keep track of mutation limits
   */
  mutationTracker?: MutationTracker | undefined;

  /*
   * The validation error to throw right before the mutation takes place
   */
  preMutationException?: BaseException;
  preMutationException?: BaseException | undefined;
};

export type ActionEventParams = {

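The explicit `| undefined` members presumably exist so that the services can forward possibly-undefined values (for example `mutationTracker: opts?.mutationTracker`) under strict optional-property settings. A hedged sketch of how an internal caller might shape these options; the import path and function name are illustrative, not part of the diff:

// Hypothetical helper: builds options for an internal bookkeeping write.
import type { MutationOptions } from '../types/items.js'; // path is an assumption

function systemWriteOptions(parent?: MutationOptions): MutationOptions {
  return {
    // Internal writes should not consume the user's batch budget...
    bypassLimits: true,
    // ...but still share the request-wide tracker so getCount() stays meaningful.
    mutationTracker: parent?.mutationTracker,
    // Forwarding possibly-undefined callbacks is what the `| undefined` members allow.
    bypassEmitAction: parent?.bypassEmitAction,
    emitEvents: false,
  };
}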
@@ -38,6 +38,7 @@ export async function applyDiff(
  const mutationOptions: MutationOptions = {
    autoPurgeSystemCache: false,
    bypassEmitAction: (params) => nestedActionEvents.push(params),
    bypassLimits: true,
  };

  await database.transaction(async (trx) => {

@@ -23,6 +23,7 @@ describe('applySnapshot', () => {
  const mutationOptions = {
    autoPurgeSystemCache: false,
    bypassEmitAction: expect.any(Function),
    bypassLimits: true,
  };

  beforeEach(() => {

@@ -203,6 +203,7 @@ prefixing the value with `{type}:`. The following types are available:
| `ROOT_REDIRECT` | Where to redirect to when navigating to `/`. Accepts a relative path, absolute URL, or `false` to disable. | `./admin` |
| `SERVE_APP` | Whether or not to serve the Admin App under `/admin`. | `true` |
| `GRAPHQL_INTROSPECTION` | Whether or not to enable GraphQL Introspection | `true` |
| `MAX_BATCH_MUTATION` | The maximum number of items for batch mutations when creating, updating and deleting. | `Infinity` |
| `MAX_RELATIONAL_DEPTH` | The maximum depth when filtering / querying relational fields, with a minimum value of `2`. | `10` |
| `ROBOTS_TXT` | What the `/robots.txt` endpoint should return | `User-agent: *\nDisallow: /` |

@@ -72,6 +72,7 @@ const directusConfig = {
  MAX_PAYLOAD_SIZE: '10mb',
  EXTENSIONS_PATH: './tests-blackbox/extensions',
  ASSETS_TRANSFORM_MAX_CONCURRENT: '2',
  MAX_BATCH_MUTATION: '100', // Must be in multiples of 10 for tests
  ACCESS_TOKEN_TTL: '25d', // should be larger than 24.86 days to test Expires value larger than 32-bit signed integer
  ...directusAuthConfig,
  ...directusStorageConfig,

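The "multiples of 10" note comes from how the tests derive their batch sizes: counts such as `MAX_BATCH_MUTATION / 10`, `/ 2` and `/ 2 - 1` must be whole numbers for `Array(count)` to be a valid length, and the below/above-limit pairs presumably differ by one nested child because each child costs more than one tracked mutation. A quick sanity check with the configured value of 100:

// With MAX_BATCH_MUTATION = '100' in the test config, the derived counts stay whole numbers:
const limit = Number('100');
console.log(limit / 10); // 10 — top-level items per createMany/updateBatch test
console.log(limit / 2 - 1); // 49 — nested children in the "passes when below limit" createOne tests
console.log(limit / 2); // 50 — nested children in the "errors when above limit" createOne tests
// Array(49.5) would throw "Invalid array length", hence the multiples-of-10 requirement.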
@@ -2,7 +2,8 @@ import request from 'supertest';
import { Env, getUrl } from './config';
import * as common from './index';
import vendors from './get-dbs-to-test';
import type { Filter } from '@directus/types';
import type { Query } from '@directus/types';
import { omit } from 'lodash';

export function DisableTestCachingSetup() {
  beforeEach(async () => {
@@ -663,9 +664,7 @@ export async function CreateItem(vendor: string, options: OptionsCreateItem) {

export type OptionsReadItem = {
  collection: string;
  filter?: Filter;
  fields?: string;
};
} & Query;

export async function ReadItem(vendor: string, options: OptionsReadItem) {
  // Parse options
@@ -680,10 +679,7 @@ export async function ReadItem(vendor: string, options: OptionsReadItem) {
  const response = await request(getUrl(vendor))
    .get(`/items/${options.collection}`)
    .set('Authorization', `Bearer ${common.USER.TESTS_FLOW.TOKEN}`)
    .query({
      filter: options.filter,
      fields: options.fields,
    });
    .query(omit(options, 'collection'));

  return response.body.data;
}

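With the options type intersected with `Query` and everything except `collection` forwarded verbatim, the helper now accepts any Directus query parameter (fields as an array, sort, limit, and so on). A usage sketch mirroring the new tests, assumed to run inside an async test where `vendor` and `statesID` are in scope and the collection name is hypothetical:

// Before: only `filter` and `fields` (as a single string) were supported.
// After: any Query member passes straight through to the /items endpoint.
const states = await ReadItem(vendor, {
  collection: 'test_items_states', // hypothetical collection name
  fields: ['*', 'country_id.id', 'country_id.name'],
  sort: ['name'],
  filter: { id: { _in: statesID } },
});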
@@ -1,4 +1,4 @@
import request from 'supertest';
import request, { Response } from 'supertest';
import { jsonToGraphQLQuery } from 'json-to-graphql-query';

export function processGraphQLJson(jsonQuery: any) {
@@ -11,7 +11,7 @@ export async function requestGraphQL(
  token: string | null,
  jsonQuery: any,
  options?: { variables?: any; cookies?: string[] }
): Promise<any> {
): Promise<Response> {
  const req = request(host)
    .post(isSystemCollection ? '/graphql/system' : '/graphql')
    .send({

@@ -1,5 +1,5 @@
import request from 'supertest';
import { getUrl } from '@common/config';
import config, { getUrl } from '@common/config';
import vendors from '@common/get-dbs-to-test';
import { v4 as uuid } from 'uuid';
import { CreateItem, ReadItem } from '@common/functions';
@@ -654,7 +654,7 @@ describe.each(common.PRIMARY_KEY_TYPES)('/items', (pkType) => {

  const retrievedShape = await ReadItem(vendor, {
    collection: localCollectionShapes,
    fields: '*.*.*',
    fields: ['*.*.*'],
    filter: { id: { _eq: insertedShape.id } },
  });

@@ -1673,6 +1673,307 @@ describe.each(common.PRIMARY_KEY_TYPES)('/items', (pkType) => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('MAX_BATCH_MUTATION Tests', () => {
|
||||
describe('createOne', () => {
|
||||
describe('passes when below limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// TODO: Fix Oracle exceeded directus_revisions limit of 4000
|
||||
if (vendor === 'oracle') {
|
||||
expect(true).toBe(true);
|
||||
return;
|
||||
}
|
||||
|
||||
// Setup
|
||||
const countNested = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 2 - 1;
|
||||
const shape: any = createShape(pkType);
|
||||
|
||||
shape.children = Array(countNested)
|
||||
.fill(0)
|
||||
.map((_, index) => {
|
||||
if (index < countNested / 2) {
|
||||
return { collection: localCollectionCircles, item: createCircle(pkType) };
|
||||
} else {
|
||||
return { collection: localCollectionSquares, item: createSquare(pkType) };
|
||||
}
|
||||
});
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.post(`/items/${localCollectionShapes}`)
|
||||
.send(shape)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(200);
|
||||
expect(response.body.data.children.length).toBe(countNested);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
|
||||
describe('errors when above limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// TODO: Fix Oracle ORA-01086 savepoint never established in this session or is invalid
|
||||
if (vendor === 'oracle') {
|
||||
expect(true).toBe(true);
|
||||
return;
|
||||
}
|
||||
|
||||
// Setup
|
||||
const countNested = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 2;
|
||||
const shape: any = createShape(pkType);
|
||||
|
||||
shape.children = Array(countNested)
|
||||
.fill(0)
|
||||
.map((_, index) => {
|
||||
if (index < countNested / 2) {
|
||||
return { collection: localCollectionCircles, item: createCircle(pkType) };
|
||||
} else {
|
||||
return { collection: localCollectionSquares, item: createSquare(pkType) };
|
||||
}
|
||||
});
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.post(`/items/${localCollectionShapes}`)
|
||||
.send(shape)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('createMany', () => {
|
||||
describe('passes when below limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 10;
|
||||
const countNested = 4;
|
||||
const shapes: any[] = [];
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
shapes.push(createShape(pkType));
|
||||
shapes[i].children = Array(countNested)
|
||||
.fill(0)
|
||||
.map((_, index) => {
|
||||
if (index < countNested / 2) {
|
||||
return { collection: localCollectionCircles, item: createCircle(pkType) };
|
||||
} else {
|
||||
return { collection: localCollectionSquares, item: createSquare(pkType) };
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.post(`/items/${localCollectionShapes}`)
|
||||
.send(shapes)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(200);
|
||||
expect(response.body.data.length).toBe(count);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
|
||||
describe('errors when above limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// TODO: Fix Oracle ORA-01086 savepoint never established in this session or is invalid
|
||||
if (vendor === 'oracle') {
|
||||
expect(true).toBe(true);
|
||||
return;
|
||||
}
|
||||
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 10;
|
||||
const countNested = 5;
|
||||
const shapes: any[] = [];
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
shapes.push(createShape(pkType));
|
||||
shapes[i].children = Array(countNested)
|
||||
.fill(0)
|
||||
.map((_, index) => {
|
||||
if (index < countNested / 2) {
|
||||
return { collection: localCollectionCircles, item: createCircle(pkType) };
|
||||
} else {
|
||||
return { collection: localCollectionSquares, item: createSquare(pkType) };
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.post(`/items/${localCollectionShapes}`)
|
||||
.send(shapes)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateBatch', () => {
|
||||
describe('passes when below limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 10;
|
||||
const countCreate = 2;
|
||||
const countUpdate = 2;
|
||||
const countDelete = 1;
|
||||
const shapesID = [];
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const shape: any = createShape(pkType);
|
||||
shape.children = Array(countUpdate + countDelete)
|
||||
.fill(0)
|
||||
.map((_, index) => {
|
||||
if (index < (countUpdate + countDelete) / 2) {
|
||||
return { collection: localCollectionCircles, item: createCircle(pkType) };
|
||||
} else {
|
||||
return { collection: localCollectionSquares, item: createSquare(pkType) };
|
||||
}
|
||||
});
|
||||
shapesID.push((await CreateItem(vendor, { collection: localCollectionShapes, item: shape })).id);
|
||||
}
|
||||
|
||||
const shapes = await ReadItem(vendor, {
|
||||
collection: localCollectionShapes,
|
||||
fields: ['*', 'children.id', 'children.collection', 'children.item.id', 'children.item.name'],
|
||||
filter: { id: { _in: shapesID } },
|
||||
});
|
||||
|
||||
for (const shape of shapes) {
|
||||
const children = shape.children;
|
||||
shape.children = {
|
||||
create: Array(countCreate)
|
||||
.fill(0)
|
||||
.map((_, index) => {
|
||||
if (index < countCreate / 2) {
|
||||
return { collection: localCollectionCircles, item: createCircle(pkType) };
|
||||
} else {
|
||||
return { collection: localCollectionSquares, item: createSquare(pkType) };
|
||||
}
|
||||
}),
|
||||
update: children.slice(0, countUpdate),
|
||||
delete: children.slice(-countDelete).map((child: Circle | Square) => child.id),
|
||||
};
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.patch(`/items/${localCollectionShapes}`)
|
||||
.send(shapes)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(200);
|
||||
expect(response.body.data.length).toBe(count);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
|
||||
describe('errors when above limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// TODO: Fix Oracle ORA-01086 savepoint never established in this session or is invalid
|
||||
if (vendor === 'oracle') {
|
||||
expect(true).toBe(true);
|
||||
return;
|
||||
}
|
||||
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 10;
|
||||
const countCreate = 2;
|
||||
const countUpdate = 2;
|
||||
const countDelete = 2;
|
||||
const shapesID = [];
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const shape: any = createShape(pkType);
|
||||
shape.children = Array(countUpdate + countDelete)
|
||||
.fill(0)
|
||||
.map((_, index) => {
|
||||
if (index < (countUpdate + countDelete) / 2) {
|
||||
return { collection: localCollectionCircles, item: createCircle(pkType) };
|
||||
} else {
|
||||
return { collection: localCollectionSquares, item: createSquare(pkType) };
|
||||
}
|
||||
});
|
||||
shapesID.push((await CreateItem(vendor, { collection: localCollectionShapes, item: shape })).id);
|
||||
}
|
||||
|
||||
const shapes = await ReadItem(vendor, {
|
||||
collection: localCollectionShapes,
|
||||
fields: ['*', 'children.id', 'children.collection', 'children.item.id', 'children.item.name'],
|
||||
filter: { id: { _in: shapesID } },
|
||||
});
|
||||
|
||||
for (const shape of shapes) {
|
||||
const children = shape.children;
|
||||
shape.children = {
|
||||
create: Array(countCreate)
|
||||
.fill(0)
|
||||
.map((_, index) => {
|
||||
if (index < countCreate / 2) {
|
||||
return { collection: localCollectionCircles, item: createCircle(pkType) };
|
||||
} else {
|
||||
return { collection: localCollectionSquares, item: createSquare(pkType) };
|
||||
}
|
||||
}),
|
||||
update: children.slice(0, countUpdate),
|
||||
delete: children.slice(-countDelete).map((child: Circle | Square) => child.id),
|
||||
};
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.patch(`/items/${localCollectionShapes}`)
|
||||
.send(shapes)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
CheckQueryFilters(
|
||||
{
|
||||
method: 'get',
|
||||
|
||||
@@ -1,5 +1,5 @@
import request from 'supertest';
import { getUrl } from '@common/config';
import config, { getUrl } from '@common/config';
import vendors from '@common/get-dbs-to-test';
import { v4 as uuid } from 'uuid';
import { CreateItem, ReadItem } from '@common/functions';
@@ -245,7 +245,7 @@ describe.each(common.PRIMARY_KEY_TYPES)('/items', (pkType) => {

  const retrievedIngredient = await ReadItem(vendor, {
    collection: localCollectionIngredients,
    fields: '*.*.*',
    fields: ['*.*.*'],
    filter: { id: { _eq: insertedIngredient.id } },
  });

@@ -463,7 +463,7 @@ describe.each(common.PRIMARY_KEY_TYPES)('/items', (pkType) => {

  const retrievedIngredient = await ReadItem(vendor, {
    collection: localCollectionIngredients,
    fields: '*.*.*',
    fields: ['*.*.*'],
    filter: { id: { _eq: insertedIngredient.id } },
  });

@@ -2204,5 +2204,491 @@ describe.each(common.PRIMARY_KEY_TYPES)('/items', (pkType) => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('MAX_BATCH_MUTATION Tests', () => {
|
||||
describe('createOne', () => {
|
||||
describe('passes when below limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// TODO: Fix Oracle exceeded directus_revisions limit of 4000
|
||||
if (vendor === 'oracle') {
|
||||
expect(true).toBe(true);
|
||||
return;
|
||||
}
|
||||
|
||||
// Setup
|
||||
const countNested = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 2 - 1;
|
||||
const food: any = createFood(pkType);
|
||||
const food2: any = createFood(pkType);
|
||||
|
||||
food.ingredients = Array(countNested)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
});
|
||||
|
||||
food2.ingredients = Array(countNested)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return {
|
||||
[`${localCollectionIngredients}_id`]: createIngredient(pkType),
|
||||
};
|
||||
});
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.post(`/items/${localCollectionFoods}`)
|
||||
.send(food)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
const mutationKey = `create_${localCollectionFoods}_item`;
|
||||
|
||||
const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
|
||||
mutation: {
|
||||
[mutationKey]: {
|
||||
__args: {
|
||||
data: food2,
|
||||
},
|
||||
id: true,
|
||||
ingredients: {
|
||||
id: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(200);
|
||||
expect(response.body.data.ingredients.length).toBe(countNested);
|
||||
|
||||
expect(gqlResponse.statusCode).toBe(200);
|
||||
expect(gqlResponse.body.data[mutationKey].ingredients.length).toEqual(countNested);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
|
||||
describe('errors when above limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// TODO: Fix Oracle ORA-01086 savepoint never established in this session or is invalid
|
||||
if (vendor === 'oracle') {
|
||||
expect(true).toBe(true);
|
||||
return;
|
||||
}
|
||||
|
||||
// Setup
|
||||
const countNested = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 2;
|
||||
const food: any = createFood(pkType);
|
||||
const food2: any = createFood(pkType);
|
||||
|
||||
food.ingredients = Array(countNested)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
});
|
||||
|
||||
food2.ingredients = Array(countNested)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return {
|
||||
[`${localCollectionIngredients}_id`]: createIngredient(pkType),
|
||||
};
|
||||
});
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.post(`/items/${localCollectionFoods}`)
|
||||
.send(food)
|
||||
.query({ fields: '*,ingredients.test_items_m2m_ingredients_integer_id.*' })
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
const mutationKey = `create_${localCollectionFoods}_item`;
|
||||
|
||||
const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
|
||||
mutation: {
|
||||
[mutationKey]: {
|
||||
__args: {
|
||||
data: food2,
|
||||
},
|
||||
id: true,
|
||||
ingredients: {
|
||||
id: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
|
||||
expect(gqlResponse.statusCode).toBe(200);
|
||||
expect(gqlResponse.body.errors).toBeDefined();
|
||||
expect(gqlResponse.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('createMany', () => {
|
||||
describe('passes when below limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 10;
|
||||
const countNested = 4;
|
||||
const foods: any[] = [];
|
||||
const foods2: any[] = [];
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
foods.push(createFood(pkType));
|
||||
foods[i].ingredients = Array(countNested)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
});
|
||||
|
||||
foods2.push(createFood(pkType));
|
||||
foods2[i].ingredients = Array(countNested)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
});
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.post(`/items/${localCollectionFoods}`)
|
||||
.send(foods)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
const mutationKey = `create_${localCollectionFoods}_items`;
|
||||
|
||||
const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
|
||||
mutation: {
|
||||
[mutationKey]: {
|
||||
__args: {
|
||||
data: foods2,
|
||||
},
|
||||
id: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(200);
|
||||
expect(response.body.data.length).toBe(count);
|
||||
expect(gqlResponse.statusCode).toBe(200);
|
||||
expect(gqlResponse.body.data[mutationKey].length).toEqual(count);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
|
||||
describe('errors when above limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// TODO: Fix Oracle ORA-01086 savepoint never established in this session or is invalid
|
||||
if (vendor === 'oracle') {
|
||||
expect(true).toBe(true);
|
||||
return;
|
||||
}
|
||||
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 10;
|
||||
const countNested = 5;
|
||||
const foods: any[] = [];
|
||||
const foods2: any[] = [];
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
foods.push(createFood(pkType));
|
||||
foods[i].ingredients = Array(countNested)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
});
|
||||
|
||||
foods2.push(createFood(pkType));
|
||||
foods2[i].ingredients = Array(countNested)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
});
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.post(`/items/${localCollectionFoods}`)
|
||||
.send(foods)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
const mutationKey = `create_${localCollectionFoods}_items`;
|
||||
|
||||
const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
|
||||
mutation: {
|
||||
[mutationKey]: {
|
||||
__args: {
|
||||
data: foods2,
|
||||
},
|
||||
id: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
|
||||
expect(gqlResponse.statusCode).toBe(200);
|
||||
expect(gqlResponse.body.errors).toBeDefined();
|
||||
expect(gqlResponse.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateBatch', () => {
|
||||
describe('passes when below limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 10;
|
||||
const countCreate = 2;
|
||||
const countUpdate = 3;
|
||||
const countDelete = 2;
|
||||
const foodsID = [];
|
||||
const foodsID2 = [];
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const food: any = createFood(pkType);
|
||||
food.ingredients = Array(countUpdate + countDelete)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
});
|
||||
foodsID.push((await CreateItem(vendor, { collection: localCollectionFoods, item: food })).id);
|
||||
|
||||
const food2: any = createFood(pkType);
|
||||
food2.ingredients = Array(countUpdate + countDelete)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
});
|
||||
foodsID2.push((await CreateItem(vendor, { collection: localCollectionFoods, item: food2 })).id);
|
||||
}
|
||||
|
||||
const foods = await ReadItem(vendor, {
|
||||
collection: localCollectionFoods,
|
||||
fields: [
|
||||
'*',
|
||||
'ingredients.id',
|
||||
`ingredients.${localCollectionIngredients}.id`,
|
||||
`ingredients.${localCollectionIngredients}.name`,
|
||||
],
|
||||
filter: { id: { _in: foodsID } },
|
||||
});
|
||||
|
||||
const foods2 = await ReadItem(vendor, {
|
||||
collection: localCollectionFoods,
|
||||
fields: [
|
||||
'*',
|
||||
'ingredients.id',
|
||||
`ingredients.${localCollectionIngredients}.id`,
|
||||
`ingredients.${localCollectionIngredients}.name`,
|
||||
],
|
||||
filter: { id: { _in: foodsID2 } },
|
||||
});
|
||||
|
||||
for (const food of foods) {
|
||||
const ingredients = food.ingredients;
|
||||
food.ingredients = {
|
||||
create: Array(countCreate)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
}),
|
||||
update: ingredients.slice(0, countUpdate),
|
||||
delete: ingredients.slice(-countDelete).map((ingredient: Ingredient) => ingredient.id),
|
||||
};
|
||||
}
|
||||
|
||||
for (const food of foods2) {
|
||||
food.ingredients = [
|
||||
...food.ingredients,
|
||||
...Array(countCreate)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
}),
|
||||
];
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.patch(`/items/${localCollectionFoods}`)
|
||||
.send(foods)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
const mutationKey = `update_${localCollectionFoods}_batch`;
|
||||
|
||||
const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
|
||||
mutation: {
|
||||
[mutationKey]: {
|
||||
__args: {
|
||||
data: foods2,
|
||||
},
|
||||
id: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(200);
|
||||
expect(response.body.data.length).toBe(count);
|
||||
|
||||
expect(gqlResponse.statusCode).toBe(200);
|
||||
expect(gqlResponse.body.data[mutationKey].length).toEqual(count);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
|
||||
describe('errors when above limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// TODO: Fix Oracle ORA-01086 savepoint never established in this session or is invalid
|
||||
if (vendor === 'oracle') {
|
||||
expect(true).toBe(true);
|
||||
return;
|
||||
}
|
||||
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 10;
|
||||
const countCreate = 2;
|
||||
const countUpdate = 3;
|
||||
const countDelete = 3;
|
||||
const foodsID = [];
|
||||
const foodsID2 = [];
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const food: any = createFood(pkType);
|
||||
food.ingredients = Array(countUpdate + countDelete)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
});
|
||||
foodsID.push((await CreateItem(vendor, { collection: localCollectionFoods, item: food })).id);
|
||||
|
||||
const food2: any = createFood(pkType);
|
||||
food2.ingredients = Array(countUpdate + countDelete)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
});
|
||||
foodsID2.push((await CreateItem(vendor, { collection: localCollectionFoods, item: food2 })).id);
|
||||
}
|
||||
|
||||
const foods = await ReadItem(vendor, {
|
||||
collection: localCollectionFoods,
|
||||
fields: [
|
||||
'*',
|
||||
'ingredients.id',
|
||||
`ingredients.${localCollectionIngredients}.id`,
|
||||
`ingredients.${localCollectionIngredients}.name`,
|
||||
],
|
||||
filter: { id: { _in: foodsID } },
|
||||
});
|
||||
|
||||
const foods2 = await ReadItem(vendor, {
|
||||
collection: localCollectionFoods,
|
||||
fields: [
|
||||
'*',
|
||||
'ingredients.id',
|
||||
`ingredients.${localCollectionIngredients}.id`,
|
||||
`ingredients.${localCollectionIngredients}.name`,
|
||||
],
|
||||
filter: { id: { _in: foodsID2 } },
|
||||
});
|
||||
|
||||
for (const food of foods) {
|
||||
const ingredients = food.ingredients;
|
||||
food.ingredients = {
|
||||
create: Array(countCreate)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
}),
|
||||
update: ingredients.slice(0, countUpdate),
|
||||
delete: ingredients.slice(-countDelete).map((ingredient: Ingredient) => ingredient.id),
|
||||
};
|
||||
}
|
||||
|
||||
for (const food of foods2) {
|
||||
food.ingredients = [
|
||||
...food.ingredients,
|
||||
...Array(countCreate)
|
||||
.fill(0)
|
||||
.map(() => {
|
||||
return { [`${localCollectionIngredients}_id`]: createIngredient(pkType) };
|
||||
}),
|
||||
];
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.patch(`/items/${localCollectionFoods}`)
|
||||
.send(foods)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
const mutationKey = `update_${localCollectionFoods}_batch`;
|
||||
|
||||
const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
|
||||
mutation: {
|
||||
[mutationKey]: {
|
||||
__args: {
|
||||
data: foods2,
|
||||
},
|
||||
id: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
|
||||
expect(gqlResponse.statusCode).toBe(200);
|
||||
expect(gqlResponse.body.errors).toBeDefined();
|
||||
expect(gqlResponse.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,8 +1,8 @@
import request from 'supertest';
import { getUrl } from '@common/config';
import config, { getUrl } from '@common/config';
import vendors from '@common/get-dbs-to-test';
import { v4 as uuid } from 'uuid';
import { CreateItem } from '@common/functions';
import { CreateItem, ReadItem } from '@common/functions';
import { CachedTestsSchema, TestsSchemaVendorValues } from '@query/filter';
import * as common from '@common/index';
import {
@@ -1142,6 +1142,394 @@ describe.each(common.PRIMARY_KEY_TYPES)('/items', (pkType) => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('MAX_BATCH_MUTATION Tests', () => {
|
||||
describe('createMany', () => {
|
||||
describe('passes when below limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 2;
|
||||
const states: any[] = [];
|
||||
const states2: any[] = [];
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
states.push(createState(pkType));
|
||||
states[i].country_id = createCountry(pkType);
|
||||
|
||||
states2.push(createState(pkType));
|
||||
states2[i].country_id = createCountry(pkType);
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.post(`/items/${localCollectionStates}`)
|
||||
.send(states)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
const mutationKey = `create_${localCollectionStates}_items`;
|
||||
const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
|
||||
mutation: {
|
||||
[mutationKey]: {
|
||||
__args: {
|
||||
data: states2,
|
||||
},
|
||||
id: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(200);
|
||||
expect(response.body.data.length).toBe(count);
|
||||
expect(gqlResponse.statusCode).toBe(200);
|
||||
expect(gqlResponse.body.data[mutationKey].length).toEqual(count);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
|
||||
describe('errors when above limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 2 + 1;
|
||||
const states: any[] = [];
|
||||
const states2: any[] = [];
|
||||
for (let i = 0; i < count; i++) {
|
||||
states.push(createState(pkType));
|
||||
states[i].country_id = createCountry(pkType);
|
||||
|
||||
states2.push(createState(pkType));
|
||||
states2[i].country_id = createCountry(pkType);
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.post(`/items/${localCollectionStates}`)
|
||||
.send(states)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
const mutationKey = `create_${localCollectionStates}_items`;
|
||||
const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
|
||||
mutation: {
|
||||
[mutationKey]: {
|
||||
__args: {
|
||||
data: states2,
|
||||
},
|
||||
id: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
|
||||
expect(gqlResponse.statusCode).toBe(200);
|
||||
expect(gqlResponse.body.errors).toBeDefined();
|
||||
expect(gqlResponse.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateBatch', () => {
|
||||
describe('passes when below limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 2;
|
||||
const countCreate = Math.floor(count / 2);
|
||||
const statesID = [];
|
||||
const statesID2 = [];
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const state: any = createState(pkType);
|
||||
state.name = `max_batch_mutation_${i.toString().padStart(3, '0')}`;
|
||||
if (i >= countCreate) {
|
||||
state.country_id = createCountry(pkType);
|
||||
}
|
||||
statesID.push((await CreateItem(vendor, { collection: localCollectionStates, item: state })).id);
|
||||
|
||||
const state2: any = createState(pkType);
|
||||
state2.name = `max_batch_mutation_gql_${i.toString().padStart(3, '0')}`;
|
||||
if (i >= countCreate) {
|
||||
state2.country_id = createCountry(pkType);
|
||||
}
|
||||
statesID2.push((await CreateItem(vendor, { collection: localCollectionStates, item: state2 })).id);
|
||||
}
|
||||
|
||||
const states = await ReadItem(vendor, {
|
||||
collection: localCollectionStates,
|
||||
fields: ['*', 'country_id.id', 'country_id.name'],
|
||||
sort: ['name'],
|
||||
filter: { id: { _in: statesID } },
|
||||
});
|
||||
|
||||
const states2 = await ReadItem(vendor, {
|
||||
collection: localCollectionStates,
|
||||
fields: ['*', 'country_id.id', 'country_id.name'],
|
||||
sort: ['name'],
|
||||
filter: { id: { _in: statesID2 } },
|
||||
});
|
||||
|
||||
for (let i = 0; i < states.length; i++) {
|
||||
if (i < countCreate) {
|
||||
states[i].country_id = createCountry(pkType);
|
||||
} else {
|
||||
states[i].country_id.name = 'updated';
|
||||
}
|
||||
}
|
||||
|
||||
for (let i = 0; i < states2.length; i++) {
|
||||
if (i < countCreate) {
|
||||
states2[i].country_id = createCountry(pkType);
|
||||
} else {
|
||||
states2[i].country_id.name = 'updated';
|
||||
}
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.patch(`/items/${localCollectionStates}`)
|
||||
.send(states)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
const mutationKey = `update_${localCollectionStates}_batch`;
|
||||
|
||||
const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
|
||||
mutation: {
|
||||
[mutationKey]: {
|
||||
__args: {
|
||||
data: states2,
|
||||
},
|
||||
id: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(200);
|
||||
expect(response.body.data.length).toBe(count);
|
||||
|
||||
expect(gqlResponse.statusCode).toBe(200);
|
||||
expect(gqlResponse.body.data[mutationKey].length).toEqual(count);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
|
||||
describe('errors when above limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 2 + 1;
|
||||
const countCreate = Math.floor(count / 2);
|
||||
const statesID = [];
|
||||
const statesID2 = [];
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const state: any = createState(pkType);
|
||||
state.name = `max_batch_mutation_${i.toString().padStart(3, '0')}`;
|
||||
if (i >= countCreate) {
|
||||
state.country_id = createCountry(pkType);
|
||||
}
|
||||
statesID.push((await CreateItem(vendor, { collection: localCollectionStates, item: state })).id);
|
||||
|
||||
const state2: any = createState(pkType);
|
||||
state2.name = `max_batch_mutation_gql_${i.toString().padStart(3, '0')}`;
|
||||
if (i >= countCreate) {
|
||||
state2.country_id = createCountry(pkType);
|
||||
}
|
||||
statesID2.push((await CreateItem(vendor, { collection: localCollectionStates, item: state2 })).id);
|
||||
}
|
||||
|
||||
const states = await ReadItem(vendor, {
|
||||
collection: localCollectionStates,
|
||||
fields: ['*', 'country_id.id', 'country_id.name'],
|
||||
sort: ['name'],
|
||||
filter: { id: { _in: statesID } },
|
||||
});
|
||||
|
||||
const states2 = await ReadItem(vendor, {
|
||||
collection: localCollectionStates,
|
||||
fields: ['*', 'country_id.id', 'country_id.name'],
|
||||
sort: ['name'],
|
||||
filter: { id: { _in: statesID2 } },
|
||||
});
|
||||
|
||||
for (let i = 0; i < states.length; i++) {
|
||||
if (i < countCreate) {
|
||||
states[i].country_id = createCountry(pkType);
|
||||
} else {
|
||||
states[i].country_id.name = 'updated';
|
||||
}
|
||||
}
|
||||
|
||||
for (let i = 0; i < states2.length; i++) {
|
||||
if (i < countCreate) {
|
||||
states2[i].country_id = createCountry(pkType);
|
||||
} else {
|
||||
states2[i].country_id.name = 'updated';
|
||||
}
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.patch(`/items/${localCollectionStates}`)
|
||||
.send(states)
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
const mutationKey = `update_${localCollectionStates}_batch`;
|
||||
|
||||
const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
|
||||
mutation: {
|
||||
[mutationKey]: {
|
||||
__args: {
|
||||
data: states2,
|
||||
},
|
||||
id: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
|
||||
expect(gqlResponse.statusCode).toBe(200);
|
||||
expect(gqlResponse.body.errors).toBeDefined();
|
||||
expect(gqlResponse.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateMany', () => {
|
||||
describe('passes when below limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) - 1;
|
||||
const stateIDs = [];
|
||||
const stateIDs2 = [];
|
||||
const newCountry = createCountry(pkType);
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const state: any = createState(pkType);
|
||||
state.country_id = createCountry(pkType);
|
||||
stateIDs.push((await CreateItem(vendor, { collection: localCollectionStates, item: state })).id);
|
||||
|
||||
const state2: any = createState(pkType);
|
||||
state2.country_id = createCountry(pkType);
|
||||
stateIDs2.push((await CreateItem(vendor, { collection: localCollectionStates, item: state2 })).id);
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.patch(`/items/${localCollectionStates}`)
|
||||
.send({ keys: stateIDs, data: { country_id: newCountry } })
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
const mutationKey = `update_${localCollectionStates}_items`;
|
||||
|
||||
const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
|
||||
mutation: {
|
||||
[mutationKey]: {
|
||||
__args: {
|
||||
ids: stateIDs2,
|
||||
data: { country_id: newCountry },
|
||||
},
|
||||
id: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(200);
|
||||
expect(response.body.data.length).toBe(count);
|
||||
|
||||
expect(gqlResponse.statusCode).toBe(200);
|
||||
expect(gqlResponse.body.data[mutationKey].length).toEqual(count);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
|
||||
describe('errors when above limit', () => {
|
||||
it.each(vendors)(
|
||||
'%s',
|
||||
async (vendor) => {
|
||||
// Setup
|
||||
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION);
|
||||
const stateIDs = [];
|
||||
const stateIDs2 = [];
|
||||
const newCountry = createCountry(pkType);
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const state: any = createState(pkType);
|
||||
state.country_id = createCountry(pkType);
|
||||
stateIDs.push((await CreateItem(vendor, { collection: localCollectionStates, item: state })).id);
|
||||
|
||||
const state2: any = createState(pkType);
|
||||
state2.country_id = createCountry(pkType);
|
||||
stateIDs2.push((await CreateItem(vendor, { collection: localCollectionStates, item: state2 })).id);
|
||||
}
|
||||
|
||||
// Action
|
||||
const response = await request(getUrl(vendor))
|
||||
.patch(`/items/${localCollectionStates}`)
|
||||
.send({ keys: stateIDs, data: { country_id: newCountry } })
|
||||
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);
|
||||
|
||||
const mutationKey = `update_${localCollectionStates}_items`;
|
||||
|
||||
const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
|
||||
mutation: {
|
||||
[mutationKey]: {
|
||||
__args: {
|
||||
ids: stateIDs2,
|
||||
data: { country_id: newCountry },
|
||||
},
|
||||
id: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.statusCode).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
|
||||
expect(gqlResponse.statusCode).toBe(200);
|
||||
expect(gqlResponse.body.errors).toBeDefined();
|
||||
expect(gqlResponse.body.errors[0].message).toBe(
|
||||
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
|
||||
);
|
||||
},
|
||||
120000
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
CheckQueryFilters(
|
||||
{
|
||||
method: 'get',
|
||||
|
||||
@@ -1,5 +1,5 @@
import request from 'supertest';
import { getUrl } from '@common/config';
import config, { getUrl } from '@common/config';
import vendors from '@common/get-dbs-to-test';
import { v4 as uuid } from 'uuid';
import { CreateItem } from '@common/functions';
@@ -1149,5 +1149,624 @@ describe.each(common.PRIMARY_KEY_TYPES)('/items', (pkType) => {
});
});
});

describe('MAX_BATCH_MUTATION Tests', () => {
describe('createMany', () => {
describe('passes when below limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION);
const artists = [];
const artists2 = [];

for (let i = 0; i < count; i++) {
artists.push(createArtist(pkType));
artists2.push(createArtist(pkType));
}

// Action
const response = await request(getUrl(vendor))
.post(`/items/${localCollectionArtists}`)
.send(artists)
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `create_${localCollectionArtists}_items`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
data: artists2,
},
id: true,
},
},
});

// Assert
expect(response.statusCode).toBe(200);
expect(response.body.data.length).toBe(count);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.data[mutationKey].length).toEqual(count);
},
120000
);
});

describe('errors when above limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) + 1;
const artists = [];
const artists2 = [];

for (let i = 0; i < count; i++) {
artists.push(createArtist(pkType));
artists2.push(createArtist(pkType));
}

// Action
const response = await request(getUrl(vendor))
.post(`/items/${localCollectionArtists}`)
.send(artists)
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `create_${localCollectionArtists}_items`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
data: artists2,
},
id: true,
},
},
});

// Assert
expect(response.statusCode).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.errors).toBeDefined();
expect(gqlResponse.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);
},
120000
);
});
});

describe('updateBatch', () => {
describe('passes when below limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION);
const artists = [];
const artists2 = [];

for (let i = 0; i < count; i++) {
artists.push(
await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })
);
artists2.push(
await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })
);
}

// Action
const response = await request(getUrl(vendor))
.patch(`/items/${localCollectionArtists}`)
.send(artists)
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `update_${localCollectionArtists}_batch`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
data: artists2,
},
id: true,
},
},
});

// Assert
expect(response.statusCode).toBe(200);
expect(response.body.data.length).toBe(count);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.data[mutationKey].length).toEqual(count);
},
120000
);
});

describe('errors when above limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) + 1;
const artists = [];
const artists2 = [];

for (let i = 0; i < count; i++) {
artists.push(
await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })
);
artists2.push(
await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })
);
}

// Action
const response = await request(getUrl(vendor))
.patch(`/items/${localCollectionArtists}`)
.send(artists)
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `update_${localCollectionArtists}_batch`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
data: artists2,
},
id: true,
},
},
});

// Assert
expect(response.statusCode).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.errors).toBeDefined();
expect(gqlResponse.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);
},
120000
);
});
});

describe('updateMany', () => {
describe('passes when below limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION);
const artistIDs = [];
const artistIDs2 = [];

for (let i = 0; i < count; i++) {
artistIDs.push(
(await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })).id
);
artistIDs2.push(
(await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })).id
);
}

// Action
const response = await request(getUrl(vendor))
.patch(`/items/${localCollectionArtists}`)
.send({ keys: artistIDs, data: { name: 'updated' } })
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `update_${localCollectionArtists}_items`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
ids: artistIDs2,
data: { name: 'updated' },
},
id: true,
},
},
});

// Assert
expect(response.statusCode).toBe(200);
expect(response.body.data.length).toBe(count);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.data[mutationKey].length).toEqual(count);
},
120000
);
});

describe('errors when above limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) + 1;
const artistIDs = [];
const artistIDs2 = [];

for (let i = 0; i < count; i++) {
artistIDs.push(
(await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })).id
);
artistIDs2.push(
(await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })).id
);
}

// Action
const response = await request(getUrl(vendor))
.patch(`/items/${localCollectionArtists}`)
.send({ keys: artistIDs, data: { name: 'updated' } })
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `update_${localCollectionArtists}_items`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
ids: artistIDs2,
data: { name: 'updated' },
},
id: true,
},
},
});

// Assert
expect(response.statusCode).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.errors).toBeDefined();
expect(gqlResponse.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);
},
120000
);
});
});

describe('updateByQuery', () => {
describe('passes when below limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION);
const company = uuid();

for (let i = 0; i < count; i++) {
const artist = createArtist(pkType);
artist.company = company;
await CreateItem(vendor, { collection: localCollectionArtists, item: artist });
}

// Action
const response = await request(getUrl(vendor))
.patch(`/items/${localCollectionArtists}`)
.send({
query: {
filter: JSON.stringify({ company: { _eq: company } }),
limit: -1,
},
data: { name: 'updated' },
})
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

// Assert
expect(response.statusCode).toBe(200);
expect(response.body.data.length).toBe(count);
},
120000
);
});

describe('errors when above limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) + 1;
const company = uuid();

for (let i = 0; i < count; i++) {
const artist = createArtist(pkType);
artist.company = company;
await CreateItem(vendor, { collection: localCollectionArtists, item: artist });
}

// Action
const response = await request(getUrl(vendor))
.patch(`/items/${localCollectionArtists}`)
.send({
query: {
filter: JSON.stringify({ company: { _eq: company } }),
limit: -1,
},
data: { name: 'updated' },
})
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

// Assert
expect(response.statusCode).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);
},
120000
);
});
});

describe('deleteMany', () => {
describe('passes when below limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION);
const artistIDs = [];
const artistIDs2 = [];
const artistIDs3 = [];
const artistIDs4 = [];

for (let i = 0; i < count; i++) {
artistIDs.push(
(await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })).id
);
artistIDs2.push(
(await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })).id
);
artistIDs3.push(
(await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })).id
);
artistIDs4.push(
(await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })).id
);
}

// Action
const response = await request(getUrl(vendor))
.delete(`/items/${localCollectionArtists}`)
.send({ keys: artistIDs })
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const response2 = await request(getUrl(vendor))
.delete(`/items/${localCollectionArtists}`)
.send({ keys: artistIDs2 })
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `delete_${localCollectionArtists}_items`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
ids: artistIDs3,
},
ids: true,
},
},
});

const gqlResponse2 = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
ids: artistIDs4,
},
ids: true,
},
},
});

// Assert
expect(response.statusCode).toBe(204);

expect(response2.statusCode).toBe(204);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.data[mutationKey].ids.length).toEqual(count);

expect(gqlResponse2.statusCode).toBe(200);
expect(gqlResponse2.body.data[mutationKey].ids.length).toEqual(count);
},
120000
);
});

describe('errors when above limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) + 1;
const artistIDs = [];
const artistIDs2 = [];
const artistIDs3 = [];
const artistIDs4 = [];

for (let i = 0; i < count; i++) {
artistIDs.push(
(await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })).id
);
artistIDs2.push(
(await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })).id
);
artistIDs3.push(
(await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })).id
);
artistIDs4.push(
(await CreateItem(vendor, { collection: localCollectionArtists, item: createArtist(pkType) })).id
);
}

// Action
const response = await request(getUrl(vendor))
.delete(`/items/${localCollectionArtists}`)
.send({ keys: artistIDs })
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const response2 = await request(getUrl(vendor))
.delete(`/items/${localCollectionArtists}`)
.send({ keys: artistIDs2 })
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `delete_${localCollectionArtists}_items`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
ids: artistIDs3,
},
ids: true,
},
},
});

const gqlResponse2 = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
ids: artistIDs4,
},
ids: true,
},
},
});

// Assert
expect(response.statusCode).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);

expect(response2.statusCode).toBe(400);
expect(response2.body.errors).toBeDefined();
expect(response2.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.errors).toBeDefined();
expect(gqlResponse.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);

expect(gqlResponse2.statusCode).toBe(200);
expect(gqlResponse2.body.errors).toBeDefined();
expect(gqlResponse2.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);
},
120000
);
});
});

describe('deleteByQuery', () => {
describe('passes when below limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION);
const company = uuid();

for (let i = 0; i < count; i++) {
const artist = createArtist(pkType);
artist.company = company;
await CreateItem(vendor, { collection: localCollectionArtists, item: artist });
}

// Action
const response = await request(getUrl(vendor))
.delete(`/items/${localCollectionArtists}`)
.send({
query: {
filter: JSON.stringify({ company: { _eq: company } }),
limit: -1,
},
})
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

// Assert
expect(response.statusCode).toBe(204);
},
120000
);
});

describe('errors when above limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) + 1;
const company = uuid();

for (let i = 0; i < count; i++) {
const artist = createArtist(pkType);
artist.company = company;
await CreateItem(vendor, { collection: localCollectionArtists, item: artist });
}

// Action
const response = await request(getUrl(vendor))
.delete(`/items/${localCollectionArtists}`)
.send({
query: {
filter: JSON.stringify({ company: { _eq: company } }),
limit: -1,
},
})
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

// Assert
expect(response.statusCode).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);
},
120000
);
});
});
});
});
});

@@ -1,8 +1,8 @@
import request from 'supertest';
import { getUrl } from '@common/config';
import config, { getUrl } from '@common/config';
import vendors from '@common/get-dbs-to-test';
import { v4 as uuid } from 'uuid';
import { CreateItem } from '@common/functions';
import { CreateItem, ReadItem } from '@common/functions';
import { CachedTestsSchema, TestsSchemaVendorValues } from '@query/filter';
import * as common from '@common/index';
import {
@@ -1845,5 +1845,443 @@ describe.each(common.PRIMARY_KEY_TYPES)('/items', (pkType) => {
});
});
});

describe('MAX_BATCH_MUTATION Tests', () => {
describe('createOne', () => {
describe('passes when below limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// TODO: Fix Oracle exceeded directus_revisions limit of 4000
if (vendor === 'oracle') {
expect(true).toBe(true);
return;
}

// Setup
const countNested = Number(config.envs[vendor].MAX_BATCH_MUTATION) - 1;
const country: any = createCountry(pkType);
const country2: any = createCountry(pkType);

country.states = Array(countNested)
.fill(0)
.map(() => createState(pkType));

country2.states = Array(countNested)
.fill(0)
.map(() => createState(pkType));

// Action
const response = await request(getUrl(vendor))
.post(`/items/${localCollectionCountries}`)
.send(country)
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `create_${localCollectionCountries}_item`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
data: country2,
},
id: true,
states: {
id: true,
},
},
},
});

// Assert
expect(response.statusCode).toBe(200);
expect(response.body.data.states.length).toBe(countNested);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.data[mutationKey].states.length).toEqual(countNested);
},
120000
);
});

describe('errors when above limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// TODO: Fix Oracle ORA-01086 savepoint never established in this session or is invalid
if (vendor === 'oracle') {
expect(true).toBe(true);
return;
}

// Setup
const countNested = Number(config.envs[vendor].MAX_BATCH_MUTATION);
const country: any = createCountry(pkType);
const country2: any = createCountry(pkType);

country.states = Array(countNested)
.fill(0)
.map(() => createState(pkType));

country2.states = Array(countNested)
.fill(0)
.map(() => createState(pkType));

// Action
const response = await request(getUrl(vendor))
.post(`/items/${localCollectionCountries}`)
.send(country)
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `create_${localCollectionCountries}_item`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
data: country2,
},
id: true,
states: {
id: true,
},
},
},
});

// Assert
expect(response.statusCode).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.errors).toBeDefined();
expect(gqlResponse.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);
},
120000
);
});
});

describe('createMany', () => {
describe('passes when below limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 10;
const countNested = 9;
const countries: any[] = [];
const countries2: any[] = [];

for (let i = 0; i < count; i++) {
countries.push(createCountry(pkType));
countries[i].states = Array(countNested)
.fill(0)
.map(() => createState(pkType));

countries2.push(createCountry(pkType));
countries2[i].states = Array(countNested)
.fill(0)
.map(() => createState(pkType));
}

// Action
const response = await request(getUrl(vendor))
.post(`/items/${localCollectionCountries}`)
.send(countries)
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `create_${localCollectionCountries}_items`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
data: countries2,
},
id: true,
},
},
});

// Assert
expect(response.statusCode).toBe(200);
expect(response.body.data.length).toBe(count);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.data[mutationKey].length).toEqual(count);
},
120000
);
});

describe('errors when above limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// TODO: Fix Oracle ORA-01086 savepoint never established in this session or is invalid
if (vendor === 'oracle') {
expect(true).toBe(true);
return;
}

// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 10;
const countNested = 10;
const countries: any[] = [];
const countries2: any[] = [];

for (let i = 0; i < count; i++) {
countries.push(createCountry(pkType));
countries[i].states = Array(countNested)
.fill(0)
.map(() => createState(pkType));

countries2.push(createCountry(pkType));
countries2[i].states = Array(countNested)
.fill(0)
.map(() => createState(pkType));
}

// Action
const response = await request(getUrl(vendor))
.post(`/items/${localCollectionCountries}`)
.send(countries)
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `create_${localCollectionCountries}_items`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
data: countries2,
},
id: true,
},
},
});

// Assert
expect(response.statusCode).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.errors).toBeDefined();
expect(gqlResponse.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);
},
120000
);
});
});

describe('updateBatch', () => {
describe('passes when below limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 10;
const countCreate = 4;
const countUpdate = 3;
const countDelete = 2;
const countriesID = [];
const countriesID2 = [];

for (let i = 0; i < count; i++) {
const country: any = createCountry(pkType);
country.states = Array(countUpdate + countDelete)
.fill(0)
.map(() => createState(pkType));
countriesID.push(
(await CreateItem(vendor, { collection: localCollectionCountries, item: country })).id
);

const country2: any = createCountry(pkType);
country2.states = Array(countUpdate + countDelete)
.fill(0)
.map(() => createState(pkType));
countriesID2.push(
(await CreateItem(vendor, { collection: localCollectionCountries, item: country2 })).id
);
}

const countries = await ReadItem(vendor, {
collection: localCollectionCountries,
fields: ['*', 'states.id', 'states.name'],
filter: { id: { _in: countriesID } },
});

const countries2 = await ReadItem(vendor, {
collection: localCollectionCountries,
fields: ['*', 'states.id', 'states.name'],
filter: { id: { _in: countriesID2 } },
});

for (const country of countries) {
const states = country.states;
country.states = {
create: Array(countCreate)
.fill(0)
.map(() => createState(pkType)),
update: states.slice(0, countUpdate),
delete: states.slice(-countDelete).map((state: State) => state.id),
};
}

for (const country of countries2) {
country.states = [
...country.states,
...Array(countCreate)
.fill(0)
.map(() => createState(pkType)),
];
}

// Action
const response = await request(getUrl(vendor))
.patch(`/items/${localCollectionCountries}`)
.send(countries)
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `update_${localCollectionCountries}_batch`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
data: countries2,
},
id: true,
},
},
});

// Assert
expect(response.statusCode).toBe(200);
expect(response.body.data.length).toBe(count);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.data[mutationKey].length).toEqual(count);
},
120000
);
});

describe('errors when above limit', () => {
it.each(vendors)(
'%s',
async (vendor) => {
// TODO: Fix Oracle ORA-01086 savepoint never established in this session or is invalid
if (vendor === 'oracle') {
expect(true).toBe(true);
return;
}

// Setup
const count = Number(config.envs[vendor].MAX_BATCH_MUTATION) / 10;
const countCreate = 4;
const countUpdate = 3;
const countDelete = 3;
const countriesID = [];
const countriesID2 = [];

for (let i = 0; i < count; i++) {
const country: any = createCountry(pkType);
country.states = Array(countUpdate + countDelete)
.fill(0)
.map(() => createState(pkType));
countriesID.push(
(await CreateItem(vendor, { collection: localCollectionCountries, item: country })).id
);

const country2: any = createCountry(pkType);
country2.states = Array(countUpdate + countDelete)
.fill(0)
.map(() => createState(pkType));
countriesID2.push(
(await CreateItem(vendor, { collection: localCollectionCountries, item: country2 })).id
);
}

const countries = await ReadItem(vendor, {
collection: localCollectionCountries,
fields: ['*', 'states.id', 'states.name'],
filter: { id: { _in: countriesID } },
});

const countries2 = await ReadItem(vendor, {
collection: localCollectionCountries,
fields: ['*', 'states.id', 'states.name'],
filter: { id: { _in: countriesID2 } },
});

for (const country of countries) {
const states = country.states;
country.states = {
create: Array(countCreate)
.fill(0)
.map(() => createState(pkType)),
update: states.slice(0, countUpdate),
delete: states.slice(-countDelete).map((state: State) => state.id),
};
}

for (const country of countries2) {
country.states = [
...country.states,
...Array(countCreate)
.fill(0)
.map(() => createState(pkType)),
];
}

// Action
const response = await request(getUrl(vendor))
.patch(`/items/${localCollectionCountries}`)
.send(countries)
.set('Authorization', `Bearer ${common.USER.ADMIN.TOKEN}`);

const mutationKey = `update_${localCollectionCountries}_batch`;

const gqlResponse = await requestGraphQL(getUrl(vendor), false, common.USER.ADMIN.TOKEN, {
mutation: {
[mutationKey]: {
__args: {
data: countries2,
},
id: true,
},
},
});

// Assert
expect(response.statusCode).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);

expect(gqlResponse.statusCode).toBe(200);
expect(gqlResponse.body.errors).toBeDefined();
expect(gqlResponse.body.errors[0].message).toBe(
`Exceeded max batch mutation limit of ${config.envs[vendor].MAX_BATCH_MUTATION}.`
);
},
120000
);
});
});
});
});
});

@@ -180,8 +180,8 @@ export const seedM2MAliasAllFieldTypesValues = async (
otherPossibleKeys: any[]
) => {
try {
const collectionItems = await ReadItem(vendor, { collection: collection, fields: '*' });
const otherCollectionItems = await ReadItem(vendor, { collection: otherCollection, fields: '*' });
const collectionItems = await ReadItem(vendor, { collection: collection, fields: ['*'] });
const otherCollectionItems = await ReadItem(vendor, { collection: otherCollection, fields: ['*'] });
const newCollectionKeys = collectionItems.map((i: any) => i.id).filter((i: any) => !possibleKeys.includes(i));
const newOtherCollectionKeys = otherCollectionItems
.map((i: any) => i.id)
@@ -214,8 +214,8 @@ export const seedM2AAliasAllFieldTypesValues = async (
otherPossibleKeys: any[]
) => {
try {
const collectionItems = await ReadItem(vendor, { collection: collection, fields: 'id' });
const otherCollectionItems = await ReadItem(vendor, { collection: relatedCollection, fields: 'id' });
const collectionItems = await ReadItem(vendor, { collection: collection, fields: ['id'] });
const otherCollectionItems = await ReadItem(vendor, { collection: relatedCollection, fields: ['id'] });
const newCollectionKeys = collectionItems.map((i: any) => i.id).filter((i: any) => !possibleKeys.includes(i));
const newOtherCollectionKeys = otherCollectionItems
.map((i: any) => i.id)
