Use prefetched schema info: pass `req.schema` into service constructors instead of inspecting the database schema per request

This commit is contained in:
rijkvanzanten
2020-11-09 17:13:48 -05:00
parent 0697f4b4be
commit 8cbdfb409b
61 changed files with 834 additions and 458 deletions

View File

@@ -45,6 +45,7 @@ import { WebhooksService } from './services/webhooks';
import { InvalidPayloadException } from './exceptions';
import { registerExtensions } from './extensions';
import { register as registerWebhooks } from './webhooks';
import emitter from './emitter';
import fse from 'fs-extra';
@@ -149,8 +150,7 @@ export default async function createApp() {
app.use(errorHandler);
// Register all webhooks
const webhooksService = new WebhooksService();
await webhooksService.register();
await registerWebhooks();
// Register custom hooks / endpoints
await registerExtensions(customRouter);

View File

@@ -13,8 +13,14 @@ router.use(useCollection('directus_activity'));
router.get(
'/',
asyncHandler(async (req, res, next) => {
const service = new ActivityService({ accountability: req.accountability });
const metaService = new MetaService({ accountability: req.accountability });
const service = new ActivityService({
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
});
const records = await service.readByQuery(req.sanitizedQuery);
const meta = await metaService.getMetaForQuery('directus_activity', req.sanitizedQuery);
@@ -32,7 +38,10 @@ router.get(
router.get(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new ActivityService({ accountability: req.accountability });
const service = new ActivityService({
accountability: req.accountability,
schema: req.schema,
});
const record = await service.readByKey(req.params.pk, req.sanitizedQuery);
res.locals.payload = {
@@ -47,7 +56,10 @@ router.get(
router.post(
'/comment',
asyncHandler(async (req, res, next) => {
const service = new ActivityService({ accountability: req.accountability });
const service = new ActivityService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = await service.create({
...req.body,
@@ -79,7 +91,10 @@ router.post(
router.patch(
'/comment/:pk',
asyncHandler(async (req, res, next) => {
const service = new ActivityService({ accountability: req.accountability });
const service = new ActivityService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = await service.update(req.body, req.params.pk);
try {
@@ -104,7 +119,10 @@ router.patch(
router.delete(
'/comment/:pk',
asyncHandler(async (req, res, next) => {
const service = new ActivityService({ accountability: req.accountability });
const service = new ActivityService({
accountability: req.accountability,
schema: req.schema,
});
await service.delete(req.params.pk);
return next();

View File

@@ -47,7 +47,7 @@ router.get(
// Validate query params
asyncHandler(async (req, res, next) => {
const payloadService = new PayloadService('directus_settings');
const payloadService = new PayloadService('directus_settings', { schema: req.schema });
const defaults = { storage_asset_presets: [], storage_asset_transform: 'all' };
let savedAssetSettings = await database
@@ -107,7 +107,10 @@ router.get(
// Return file
asyncHandler(async (req, res) => {
const service = new AssetsService({ accountability: req.accountability });
const service = new AssetsService({
accountability: req.accountability,
schema: req.schema,
});
const transformation: Transformation = res.locals.transformation.key
? res.locals.shortcuts.find(
(transformation: Transformation) =>

View File

@@ -34,6 +34,7 @@ router.post(
const authenticationService = new AuthenticationService({
accountability: accountability,
schema: req.schema,
});
const { error } = loginSchema.validate(req.body);
@@ -90,6 +91,7 @@ router.post(
const authenticationService = new AuthenticationService({
accountability: accountability,
schema: req.schema,
});
const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;
@@ -142,6 +144,7 @@ router.post(
const authenticationService = new AuthenticationService({
accountability: accountability,
schema: req.schema,
});
const currentRefreshToken = req.body.refresh_token || req.cookies.directus_refresh_token;
@@ -171,7 +174,7 @@ router.post(
role: null,
};
const service = new UsersService({ accountability });
const service = new UsersService({ accountability, schema: req.schema });
try {
await service.requestPasswordReset(req.body.email);
@@ -202,7 +205,7 @@ router.post(
role: null,
};
const service = new UsersService({ accountability });
const service = new UsersService({ accountability, schema: req.schema });
await service.resetPassword(req.body.token, req.body.password);
return next();
}),
@@ -260,6 +263,7 @@ router.get(
const authenticationService = new AuthenticationService({
accountability: accountability,
schema: req.schema,
});
const email = getEmailFromProfile(

View File

@@ -9,7 +9,10 @@ const router = Router();
router.post(
'/',
asyncHandler(async (req, res, next) => {
const collectionsService = new CollectionsService({ accountability: req.accountability });
const collectionsService = new CollectionsService({
accountability: req.accountability,
schema: req.schema,
});
const collectionKey = await collectionsService.create(req.body);
const record = await collectionsService.readByKey(collectionKey);
@@ -23,8 +26,14 @@ router.post(
router.get(
'/',
asyncHandler(async (req, res, next) => {
const collectionsService = new CollectionsService({ accountability: req.accountability });
const metaService = new MetaService({ accountability: req.accountability });
const collectionsService = new CollectionsService({
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
});
const collections = await collectionsService.readByQuery();
const meta = await metaService.getMetaForQuery('directus_collections', {});
@@ -38,7 +47,10 @@ router.get(
router.get(
'/:collection',
asyncHandler(async (req, res, next) => {
const collectionsService = new CollectionsService({ accountability: req.accountability });
const collectionsService = new CollectionsService({
accountability: req.accountability,
schema: req.schema,
});
const collectionKey = req.params.collection.includes(',')
? req.params.collection.split(',')
: req.params.collection;
@@ -62,7 +74,10 @@ router.get(
router.patch(
'/:collection',
asyncHandler(async (req, res, next) => {
const collectionsService = new CollectionsService({ accountability: req.accountability });
const collectionsService = new CollectionsService({
accountability: req.accountability,
schema: req.schema,
});
const collectionKey = req.params.collection.includes(',')
? req.params.collection.split(',')
: req.params.collection;
@@ -91,7 +106,10 @@ router.delete(
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
}
const collectionsService = new CollectionsService({ accountability: req.accountability });
const collectionsService = new CollectionsService({
accountability: req.accountability,
schema: req.schema,
});
await collectionsService.delete(req.body as string[]);
return next();
@@ -102,7 +120,10 @@ router.delete(
router.delete(
'/:collection',
asyncHandler(async (req, res, next) => {
const collectionsService = new CollectionsService({ accountability: req.accountability });
const collectionsService = new CollectionsService({
accountability: req.accountability,
schema: req.schema,
});
const collectionKey = req.params.collection.includes(',')
? req.params.collection.split(',')
: req.params.collection;

View File

@@ -2,7 +2,6 @@ import { Router } from 'express';
import asyncHandler from 'express-async-handler';
import { FieldsService } from '../services/fields';
import validateCollection from '../middleware/collection-exists';
import { schemaInspector } from '../database';
import { InvalidPayloadException, ForbiddenException } from '../exceptions';
import Joi from 'joi';
import { types, Field } from '../types';
@@ -16,7 +15,10 @@ router.use(useCollection('directus_fields'));
router.get(
'/',
asyncHandler(async (req, res, next) => {
const service = new FieldsService({ accountability: req.accountability });
const service = new FieldsService({
accountability: req.accountability,
schema: req.schema,
});
const fields = await service.readAll();
res.locals.payload = { data: fields || null };
@@ -29,7 +31,10 @@ router.get(
'/:collection',
validateCollection,
asyncHandler(async (req, res, next) => {
const service = new FieldsService({ accountability: req.accountability });
const service = new FieldsService({
accountability: req.accountability,
schema: req.schema,
});
const fields = await service.readAll(req.params.collection);
res.locals.payload = { data: fields || null };
@@ -42,10 +47,13 @@ router.get(
'/:collection/:field',
validateCollection,
asyncHandler(async (req, res, next) => {
const service = new FieldsService({ accountability: req.accountability });
const service = new FieldsService({
accountability: req.accountability,
schema: req.schema,
});
const exists = await schemaInspector.hasColumn(req.params.collection, req.params.field);
if (exists === false) throw new ForbiddenException();
if (req.params.field in req.schema[req.params.collection].columns === false)
throw new ForbiddenException();
const field = await service.readOne(req.params.collection, req.params.field);
@@ -75,7 +83,10 @@ router.post(
if (!req.body.schema && !req.body.meta)
throw new InvalidPayloadException(`"schema" or "meta" is required`);
const service = new FieldsService({ accountability: req.accountability });
const service = new FieldsService({
accountability: req.accountability,
schema: req.schema,
});
const { error } = newFieldSchema.validate(req.body);
@@ -107,7 +118,10 @@ router.patch(
'/:collection',
validateCollection,
asyncHandler(async (req, res, next) => {
const service = new FieldsService({ accountability: req.accountability });
const service = new FieldsService({
accountability: req.accountability,
schema: req.schema,
});
if (Array.isArray(req.body) === false) {
throw new InvalidPayloadException('Submitted body has to be an array.');
@@ -142,7 +156,10 @@ router.patch(
validateCollection,
// @todo: validate field
asyncHandler(async (req, res, next) => {
const service = new FieldsService({ accountability: req.accountability });
const service = new FieldsService({
accountability: req.accountability,
schema: req.schema,
});
const fieldData: Partial<Field> & { field: string; type: typeof types[number] } = req.body;
if (!fieldData.field) fieldData.field = req.params.field;
@@ -169,7 +186,10 @@ router.delete(
'/:collection/:field',
validateCollection,
asyncHandler(async (req, res, next) => {
const service = new FieldsService({ accountability: req.accountability });
const service = new FieldsService({
accountability: req.accountability,
schema: req.schema,
});
await service.deleteField(req.params.collection, req.params.field);
return next();
}),

View File

@@ -23,7 +23,7 @@ const multipartHandler = asyncHandler(async (req, res, next) => {
const busboy = new Busboy({ headers: req.headers });
const savedFiles: PrimaryKey[] = [];
const service = new FilesService({ accountability: req.accountability });
const service = new FilesService({ accountability: req.accountability, schema: req.schema });
const existingPrimaryKey = req.params.pk || undefined;
@@ -102,7 +102,10 @@ router.post(
'/',
multipartHandler,
asyncHandler(async (req, res, next) => {
const service = new FilesService({ accountability: req.accountability });
const service = new FilesService({
accountability: req.accountability,
schema: req.schema,
});
let keys: PrimaryKey | PrimaryKey[] = [];
if (req.is('multipart/form-data')) {
@@ -145,7 +148,10 @@ router.post(
throw new InvalidPayloadException(error.message);
}
const service = new FilesService({ accountability: req.accountability });
const service = new FilesService({
accountability: req.accountability,
schema: req.schema,
});
const fileResponse = await axios.get<NodeJS.ReadableStream>(req.body.url, {
responseType: 'stream',
@@ -183,8 +189,14 @@ router.post(
router.get(
'/',
asyncHandler(async (req, res, next) => {
const service = new FilesService({ accountability: req.accountability });
const metaService = new MetaService({ accountability: req.accountability });
const service = new FilesService({
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
});
const records = await service.readByQuery(req.sanitizedQuery);
const meta = await metaService.getMetaForQuery('directus_files', req.sanitizedQuery);
@@ -199,7 +211,10 @@ router.get(
'/:pk',
asyncHandler(async (req, res, next) => {
const keys = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const service = new FilesService({ accountability: req.accountability });
const service = new FilesService({
accountability: req.accountability,
schema: req.schema,
});
const record = await service.readByKey(keys as any, req.sanitizedQuery);
res.locals.payload = { data: record || null };
return next();
@@ -211,7 +226,10 @@ router.patch(
'/:pk',
multipartHandler,
asyncHandler(async (req, res, next) => {
const service = new FilesService({ accountability: req.accountability });
const service = new FilesService({
accountability: req.accountability,
schema: req.schema,
});
let keys: PrimaryKey | PrimaryKey[] = [];
if (req.is('multipart/form-data')) {
@@ -244,7 +262,10 @@ router.delete(
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
}
const service = new FilesService({ accountability: req.accountability });
const service = new FilesService({
accountability: req.accountability,
schema: req.schema,
});
await service.delete(req.body as PrimaryKey[]);
return next();
}),
@@ -255,7 +276,10 @@ router.delete(
'/:pk',
asyncHandler(async (req, res, next) => {
const keys = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const service = new FilesService({ accountability: req.accountability });
const service = new FilesService({
accountability: req.accountability,
schema: req.schema,
});
await service.delete(keys as any);
return next();
}),

View File

@@ -13,7 +13,10 @@ router.use(useCollection('directus_folders'));
router.post(
'/',
asyncHandler(async (req, res, next) => {
const service = new FoldersService({ accountability: req.accountability });
const service = new FoldersService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = await service.create(req.body);
try {
@@ -35,8 +38,14 @@ router.post(
router.get(
'/',
asyncHandler(async (req, res, next) => {
const service = new FoldersService({ accountability: req.accountability });
const metaService = new MetaService({ accountability: req.accountability });
const service = new FoldersService({
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
});
const records = await service.readByQuery(req.sanitizedQuery);
const meta = await metaService.getMetaForQuery('directus_files', req.sanitizedQuery);
@@ -50,7 +59,10 @@ router.get(
router.get(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new FoldersService({ accountability: req.accountability });
const service = new FoldersService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const record = await service.readByKey(primaryKey as any, req.sanitizedQuery);
@@ -63,7 +75,10 @@ router.get(
router.patch(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new FoldersService({ accountability: req.accountability });
const service = new FoldersService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const primaryKey = await service.update(req.body, pk as any);
@@ -90,7 +105,10 @@ router.delete(
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
}
const service = new FoldersService({ accountability: req.accountability });
const service = new FoldersService({
accountability: req.accountability,
schema: req.schema,
});
await service.delete(req.body as PrimaryKey[]);
return next();
}),
@@ -100,7 +118,10 @@ router.delete(
router.delete(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new FoldersService({ accountability: req.accountability });
const service = new FoldersService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
await service.delete(primaryKey as any);
return next();

View File

@@ -5,12 +5,16 @@ import asyncHandler from 'express-async-handler';
const router = Router();
router.use(asyncHandler(async (req, res) => {
const service = new GraphQLService({ accountability: req.accountability });
const schema = await service.getSchema();
router.use(
asyncHandler(async (req, res) => {
const service = new GraphQLService({
accountability: req.accountability,
schema: req.schema,
});
const schema = await service.getSchema();
graphqlHTTP({ schema, graphiql: true })(req, res);
}));
graphqlHTTP({ schema, graphiql: true })(req, res);
})
);
export default router;

View File

@@ -22,7 +22,10 @@ router.post(
throw new RouteNotFoundException(req.path);
}
const service = new ItemsService(req.collection, { accountability: req.accountability });
const service = new ItemsService(req.collection, {
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = await service.create(req.body);
try {
@@ -45,8 +48,14 @@ router.get(
'/:collection',
collectionExists,
asyncHandler(async (req, res, next) => {
const service = new ItemsService(req.collection, { accountability: req.accountability });
const metaService = new MetaService({ accountability: req.accountability });
const service = new ItemsService(req.collection, {
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
});
const records = req.singleton
? await service.readSingleton(req.sanitizedQuery)
@@ -71,7 +80,10 @@ router.get(
throw new RouteNotFoundException(req.path);
}
const service = new ItemsService(req.collection, { accountability: req.accountability });
const service = new ItemsService(req.collection, {
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const result = await service.readByKey(primaryKey as any, req.sanitizedQuery);
@@ -87,7 +99,10 @@ router.patch(
'/:collection',
collectionExists,
asyncHandler(async (req, res, next) => {
const service = new ItemsService(req.collection, { accountability: req.accountability });
const service = new ItemsService(req.collection, {
accountability: req.accountability,
schema: req.schema,
});
if (req.singleton === true) {
await service.upsertSingleton(req.body);
@@ -151,7 +166,10 @@ router.patch(
throw new RouteNotFoundException(req.path);
}
const service = new ItemsService(req.collection, { accountability: req.accountability });
const service = new ItemsService(req.collection, {
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const updatedPrimaryKey = await service.update(req.body, primaryKey as any);
@@ -180,7 +198,10 @@ router.delete(
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
}
const service = new ItemsService(req.collection, { accountability: req.accountability });
const service = new ItemsService(req.collection, {
accountability: req.accountability,
schema: req.schema,
});
await service.delete(req.body as PrimaryKey[]);
return next();
}),
@@ -191,7 +212,10 @@ router.delete(
'/:collection/:pk',
collectionExists,
asyncHandler(async (req, res, next) => {
const service = new ItemsService(req.collection, { accountability: req.accountability });
const service = new ItemsService(req.collection, {
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
await service.delete(pk as any);
return next();

View File

@@ -18,7 +18,10 @@ router.use(useCollection('directus_permissions'));
router.post(
'/',
asyncHandler(async (req, res, next) => {
const service = new PermissionsService({ accountability: req.accountability });
const service = new PermissionsService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = await service.create(req.body);
try {
@@ -39,8 +42,14 @@ router.post(
router.get(
'/',
asyncHandler(async (req, res, next) => {
const service = new PermissionsService({ accountability: req.accountability });
const metaService = new MetaService({ accountability: req.accountability });
const service = new PermissionsService({
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
});
const item = await service.readByQuery(req.sanitizedQuery);
const meta = await metaService.getMetaForQuery('directus_permissions', req.sanitizedQuery);
@@ -58,7 +67,7 @@ router.get(
throw new InvalidCredentialsException();
}
const service = new PermissionsService();
const service = new PermissionsService({ schema: req.schema });
const query = clone(req.sanitizedQuery || {});
query.filter = {
@@ -80,7 +89,10 @@ router.get(
'/:pk',
asyncHandler(async (req, res, next) => {
if (req.path.endsWith('me')) return next();
const service = new PermissionsService({ accountability: req.accountability });
const service = new PermissionsService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const record = await service.readByKey(primaryKey as any, req.sanitizedQuery);
@@ -93,7 +105,10 @@ router.get(
router.patch(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new PermissionsService({ accountability: req.accountability });
const service = new PermissionsService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const primaryKey = await service.update(req.body, pk as any);
@@ -120,7 +135,10 @@ router.delete(
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
}
const service = new PermissionsService({ accountability: req.accountability });
const service = new PermissionsService({
accountability: req.accountability,
schema: req.schema,
});
await service.delete(req.body as PrimaryKey[]);
return next();
}),
@@ -130,7 +148,10 @@ router.delete(
router.delete(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new PermissionsService({ accountability: req.accountability });
const service = new PermissionsService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
await service.delete(pk as any);
return next();

View File

@@ -13,7 +13,10 @@ router.use(useCollection('directus_presets'));
router.post(
'/',
asyncHandler(async (req, res, next) => {
const service = new PresetsService({ accountability: req.accountability });
const service = new PresetsService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = await service.create(req.body);
try {
@@ -35,8 +38,14 @@ router.post(
router.get(
'/',
asyncHandler(async (req, res, next) => {
const service = new PresetsService({ accountability: req.accountability });
const metaService = new MetaService({ accountability: req.accountability });
const service = new PresetsService({
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
});
const records = await service.readByQuery(req.sanitizedQuery);
const meta = await metaService.getMetaForQuery('directus_presets', req.sanitizedQuery);
@@ -50,7 +59,10 @@ router.get(
router.get(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new PresetsService({ accountability: req.accountability });
const service = new PresetsService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const record = await service.readByKey(pk as any, req.sanitizedQuery);
@@ -63,7 +75,10 @@ router.get(
router.patch(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new PresetsService({ accountability: req.accountability });
const service = new PresetsService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const primaryKey = await service.update(req.body, pk as any);
@@ -90,7 +105,10 @@ router.delete(
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
}
const service = new PresetsService({ accountability: req.accountability });
const service = new PresetsService({
accountability: req.accountability,
schema: req.schema,
});
await service.delete(req.body as PrimaryKey[]);
return next();
}),
@@ -100,7 +118,10 @@ router.delete(
router.delete(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new PresetsService({ accountability: req.accountability });
const service = new PresetsService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
await service.delete(pk as any);
return next();

View File

@@ -13,7 +13,10 @@ router.use(useCollection('directus_relations'));
router.post(
'/',
asyncHandler(async (req, res, next) => {
const service = new RelationsService({ accountability: req.accountability });
const service = new RelationsService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = await service.create(req.body);
try {
@@ -35,8 +38,14 @@ router.post(
router.get(
'/',
asyncHandler(async (req, res, next) => {
const service = new RelationsService({ accountability: req.accountability });
const metaService = new MetaService({ accountability: req.accountability });
const service = new RelationsService({
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
});
const records = await service.readByQuery(req.sanitizedQuery);
const meta = await metaService.getMetaForQuery(req.collection, req.sanitizedQuery);
@@ -50,7 +59,10 @@ router.get(
router.get(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new RelationsService({ accountability: req.accountability });
const service = new RelationsService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const record = await service.readByKey(pk as any, req.sanitizedQuery);
res.locals.payload = { data: record || null };
@@ -62,7 +74,10 @@ router.get(
router.patch(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new RelationsService({ accountability: req.accountability });
const service = new RelationsService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const primaryKey = await service.update(req.body, pk as any);
@@ -89,7 +104,10 @@ router.delete(
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
}
const service = new RelationsService({ accountability: req.accountability });
const service = new RelationsService({
accountability: req.accountability,
schema: req.schema,
});
await service.delete(req.body as PrimaryKey[]);
return next();
}),
@@ -99,7 +117,10 @@ router.delete(
router.delete(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new RelationsService({ accountability: req.accountability });
const service = new RelationsService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
await service.delete(pk as any);
return next();

View File

@@ -11,8 +11,14 @@ router.use(useCollection('directus_revisions'));
router.get(
'/',
asyncHandler(async (req, res, next) => {
const service = new RevisionsService({ accountability: req.accountability });
const metaService = new MetaService({ accountability: req.accountability });
const service = new RevisionsService({
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
});
const records = await service.readByQuery(req.sanitizedQuery);
const meta = await metaService.getMetaForQuery('directus_revisions', req.sanitizedQuery);
@@ -26,7 +32,10 @@ router.get(
router.get(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new RevisionsService({ accountability: req.accountability });
const service = new RevisionsService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const record = await service.readByKey(pk as any, req.sanitizedQuery);
res.locals.payload = { data: record || null };

View File

@@ -13,7 +13,10 @@ router.use(useCollection('directus_roles'));
router.post(
'/',
asyncHandler(async (req, res, next) => {
const service = new RolesService({ accountability: req.accountability });
const service = new RolesService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = await service.create(req.body);
try {
@@ -35,8 +38,14 @@ router.post(
router.get(
'/',
asyncHandler(async (req, res, next) => {
const service = new RolesService({ accountability: req.accountability });
const metaService = new MetaService({ accountability: req.accountability });
const service = new RolesService({
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
});
const records = await service.readByQuery(req.sanitizedQuery);
const meta = await metaService.getMetaForQuery('directus_roles', req.sanitizedQuery);
@@ -50,7 +59,10 @@ router.get(
router.get(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new RolesService({ accountability: req.accountability });
const service = new RolesService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const record = await service.readByKey(pk as any, req.sanitizedQuery);
res.locals.payload = { data: record || null };
@@ -62,7 +74,10 @@ router.get(
router.patch(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new RolesService({ accountability: req.accountability });
const service = new RolesService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const primaryKey = await service.update(req.body, pk as any);
@@ -89,7 +104,10 @@ router.delete(
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
}
const service = new RolesService({ accountability: req.accountability });
const service = new RolesService({
accountability: req.accountability,
schema: req.schema,
});
await service.delete(req.body as PrimaryKey[]);
return next();
}),
@@ -99,7 +117,10 @@ router.delete(
router.delete(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new RolesService({ accountability: req.accountability });
const service = new RolesService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
await service.delete(pk as any);
return next();

View File

@@ -9,7 +9,10 @@ const router = Router();
router.get(
'/specs/oas',
asyncHandler(async (req, res, next) => {
const service = new SpecificationService({ accountability: req.accountability });
const service = new SpecificationService({
accountability: req.accountability,
schema: req.schema,
});
res.locals.payload = await service.oas.generate();
return next();
}),
@@ -21,7 +24,10 @@ router.get('/ping', (req, res) => res.send('pong'));
router.get(
'/info',
asyncHandler(async (req, res, next) => {
const service = new ServerService({ accountability: req.accountability });
const service = new ServerService({
accountability: req.accountability,
schema: req.schema,
});
const data = await service.serverInfo();
res.locals.payload = { data };
return next();

View File

@@ -12,7 +12,10 @@ router.use(useCollection('directus_settings'));
router.get(
'/',
asyncHandler(async (req, res, next) => {
const service = new SettingsService({ accountability: req.accountability });
const service = new SettingsService({
accountability: req.accountability,
schema: req.schema,
});
const records = await service.readSingleton(req.sanitizedQuery);
res.locals.payload = { data: records || null };
return next();
@@ -23,7 +26,10 @@ router.get(
router.patch(
'/',
asyncHandler(async (req, res, next) => {
const service = new SettingsService({ accountability: req.accountability });
const service = new SettingsService({
accountability: req.accountability,
schema: req.schema,
});
await service.upsertSingleton(req.body);
try {

View File

@@ -18,7 +18,10 @@ router.use(useCollection('directus_users'));
router.post(
'/',
asyncHandler(async (req, res, next) => {
const service = new UsersService({ accountability: req.accountability });
const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = await service.create(req.body);
try {
@@ -40,8 +43,14 @@ router.post(
router.get(
'/',
asyncHandler(async (req, res, next) => {
const service = new UsersService({ accountability: req.accountability });
const metaService = new MetaService({ accountability: req.accountability });
const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
});
const item = await service.readByQuery(req.sanitizedQuery);
const meta = await metaService.getMetaForQuery('directus_users', req.sanitizedQuery);
@@ -59,7 +68,10 @@ router.get(
throw new InvalidCredentialsException();
}
const service = new UsersService({ accountability: req.accountability });
const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});
try {
const item = await service.readByKey(req.accountability.user, req.sanitizedQuery);
@@ -82,7 +94,10 @@ router.get(
'/:pk',
asyncHandler(async (req, res, next) => {
if (req.path.endsWith('me')) return next();
const service = new UsersService({ accountability: req.accountability });
const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const items = await service.readByKey(pk as any, req.sanitizedQuery);
res.locals.payload = { data: items || null };
@@ -98,7 +113,10 @@ router.patch(
throw new InvalidCredentialsException();
}
const service = new UsersService({ accountability: req.accountability });
const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = await service.update(req.body, req.accountability.user);
const item = await service.readByKey(primaryKey, req.sanitizedQuery);
@@ -119,7 +137,7 @@ router.patch(
throw new InvalidPayloadException(`"last_page" key is required.`);
}
const service = new UsersService();
const service = new UsersService({ schema: req.schema });
await service.update({ last_page: req.body.last_page }, req.accountability.user);
return next();
@@ -130,7 +148,10 @@ router.patch(
router.patch(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new UsersService({ accountability: req.accountability });
const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const primaryKey = await service.update(req.body, pk as any);
@@ -157,7 +178,10 @@ router.delete(
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
}
const service = new UsersService({ accountability: req.accountability });
const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});
await service.delete(req.body as PrimaryKey[]);
return next();
@@ -168,7 +192,10 @@ router.delete(
router.delete(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new UsersService({ accountability: req.accountability });
const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
await service.delete(pk as any);
@@ -191,7 +218,10 @@ router.post(
const { error } = inviteSchema.validate(req.body);
if (error) throw new InvalidPayloadException(error.message);
const service = new UsersService({ accountability: req.accountability });
const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});
await service.inviteUser(req.body.email, req.body.role);
return next();
}),
@@ -208,7 +238,10 @@ router.post(
asyncHandler(async (req, res, next) => {
const { error } = acceptInviteSchema.validate(req.body);
if (error) throw new InvalidPayloadException(error.message);
const service = new UsersService({ accountability: req.accountability });
const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});
await service.acceptInvite(req.body.token, req.body.password);
return next();
}),
@@ -226,9 +259,15 @@ router.post(
throw new InvalidPayloadException(`"password" is required`);
}
const service = new UsersService({ accountability: req.accountability });
const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});
const authService = new AuthenticationService({ accountability: req.accountability });
const authService = new AuthenticationService({
accountability: req.accountability,
schema: req.schema,
});
await authService.verifyPassword(req.accountability.user, req.body.password);
const { url, secret } = await service.enableTFA(req.accountability.user);
@@ -250,8 +289,14 @@ router.post(
throw new InvalidPayloadException(`"otp" is required`);
}
const service = new UsersService({ accountability: req.accountability });
const authService = new AuthenticationService({ accountability: req.accountability });
const service = new UsersService({
accountability: req.accountability,
schema: req.schema,
});
const authService = new AuthenticationService({
accountability: req.accountability,
schema: req.schema,
});
const otpValid = await authService.verifyOTP(req.accountability.user, req.body.otp);

View File

@@ -67,7 +67,10 @@ router.post(
const { error } = SortSchema.validate(req.body);
if (error) throw new InvalidPayloadException(error.message);
const service = new UtilsService({ accountability: req.accountability });
const service = new UtilsService({
accountability: req.accountability,
schema: req.schema,
});
await service.sort(req.collection, req.body);
return res.status(200).end();
@@ -78,7 +81,10 @@ router.post(
router.post(
'/revert/:revision',
asyncHandler(async (req, res, next) => {
const service = new RevisionsService({ accountability: req.accountability });
const service = new RevisionsService({
accountability: req.accountability,
schema: req.schema,
});
await service.revert(req.params.revision);
next();
}),

View File

@@ -13,7 +13,10 @@ router.use(useCollection('directus_webhooks'));
router.post(
'/',
asyncHandler(async (req, res, next) => {
const service = new WebhooksService({ accountability: req.accountability });
const service = new WebhooksService({
accountability: req.accountability,
schema: req.schema,
});
const primaryKey = await service.create(req.body);
try {
@@ -35,8 +38,14 @@ router.post(
router.get(
'/',
asyncHandler(async (req, res, next) => {
const service = new WebhooksService({ accountability: req.accountability });
const metaService = new MetaService({ accountability: req.accountability });
const service = new WebhooksService({
accountability: req.accountability,
schema: req.schema,
});
const metaService = new MetaService({
accountability: req.accountability,
schema: req.schema,
});
const records = await service.readByQuery(req.sanitizedQuery);
const meta = await metaService.getMetaForQuery(req.collection, req.sanitizedQuery);
@@ -50,7 +59,10 @@ router.get(
router.get(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new WebhooksService({ accountability: req.accountability });
const service = new WebhooksService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const record = await service.readByKey(pk as any, req.sanitizedQuery);
@@ -63,7 +75,10 @@ router.get(
router.patch(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new WebhooksService({ accountability: req.accountability });
const service = new WebhooksService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
const primaryKey = await service.update(req.body, pk as any);
@@ -90,7 +105,10 @@ router.delete(
throw new InvalidPayloadException(`Body has to be an array of primary keys`);
}
const service = new WebhooksService({ accountability: req.accountability });
const service = new WebhooksService({
accountability: req.accountability,
schema: req.schema,
});
await service.delete(req.body as PrimaryKey[]);
return next();
@@ -101,7 +119,10 @@ router.delete(
router.delete(
'/:pk',
asyncHandler(async (req, res, next) => {
const service = new WebhooksService({ accountability: req.accountability });
const service = new WebhooksService({
accountability: req.accountability,
schema: req.schema,
});
const pk = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
await service.delete(pk as any);

View File

@@ -1,8 +1,7 @@
import { AST, NestedCollectionNode, FieldNode, M2ONode, O2MNode } from '../types/ast';
import { AST, NestedCollectionNode, FieldNode } from '../types/ast';
import { clone, cloneDeep, uniq, pick } from 'lodash';
import database from './index';
import SchemaInspector from '@directus/schema';
import { Query, Item } from '../types';
import { Query, Item, SchemaOverview } from '../types';
import { PayloadService } from '../services/payload';
import applyQuery from '../utils/apply-query';
import Knex, { QueryBuilder } from 'knex';
@@ -16,6 +15,7 @@ type RunASTOptions = {
export default async function runAST(
originalAST: AST | NestedCollectionNode,
schema: SchemaOverview,
options?: RunASTOptions
): Promise<null | Item | Item[]> {
const ast = cloneDeep(originalAST);
@@ -47,18 +47,25 @@ export default async function runAST(
const { columnsToSelect, primaryKeyField, nestedCollectionNodes } = await parseCurrentLevel(
collection,
children,
knex
schema
);
// The actual knex query builder instance. This is a promise that resolves with the raw items from the db
const dbQuery = await getDBQuery(knex, collection, columnsToSelect, query, primaryKeyField);
const dbQuery = await getDBQuery(
knex,
collection,
columnsToSelect,
query,
primaryKeyField,
schema
);
const rawItems: Item | Item[] = await dbQuery;
if (!rawItems) return null;
// Run the items through the special transforms
const payloadService = new PayloadService(collection, { knex });
const payloadService = new PayloadService(collection, { knex, schema });
let items: null | Item | Item[] = await payloadService.processValues('read', rawItems);
if (!items || items.length === 0) return items;
@@ -78,7 +85,7 @@ export default async function runAST(
nestedNode.query.limit = -1;
}
let nestedItems = await runAST(nestedNode, { knex, child: true });
let nestedItems = await runAST(nestedNode, schema, { knex, child: true });
if (nestedItems) {
// Merge all fetched nested records with the parent items
@@ -102,15 +109,10 @@ export default async function runAST(
async function parseCurrentLevel(
collection: string,
children: (NestedCollectionNode | FieldNode)[],
knex: Knex
schema: SchemaOverview
) {
const schemaInspector = SchemaInspector(knex);
const primaryKeyField = (await schemaInspector.primary(collection)) as string;
const columnsInCollection = (await schemaInspector.columns(collection)).map(
({ column }) => column
);
const primaryKeyField = schema[collection].primary;
const columnsInCollection = Object.keys(schema[collection].columns);
const columnsToSelect: string[] = [];
const nestedCollectionNodes: NestedCollectionNode[] = [];
@@ -151,7 +153,8 @@ async function getDBQuery(
table: string,
columns: string[],
query: Query,
primaryKeyField: string
primaryKeyField: string,
schema: SchemaOverview
): Promise<QueryBuilder> {
let dbQuery = knex.select(columns.map((column) => `${table}.${column}`)).from(table);
@@ -165,7 +168,7 @@ async function getDBQuery(
query.sort = query.sort || [{ column: primaryKeyField, order: 'asc' }];
await applyQuery(knex, table, dbQuery, queryCopy);
await applyQuery(knex, table, dbQuery, queryCopy, schema);
return dbQuery;
}

View File

@@ -1,4 +1,4 @@
import { SettingsService } from './../services/settings';
import database from '../database';
import logger from '../logger';
import nodemailer, { Transporter } from 'nodemailer';
import { Liquid } from 'liquidjs';
@@ -14,8 +14,6 @@ const liquidEngine = new Liquid({
extname: '.liquid',
});
const settingsService = new SettingsService();
let transporter: Transporter;
if (env.EMAIL_TRANSPORT === 'sendmail') {
@@ -49,25 +47,28 @@ export type EmailOptions = {
* Get an object with default template options to pass to the email templates.
*/
async function getDefaultTemplateOptions() {
const projectInfo = await settingsService.readSingleton({
fields: ['project_name', 'project_logo', 'project_color'],
});
let projectLogoURL = env.PUBLIC_URL;
if (projectLogoURL.endsWith('/') === false) {
projectLogoURL += '/';
}
projectLogoURL += 'assets/${projectInfo.project_logo}';
const projectInfo = await database
.select(['project_name', 'project_logo', 'project_color'])
.from('directus_settings')
.first();
return {
projectName: projectInfo.project_name || 'Directus',
projectColor: projectInfo.project_color || '#546e7a',
projectLogo: projectInfo.project_logo
? projectLogoURL
projectName: projectInfo?.project_name || 'Directus',
projectColor: projectInfo?.project_color || '#546e7a',
projectLogo: projectInfo?.project_logo
? getProjectLogoURL(projectInfo.project_logo)
: 'https://directus.io/assets/directus-white.png',
};
function getProjectLogoURL(logoID: string) {
let projectLogoURL = env.PUBLIC_URL;
if (projectLogoURL.endsWith('/') === false) {
projectLogoURL += '/';
}
projectLogoURL += `assets/${logoID}`;
}
}
export default async function sendMail(options: EmailOptions) {

View File

@@ -11,9 +11,7 @@ import { systemCollectionRows } from '../database/system-data/collections';
const collectionExists: RequestHandler = asyncHandler(async (req, res, next) => {
if (!req.params.collection) return next();
const exists = await database.schema.hasTable(req.params.collection);
if (exists === false) {
if (req.params.collection in req.schema === false) {
throw new ForbiddenException();
}

View File

@@ -6,7 +6,7 @@ import { AbstractServiceOptions } from '../types';
*/
export class ActivityService extends ItemsService {
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
super('directus_activity', options);
}
}

View File

@@ -11,9 +11,9 @@ export class AssetsService {
accountability: Accountability | null;
authorizationService: AuthorizationService;
constructor(options?: AbstractServiceOptions) {
this.knex = options?.knex || database;
this.accountability = options?.accountability || null;
constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.accountability = options.accountability || null;
this.authorizationService = new AuthorizationService(options);
}

View File

@@ -27,10 +27,10 @@ export class AuthenticationService {
accountability: Accountability | null;
activityService: ActivityService;
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
this.knex = options?.knex || database;
this.accountability = options?.accountability || null;
this.activityService = new ActivityService();
this.activityService = new ActivityService(options);
}
/**

View File

@@ -10,8 +10,8 @@ import {
PermissionsAction,
Item,
PrimaryKey,
SchemaOverview,
} from '../types';
import SchemaInspector from '@directus/schema';
import Knex from 'knex';
import { ForbiddenException, FailedValidationException } from '../exceptions';
import { uniq, merge, flatten } from 'lodash';
@@ -26,11 +26,16 @@ export class AuthorizationService {
knex: Knex;
accountability: Accountability | null;
payloadService: PayloadService;
schema: SchemaOverview;
constructor(options?: AbstractServiceOptions) {
this.knex = options?.knex || database;
this.accountability = options?.accountability || null;
this.payloadService = new PayloadService('directus_permissions', { knex: this.knex });
constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.accountability = options.accountability || null;
this.schema = options.schema;
this.payloadService = new PayloadService('directus_permissions', {
knex: this.knex,
schema: this.schema,
});
}
async processAST(ast: AST, action: PermissionsAction = 'read'): Promise<AST> {
@@ -263,8 +268,7 @@ export class AuthorizationService {
payloads = payloads.map((payload) => merge({}, preset, payload));
const schemaInspector = SchemaInspector(this.knex);
const columns = await schemaInspector.columnInfo(collection);
const columns = Object.values(this.schema[collection].columns);
let requiredColumns: string[] = [];
@@ -273,11 +277,12 @@ export class AuthorizationService {
(await this.knex
.select<{ special: string }>('special')
.from('directus_fields')
.where({ collection, field: column.name })
.where({ collection, field: column.column_name })
.first()) ||
systemFieldRows.find(
(fieldMeta) =>
fieldMeta.field === column.name && fieldMeta.collection === collection
fieldMeta.field === column.column_name &&
fieldMeta.collection === collection
);
const specials = field?.special ? toArray(field.special) : [];
@@ -291,12 +296,11 @@ export class AuthorizationService {
const isRequired =
column.is_nullable === false &&
column.has_auto_increment === false &&
column.default_value === null &&
hasGenerateSpecial === false;
if (isRequired) {
requiredColumns.push(column.name);
requiredColumns.push(column.column_name);
}
}
@@ -389,7 +393,11 @@ export class AuthorizationService {
) {
if (this.accountability?.admin === true) return;
const itemsService = new ItemsService(collection, { accountability: this.accountability });
const itemsService = new ItemsService(collection, {
accountability: this.accountability,
knex: this.knex,
schema: this.schema,
});
try {
const query: Query = {

View File

@@ -5,10 +5,10 @@ import {
Collection,
CollectionMeta,
Relation,
SchemaOverview,
} from '../types';
import Knex from 'knex';
import { ForbiddenException, InvalidPayloadException } from '../exceptions';
import SchemaInspector from '@directus/schema';
import { FieldsService } from '../services/fields';
import { ItemsService } from '../services/items';
import cache from '../cache';
@@ -18,10 +18,12 @@ import { systemCollectionRows } from '../database/system-data/collections';
export class CollectionsService {
knex: Knex;
accountability: Accountability | null;
schema: SchemaOverview;
constructor(options?: AbstractServiceOptions) {
this.knex = options?.knex || database;
this.accountability = options?.accountability || null;
constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.accountability = options.accountability || null;
this.schema = options.schema;
}
create(data: Partial<Collection>[]): Promise<string[]>;
@@ -52,15 +54,18 @@ export class CollectionsService {
const createdCollections: string[] = [];
await this.knex.transaction(async (trx) => {
const schemaInspector = SchemaInspector(trx);
const fieldsService = new FieldsService({ knex: trx });
const fieldsService = new FieldsService({ knex: trx, schema: this.schema });
const collectionItemsService = new ItemsService('directus_collections', {
knex: trx,
accountability: this.accountability,
schema: this.schema,
});
const fieldItemsService = new ItemsService('directus_fields', {
knex: trx,
accountability: this.accountability,
schema: this.schema,
});
for (const payload of payloads) {
@@ -72,7 +77,7 @@ export class CollectionsService {
throw new InvalidPayloadException(`Collections can't start with "directus_"`);
}
if (await schemaInspector.hasTable(payload.collection)) {
if (payload.collection in this.schema) {
throw new InvalidPayloadException(
`Collection "${payload.collection}" already exists.`
);
@@ -112,6 +117,7 @@ export class CollectionsService {
const collectionItemsService = new ItemsService('directus_collections', {
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
const collectionKeys = toArray(collection);
@@ -164,7 +170,10 @@ export class CollectionsService {
/** @todo, read by query without query support is a bit ironic, isnt it */
async readByQuery(): Promise<Collection[]> {
const collectionItemsService = new ItemsService('directus_collections');
const collectionItemsService = new ItemsService('directus_collections', {
knex: this.knex,
schema: this.schema,
});
let tablesInDatabase = await schemaInspector.tableInfo();
if (this.accountability && this.accountability.admin !== true) {
@@ -216,6 +225,7 @@ export class CollectionsService {
const collectionItemsService = new ItemsService('directus_collections', {
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
if (data && key) {
@@ -273,9 +283,10 @@ export class CollectionsService {
const fieldsService = new FieldsService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
const tablesInDatabase = await schemaInspector.tables();
const tablesInDatabase = Object.keys(this.schema);
const collectionKeys = toArray(collection);
@@ -319,7 +330,9 @@ export class CollectionsService {
const collectionItemsService = new ItemsService('directus_collections', {
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
await collectionItemsService.delete(collectionKeys);
for (const collectionKey of collectionKeys) {

View File

@@ -1,6 +1,12 @@
import database, { schemaInspector } from '../database';
import { Field } from '../types/field';
import { Accountability, AbstractServiceOptions, FieldMeta, Relation } from '../types';
import {
Accountability,
AbstractServiceOptions,
FieldMeta,
Relation,
SchemaOverview,
} from '../types';
import { ItemsService } from '../services/items';
import { ColumnBuilder } from 'knex';
import getLocalType from '../utils/get-local-type';
@@ -23,18 +29,23 @@ export class FieldsService {
itemsService: ItemsService;
payloadService: PayloadService;
schemaInspector: typeof schemaInspector;
schema: SchemaOverview;
constructor(options?: AbstractServiceOptions) {
this.knex = options?.knex || database;
this.schemaInspector = options?.knex ? SchemaInspector(options.knex) : schemaInspector;
this.accountability = options?.accountability || null;
constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.schemaInspector = options.knex ? SchemaInspector(options.knex) : schemaInspector;
this.accountability = options.accountability || null;
this.itemsService = new ItemsService('directus_fields', options);
this.payloadService = new PayloadService('directus_fields');
this.payloadService = new PayloadService('directus_fields', options);
this.schema = options.schema;
}
async readAll(collection?: string): Promise<Field[]> {
let fields: FieldMeta[];
const nonAuthorizedItemsService = new ItemsService('directus_fields', { knex: this.knex });
const nonAuthorizedItemsService = new ItemsService('directus_fields', {
knex: this.knex,
schema: this.schema,
});
if (collection) {
fields = (await nonAuthorizedItemsService.readByQuery({
@@ -50,7 +61,7 @@ export class FieldsService {
fields.push(...systemFieldRows);
}
let columns = await schemaInspector.columnInfo(collection);
let columns = await this.schemaInspector.columnInfo(collection);
columns = columns.map((column) => {
return {
@@ -179,7 +190,7 @@ export class FieldsService {
);
try {
column = await schemaInspector.columnInfo(collection, field);
column = await this.schemaInspector.columnInfo(collection, field);
column.default_value = getDefaultValue(column);
} catch {}
@@ -204,7 +215,7 @@ export class FieldsService {
}
// Check if field already exists, either as a column, or as a row in directus_fields
if (await this.schemaInspector.hasColumn(collection, field.field)) {
if (field.field in this.schema[collection].columns) {
throw new InvalidPayloadException(
`Field "${field.field}" already exists in collection "${collection}"`
);
@@ -265,8 +276,8 @@ export class FieldsService {
const type = field.type as 'float' | 'decimal';
column = table[type](
field.field,
field.schema?.precision || 10,
field.schema?.scale || 5
field.schema?.numeric_precision || 10,
field.schema?.numeric_scale || 5
);
} else if (field.type === 'csv') {
column = table.string(field.field);
@@ -335,7 +346,7 @@ export class FieldsService {
await this.knex('directus_fields').delete().where({ collection, field });
if (await schemaInspector.hasColumn(collection, field)) {
if (field in this.schema[collection].columns) {
await this.knex.schema.table(collection, (table) => {
table.dropColumn(field);
});

View File

@@ -13,7 +13,7 @@ import { extension } from 'mime-types';
import path from 'path';
export class FilesService extends ItemsService {
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
super('directus_files', options);
}
@@ -81,7 +81,10 @@ export class FilesService extends ItemsService {
// We do this in a service without accountability. Even if you don't have update permissions to the file,
// we still want to be able to set the extracted values from the file on create
const sudoService = new ItemsService('directus_files');
const sudoService = new ItemsService('directus_files', {
knex: this.knex,
schema: this.schema,
});
await sudoService.update(payload, primaryKey);
if (cache) {

View File

@@ -2,7 +2,7 @@ import { ItemsService } from './items';
import { AbstractServiceOptions } from '../types';
export class FoldersService extends ItemsService {
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
super('directus_folders', options);
}
}

View File

@@ -7,6 +7,7 @@ import {
Field,
Relation,
Query,
SchemaOverview,
} from '../types';
import {
GraphQLString,
@@ -56,13 +57,15 @@ export class GraphQLService {
fieldsService: FieldsService;
collectionsService: CollectionsService;
relationsService: RelationsService;
schema: SchemaOverview;
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
this.accountability = options?.accountability || null;
this.knex = options?.knex || database;
this.fieldsService = new FieldsService(options);
this.collectionsService = new CollectionsService(options);
this.relationsService = new RelationsService(options);
this.schema = options.schema;
}
args = {
@@ -454,6 +457,7 @@ export class GraphQLService {
service = new ActivityService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
// case 'directus_collections':
// service = new CollectionsService({ knex: this.knex, accountability: this.accountability });
@@ -463,61 +467,73 @@ export class GraphQLService {
service = new FilesService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
case 'directus_folders':
service = new FoldersService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
case 'directus_folders':
service = new FoldersService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
case 'directus_permissions':
service = new PermissionsService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
case 'directus_presets':
service = new PresetsService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
case 'directus_relations':
service = new RelationsService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
case 'directus_revisions':
service = new RevisionsService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
case 'directus_roles':
service = new RolesService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
case 'directus_settings':
service = new SettingsService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
case 'directus_users':
service = new UsersService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
case 'directus_webhooks':
service = new WebhooksService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
default:
service = new ItemsService(collection, {
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
}

View File

@@ -1,5 +1,4 @@
import database from '../database';
import SchemaInspector from '@directus/schema';
import runAST from '../database/run-ast';
import getASTFromQuery from '../utils/get-ast-from-query';
import {
@@ -11,6 +10,7 @@ import {
PrimaryKey,
AbstractService,
AbstractServiceOptions,
SchemaOverview,
} from '../types';
import Knex from 'knex';
import cache from '../cache';
@@ -31,17 +31,16 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
knex: Knex;
accountability: Accountability | null;
eventScope: string;
schemaInspector: ReturnType<typeof SchemaInspector>;
schema: SchemaOverview;
constructor(collection: string, options?: AbstractServiceOptions) {
constructor(collection: string, options: AbstractServiceOptions) {
this.collection = collection;
this.knex = options?.knex || database;
this.accountability = options?.accountability || null;
this.knex = options.knex || database;
this.accountability = options.accountability || null;
this.eventScope = this.collection.startsWith('directus_')
? this.collection.substring(9)
: 'items';
this.schemaInspector = SchemaInspector(this.knex);
this.schema = options.schema;
return this;
}
@@ -49,8 +48,8 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
async create(data: Partial<Item>[]): Promise<PrimaryKey[]>;
async create(data: Partial<Item>): Promise<PrimaryKey>;
async create(data: Partial<Item> | Partial<Item>[]): Promise<PrimaryKey | PrimaryKey[]> {
const primaryKeyField = (await this.schemaInspector.primary(this.collection)) as string;
const columns = await this.schemaInspector.columns(this.collection);
const primaryKeyField = this.schema[this.collection].primary;
const columns = Object.keys(this.schema[this.collection].columns);
let payloads: AnyItem[] = clone(toArray(data));
@@ -58,6 +57,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
const payloadService = new PayloadService(this.collection, {
accountability: this.accountability,
knex: trx,
schema: this.schema,
});
const customProcessed = await emitter.emitAsync(
@@ -81,6 +81,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
const authorizationService = new AuthorizationService({
accountability: this.accountability,
knex: trx,
schema: this.schema,
});
payloads = await authorizationService.validatePayload(
@@ -92,12 +93,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
payloads = await payloadService.processM2O(payloads);
let payloadsWithoutAliases = payloads.map((payload) =>
pick(
payload,
columns.map(({ column }) => column)
)
);
let payloadsWithoutAliases = payloads.map((payload) => pick(payload, columns));
payloadsWithoutAliases = await payloadService.processValues(
'create',
@@ -198,9 +194,10 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
const authorizationService = new AuthorizationService({
accountability: this.accountability,
knex: this.knex,
schema: this.schema,
});
let ast = await getASTFromQuery(this.collection, query, {
let ast = await getASTFromQuery(this.collection, query, this.schema, {
accountability: this.accountability,
knex: this.knex,
});
@@ -209,7 +206,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
ast = await authorizationService.processAST(ast);
}
const records = await runAST(ast, { knex: this.knex });
const records = await runAST(ast, this.schema, { knex: this.knex });
return records as Partial<Item> | Partial<Item>[] | null;
}
@@ -229,7 +226,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
action: PermissionsAction = 'read'
): Promise<null | Partial<Item> | Partial<Item>[]> {
query = clone(query);
const primaryKeyField = (await this.schemaInspector.primary(this.collection)) as string;
const primaryKeyField = this.schema[this.collection].primary;
const keys = toArray(key);
if (keys.length === 1) {
@@ -246,7 +243,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
},
};
let ast = await getASTFromQuery(this.collection, queryWithFilter, {
let ast = await getASTFromQuery(this.collection, queryWithFilter, this.schema, {
accountability: this.accountability,
action,
knex: this.knex,
@@ -256,12 +253,13 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
const authorizationService = new AuthorizationService({
accountability: this.accountability,
knex: this.knex,
schema: this.schema,
});
ast = await authorizationService.processAST(ast, action);
}
const result = await runAST(ast, { knex: this.knex });
const result = await runAST(ast, this.schema, { knex: this.knex });
if (result === null) throw new ForbiddenException();
@@ -275,8 +273,8 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
data: Partial<Item> | Partial<Item>[],
key?: PrimaryKey | PrimaryKey[]
): Promise<PrimaryKey | PrimaryKey[]> {
const primaryKeyField = (await this.schemaInspector.primary(this.collection)) as string;
const columns = await this.schemaInspector.columns(this.collection);
const primaryKeyField = this.schema[this.collection].primary;
const columns = Object.keys(this.schema[this.collection].columns);
// Updating one or more items to the same payload
if (data && key) {
@@ -305,6 +303,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
const authorizationService = new AuthorizationService({
accountability: this.accountability,
knex: this.knex,
schema: this.schema,
});
await authorizationService.checkAccess('update', this.collection, keys);
@@ -320,14 +319,12 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
const payloadService = new PayloadService(this.collection, {
accountability: this.accountability,
knex: trx,
schema: this.schema,
});
payload = await payloadService.processM2O(payload);
let payloadWithoutAliases = pick(
payload,
columns.map(({ column }) => column)
);
let payloadWithoutAliases = pick(payload, columns);
payloadWithoutAliases = await payloadService.processValues(
'update',
@@ -370,7 +367,10 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
activityPrimaryKeys.push(primaryKey);
}
const itemsService = new ItemsService(this.collection, { knex: trx });
const itemsService = new ItemsService(this.collection, {
knex: trx,
schema: this.schema,
});
const snapshots = await itemsService.readByKey(keys);
const revisionRecords = activityPrimaryKeys.map((key, index) => ({
@@ -412,6 +412,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
const itemsService = new ItemsService(this.collection, {
accountability: this.accountability,
knex: trx,
schema: this.schema,
});
const payloads = toArray(data);
@@ -434,12 +435,15 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
}
async updateByQuery(data: Partial<Item>, query: Query): Promise<PrimaryKey[]> {
const primaryKeyField = (await this.schemaInspector.primary(this.collection)) as string;
const primaryKeyField = this.schema[this.collection].primary;
const readQuery = cloneDeep(query);
readQuery.fields = [primaryKeyField];
// Not authenticated:
const itemsService = new ItemsService(this.collection, { knex: this.knex });
const itemsService = new ItemsService(this.collection, {
knex: this.knex,
schema: this.schema,
});
let itemsToUpdate = await itemsService.readByQuery(readQuery);
itemsToUpdate = toArray(itemsToUpdate);
@@ -454,7 +458,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
upsert(data: Partial<Item>[]): Promise<PrimaryKey[]>;
upsert(data: Partial<Item>): Promise<PrimaryKey>;
async upsert(data: Partial<Item> | Partial<Item>[]): Promise<PrimaryKey | PrimaryKey[]> {
const primaryKeyField = (await this.schemaInspector.primary(this.collection)) as string;
const primaryKeyField = this.schema[this.collection].primary;
const payloads = toArray(data);
const primaryKeys: PrimaryKey[] = [];
@@ -484,11 +488,12 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
delete(keys: PrimaryKey[]): Promise<PrimaryKey[]>;
async delete(key: PrimaryKey | PrimaryKey[]): Promise<PrimaryKey | PrimaryKey[]> {
const keys = toArray(key);
const primaryKeyField = (await this.schemaInspector.primary(this.collection)) as string;
const primaryKeyField = this.schema[this.collection].primary;
if (this.accountability && this.accountability.admin !== true) {
const authorizationService = new AuthorizationService({
accountability: this.accountability,
schema: this.schema,
});
await authorizationService.checkAccess('delete', this.collection, key);
@@ -539,12 +544,15 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
}
async deleteByQuery(query: Query): Promise<PrimaryKey[]> {
const primaryKeyField = (await this.schemaInspector.primary(this.collection)) as string;
const primaryKeyField = this.schema[this.collection].primary;
const readQuery = cloneDeep(query);
readQuery.fields = [primaryKeyField];
// Not authenticated:
const itemsService = new ItemsService(this.collection);
const itemsService = new ItemsService(this.collection, {
knex: this.knex,
schema: this.schema,
});
let itemsToDelete = await itemsService.readByQuery(readQuery);
itemsToDelete = toArray(itemsToDelete);
@@ -562,17 +570,17 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
const record = (await this.readByQuery(query)) as Partial<Item>;
if (!record) {
let columns = await this.schemaInspector.columnInfo(this.collection);
let columns = Object.values(this.schema[this.collection].columns);
const defaults: Record<string, any> = {};
if (query.fields && query.fields.includes('*') === false) {
columns = columns.filter((column) => {
return query.fields!.includes(column.name);
return query.fields!.includes(column.column_name);
});
}
for (const column of columns) {
defaults[column.name] = getDefaultValue(column);
defaults[column.column_name] = getDefaultValue(column);
}
return defaults as Partial<Item>;
@@ -582,7 +590,7 @@ export class ItemsService<Item extends AnyItem = AnyItem> implements AbstractSer
}
async upsertSingleton(data: Partial<Item>) {
const primaryKeyField = (await this.schemaInspector.primary(this.collection)) as string;
const primaryKeyField = this.schema[this.collection].primary;
const record = await this.knex
.select(primaryKeyField)

View File

@@ -7,12 +7,18 @@ import argon2 from 'argon2';
import { v4 as uuidv4 } from 'uuid';
import database from '../database';
import { clone, isObject, cloneDeep } from 'lodash';
import { Relation, Item, AbstractServiceOptions, Accountability, PrimaryKey } from '../types';
import {
Relation,
Item,
AbstractServiceOptions,
Accountability,
PrimaryKey,
SchemaOverview,
} from '../types';
import { ItemsService } from './items';
import { URL } from 'url';
import Knex from 'knex';
import env from '../env';
import SchemaInspector from '@directus/schema';
import getLocalType from '../utils/get-local-type';
import { format, formatISO } from 'date-fns';
import { ForbiddenException } from '../exceptions';
@@ -36,11 +42,13 @@ export class PayloadService {
accountability: Accountability | null;
knex: Knex;
collection: string;
schema: SchemaOverview;
constructor(collection: string, options?: AbstractServiceOptions) {
this.accountability = options?.accountability || null;
this.knex = options?.knex || database;
constructor(collection: string, options: AbstractServiceOptions) {
this.accountability = options.accountability || null;
this.knex = options.knex || database;
this.collection = collection;
this.schema = options.schema;
return this;
}
@@ -239,11 +247,10 @@ export class PayloadService {
* shouldn't return with time / timezone info respectively
*/
async processDates(payloads: Partial<Record<string, any>>[]) {
const schemaInspector = SchemaInspector(this.knex);
const columnsInCollection = await schemaInspector.columnInfo(this.collection);
const columnsInCollection = Object.values(this.schema[this.collection].columns);
const columnsWithType = columnsInCollection.map((column) => ({
name: column.name,
name: column.column_name,
type: getLocalType(column),
}));
@@ -320,6 +327,7 @@ export class PayloadService {
const itemsService = new ItemsService(relation.one_collection, {
accountability: this.accountability,
knex: this.knex,
schema: this.schema,
});
const relatedRecord: Partial<Item> = payload[relation.many_field];
@@ -374,6 +382,7 @@ export class PayloadService {
const itemsService = new ItemsService(relation.many_collection, {
accountability: this.accountability,
knex: this.knex,
schema: this.schema,
});
const relatedRecords: Partial<Item>[] = [];

View File

@@ -2,7 +2,7 @@ import { AbstractServiceOptions, PermissionsAction } from '../types';
import { ItemsService } from '../services/items';
export class PermissionsService extends ItemsService {
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
super('directus_permissions', options);
}

View File

@@ -2,7 +2,7 @@ import { ItemsService } from './items';
import { AbstractServiceOptions } from '../types';
export class PresetsService extends ItemsService {
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
super('directus_presets', options);
}
}

View File

@@ -16,13 +16,16 @@ type ParsedRelation = Relation & {
export class RelationsService extends ItemsService {
permissionsService: PermissionsService;
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
super('directus_relations', options);
this.permissionsService = new PermissionsService(options);
}
async readByQuery(query: Query): Promise<null | Relation | Relation[]> {
const service = new ItemsService('directus_relations', { knex: this.knex });
const service = new ItemsService('directus_relations', {
knex: this.knex,
schema: this.schema,
});
const results = (await service.readByQuery(query)) as
| ParsedRelation
| ParsedRelation[]
@@ -48,7 +51,10 @@ export class RelationsService extends ItemsService {
query: Query = {},
action: PermissionsAction = 'read'
): Promise<null | Relation | Relation[]> {
const service = new ItemsService('directus_relations', { knex: this.knex });
const service = new ItemsService('directus_relations', {
knex: this.knex,
schema: this.schema,
});
const results = (await service.readByKey(key as any, query, action)) as
| ParsedRelation
| ParsedRelation[]

View File

@@ -7,7 +7,7 @@ import { InvalidPayloadException, ForbiddenException } from '../exceptions';
*/
export class RevisionsService extends ItemsService {
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
super('directus_revisions', options);
}
@@ -21,7 +21,9 @@ export class RevisionsService extends ItemsService {
const service = new ItemsService(revision.collection, {
accountability: this.accountability,
knex: this.knex,
schema: this.schema,
});
await service.update(revision.data, revision.item);
}
}

View File

@@ -7,7 +7,7 @@ import { UnprocessableEntityException } from '../exceptions';
import { toArray } from '../utils/to-array';
export class RolesService extends ItemsService {
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
super('directus_roles', options);
}
@@ -31,6 +31,7 @@ export class RolesService extends ItemsService {
const permissionsService = new PermissionsService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
const permissionsForRole = (await permissionsService.readByQuery({
@@ -45,6 +46,7 @@ export class RolesService extends ItemsService {
const presetsService = new PresetsService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
const presetsForRole = (await presetsService.readByQuery({
@@ -59,6 +61,7 @@ export class RolesService extends ItemsService {
const usersService = new UsersService({
knex: this.knex,
accountability: this.accountability,
schema: this.schema,
});
const usersInRole = (await usersService.readByQuery({

View File

@@ -1,4 +1,4 @@
import { AbstractServiceOptions, Accountability } from '../types';
import { AbstractServiceOptions, Accountability, SchemaOverview } from '../types';
import Knex from 'knex';
import database from '../database';
import os from 'os';
@@ -11,11 +11,13 @@ export class ServerService {
knex: Knex;
accountability: Accountability | null;
settingsService: SettingsService;
schema: SchemaOverview;
constructor(options?: AbstractServiceOptions) {
this.knex = options?.knex || database;
this.accountability = options?.accountability || null;
this.settingsService = new SettingsService({ knex: this.knex });
constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.accountability = options.accountability || null;
this.schema = options.schema;
this.settingsService = new SettingsService({ knex: this.knex, schema: this.schema });
}
async serverInfo() {

View File

@@ -2,7 +2,7 @@ import { ItemsService } from './items';
import { AbstractServiceOptions } from '../types';
export class SettingsService extends ItemsService {
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
super('directus_settings', options);
}
}

View File

@@ -5,6 +5,7 @@ import {
Field,
Permission,
Relation,
SchemaOverview,
types,
} from '../types';
import { CollectionsService } from './collections';
@@ -32,6 +33,7 @@ import { getRelationType } from '../utils/get-relation-type';
export class SpecificationService {
accountability: Accountability | null;
knex: Knex;
schema: SchemaOverview;
fieldsService: FieldsService;
collectionsService: CollectionsService;
@@ -39,16 +41,17 @@ export class SpecificationService {
oas: OASService;
constructor(options?: AbstractServiceOptions) {
this.accountability = options?.accountability || null;
this.knex = options?.knex || database;
constructor(options: AbstractServiceOptions) {
this.accountability = options.accountability || null;
this.knex = options.knex || database;
this.schema = options.schema;
this.fieldsService = new FieldsService(options);
this.collectionsService = new CollectionsService(options);
this.relationsService = new RelationsService(options);
this.oas = new OASService(
{ knex: this.knex, accountability: this.accountability },
{ knex: this.knex, accountability: this.accountability, schema: this.schema },
{
fieldsService: this.fieldsService,
collectionsService: this.collectionsService,
@@ -83,7 +86,7 @@ class OASService implements SpecificationSubService {
}
) {
this.accountability = options.accountability || null;
this.knex = options?.knex || database;
this.knex = options.knex || database;
this.fieldsService = fieldsService;
this.collectionsService = collectionsService;

View File

@@ -9,7 +9,7 @@ import {
ForbiddenException,
UnprocessableEntityException,
} from '../exceptions';
import { Accountability, PrimaryKey, Item, AbstractServiceOptions } from '../types';
import { Accountability, PrimaryKey, Item, AbstractServiceOptions, SchemaOverview } from '../types';
import Knex from 'knex';
import env from '../env';
import cache from '../cache';
@@ -18,14 +18,16 @@ import { toArray } from '../utils/to-array';
export class UsersService extends ItemsService {
knex: Knex;
accountability: Accountability | null;
schema: SchemaOverview;
service: ItemsService;
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
super('directus_users', options);
this.knex = options?.knex || database;
this.accountability = options?.accountability || null;
this.knex = options.knex || database;
this.accountability = options.accountability || null;
this.service = new ItemsService('directus_users', options);
this.schema = options.schema;
}
update(data: Partial<Item>, keys: PrimaryKey[]): Promise<PrimaryKey[]>;
@@ -174,7 +176,11 @@ export class UsersService extends ItemsService {
throw new InvalidPayloadException('TFA Secret is already set for this user');
}
const authService = new AuthenticationService();
const authService = new AuthenticationService({
knex: this.knex,
schema: this.schema,
accountability: this.accountability,
});
const secret = authService.generateTFASecret();
await this.knex('directus_users').update({ tfa_secret: secret }).where({ id: pk });

View File

@@ -1,22 +1,21 @@
import { AbstractServiceOptions, Accountability, PrimaryKey } from '../types';
import { AbstractServiceOptions, Accountability, PrimaryKey, SchemaOverview } from '../types';
import database from '../database';
import Knex from 'knex';
import { InvalidPayloadException, ForbiddenException } from '../exceptions';
import SchemaInspector from '@directus/schema';
import { systemCollectionRows } from '../database/system-data/collections';
export class UtilsService {
knex: Knex;
accountability: Accountability | null;
schema: SchemaOverview;
constructor(options?: AbstractServiceOptions) {
this.knex = options?.knex || database;
this.accountability = options?.accountability || null;
constructor(options: AbstractServiceOptions) {
this.knex = options.knex || database;
this.accountability = options.accountability || null;
this.schema = options.schema;
}
async sort(collection: string, { item, to }: { item: PrimaryKey; to: PrimaryKey }) {
const schemaInspector = SchemaInspector(this.knex);
const sortFieldResponse =
(await this.knex
.select('sort_field')
@@ -54,7 +53,7 @@ export class UtilsService {
}
}
const primaryKeyField = (await schemaInspector.primary(collection)) as string;
const primaryKeyField = this.schema[collection].primary;
// Make sure all rows have a sort value
const countResponse = await this.knex

View File

@@ -1,79 +1,18 @@
import { ItemsService } from './items';
import { Item, PrimaryKey, AbstractServiceOptions } from '../types';
import emitter from '../emitter';
import { ListenerFn } from 'eventemitter2';
import { Webhook } from '../types';
import axios from 'axios';
import logger from '../logger';
let registered: { event: string; handler: ListenerFn }[] = [];
import { register } from '../webhooks';
export class WebhooksService extends ItemsService {
constructor(options?: AbstractServiceOptions) {
constructor(options: AbstractServiceOptions) {
super('directus_webhooks', options);
}
async register() {
this.unregister();
const webhooks = await this.knex
.select<Webhook[]>('*')
.from('directus_webhooks')
.where({ status: 'active' });
for (const webhook of webhooks) {
if (webhook.actions === '*') {
const event = 'items.*';
const handler = this.createHandler(webhook);
emitter.on(event, handler);
registered.push({ event, handler });
} else {
for (const action of webhook.actions.split(',')) {
const event = `items.${action}`;
const handler = this.createHandler(webhook);
emitter.on(event, handler);
registered.push({ event, handler });
}
}
}
}
unregister() {
for (const { event, handler } of registered) {
emitter.off(event, handler);
}
registered = [];
}
createHandler(webhook: Webhook): ListenerFn {
return async (data) => {
const collectionAllowList = webhook.collections.split(',');
if (
collectionAllowList.includes('*') === false &&
collectionAllowList.includes(data.collection) === false
)
return;
try {
await axios({
url: webhook.url,
method: webhook.method,
data: webhook.data ? data : null,
});
} catch (error) {
logger.warn(`Webhook "${webhook.name}" (id: ${webhook.id}) failed`);
logger.warn(error);
}
};
}
async create(data: Partial<Item>[]): Promise<PrimaryKey[]>;
async create(data: Partial<Item>): Promise<PrimaryKey>;
async create(data: Partial<Item> | Partial<Item>[]): Promise<PrimaryKey | PrimaryKey[]> {
const result = await super.create(data);
await this.register();
await register();
return result;
}
@@ -87,7 +26,7 @@ export class WebhooksService extends ItemsService {
): Promise<PrimaryKey | PrimaryKey[]> {
const result = await super.update(data, key as any);
await this.register();
await register();
return result;
}
@@ -97,7 +36,7 @@ export class WebhooksService extends ItemsService {
async delete(key: PrimaryKey | PrimaryKey[]): Promise<PrimaryKey | PrimaryKey[]> {
const result = await super.delete(key as any);
await this.register();
await register();
return result;
}

View File

@@ -15,3 +15,4 @@ export * from './revision';
export * from './services';
export * from './sessions';
export * from './webhooks';
export * from './schema';

3
api/src/types/schema.ts Normal file
View File

@@ -0,0 +1,3 @@
import { SchemaOverview as SO } from '@directus/schema/dist/types/overview';

// Re-export @directus/schema's overview type under the local `types` barrel so
// the rest of the codebase doesn't depend on the package's internal dist path.
// NOTE(review): the `SO` alias + `export type` indirection re-exposes the
// imported name; presumably done to keep this a type-only export — confirm.
export type SchemaOverview = SO;

View File

@@ -3,10 +3,12 @@ import { Accountability } from './accountability';
import { Item, PrimaryKey } from './items';
import { Query } from './query';
import { PermissionsAction } from './permissions';
import { SchemaOverview } from '../types';
export type AbstractServiceOptions = {
knex?: Knex;
accountability?: Accountability | null;
schema: SchemaOverview;
};
export interface AbstractService {

View File

@@ -1,6 +1,5 @@
import { QueryBuilder } from 'knex';
import { Query, Filter, Relation } from '../types';
import { schemaInspector } from '../database';
import { Query, Filter, Relation, SchemaOverview } from '../types';
import Knex from 'knex';
import { clone, isPlainObject } from 'lodash';
import { systemRelationRows } from '../database/system-data/relations';
@@ -9,7 +8,8 @@ export default async function applyQuery(
knex: Knex,
collection: string,
dbQuery: QueryBuilder,
query: Query
query: Query,
schema: SchemaOverview
) {
if (query.filter) {
await applyFilter(knex, dbQuery, query.filter, collection);
@@ -36,18 +36,18 @@ export default async function applyQuery(
}
if (query.search) {
const columns = await schemaInspector.columnInfo(collection);
const columns = Object.values(schema[collection].columns);
dbQuery.andWhere(function () {
columns
/** @todo Check if this scales between SQL vendors */
.filter(
(column) =>
column.type.toLowerCase().includes('text') ||
column.type.toLowerCase().includes('char')
column.data_type.toLowerCase().includes('text') ||
column.data_type.toLowerCase().includes('char')
)
.forEach((column) => {
this.orWhereRaw(`LOWER(??) LIKE ?`, [column.name, `%${query.search!}%`]);
this.orWhereRaw(`LOWER(??) LIKE ?`, [column.column_name, `%${query.search!}%`]);
});
});
}

View File

@@ -10,11 +10,11 @@ import {
Relation,
PermissionsAction,
Accountability,
SchemaOverview,
} from '../types';
import database from '../database';
import { cloneDeep } from 'lodash';
import Knex from 'knex';
import SchemaInspector from '@directus/schema';
import { getRelationType } from '../utils/get-relation-type';
import { systemFieldRows } from '../database/system-data/fields';
import { systemRelationRows } from '../database/system-data/relations';
@@ -28,6 +28,7 @@ type GetASTOptions = {
export default async function getASTFromQuery(
collection: string,
query: Query,
schema: SchemaOverview,
options?: GetASTOptions
): Promise<AST> {
query = cloneDeep(query);
@@ -35,7 +36,6 @@ export default async function getASTFromQuery(
const accountability = options?.accountability;
const action = options?.action || 'read';
const knex = options?.knex || database;
const schemaInspector = SchemaInspector(knex);
/**
* we might not need al this info at all times, but it's easier to fetch it all once, than trying to fetch it for every
@@ -143,7 +143,7 @@ export default async function getASTFromQuery(
children: {},
query: {},
relatedKey: {},
parentKey: (await schemaInspector.primary(parentCollection)) as string,
parentKey: schema[parentCollection].primary,
fieldKey: relationalField,
relation: relation,
};
@@ -154,9 +154,7 @@ export default async function getASTFromQuery(
nestedFields
);
child.query[relatedCollection] = {};
child.relatedKey[relatedCollection] = (await schemaInspector.primary(
relatedCollection
)) as string;
child.relatedKey[relatedCollection] = schema[relatedCollection].primary;
}
} else if (relatedCollection) {
if (
@@ -172,8 +170,8 @@ export default async function getASTFromQuery(
type: relationType,
name: relatedCollection,
fieldKey: relationalField,
parentKey: (await schemaInspector.primary(parentCollection)) as string,
relatedKey: (await schemaInspector.primary(relatedCollection)) as string,
parentKey: schema[parentCollection].primary,
relatedKey: schema[relatedCollection].primary,
relation: relation,
query: deep?.[relationalField] || {},
children: await parseFields(relatedCollection, nestedFields),
@@ -281,7 +279,7 @@ export default async function getASTFromQuery(
}
async function getFieldsInCollection(collection: string) {
const columns = (await schemaInspector.columns(collection)).map((column) => column.column);
const columns = Object.keys(schema[collection].columns);
const fields = [
...(await knex.select('field').from('directus_fields').where({ collection })).map(
(field) => field.field

View File

@@ -1,11 +1,13 @@
import { Column } from '@directus/schema/dist/types/column';
import getLocalType from './get-local-type';
import { Column } from '@directus/schema/dist/types/column';
import { SchemaOverview } from '../types';
export default function getDefaultValue(column: Column) {
export default function getDefaultValue(
column: SchemaOverview[string]['columns'][string] | Column
) {
const type = getLocalType(column);
let defaultValue = column.default_value || null;
if (defaultValue === null) return null;
// Check if the default is wrapped in an extra pair of quotes, this happens in SQLite

View File

@@ -1,5 +1,5 @@
import { FieldMeta, types, SchemaOverview } from '../types';
import { Column } from '@directus/schema/dist/types/column';
import { FieldMeta, types } from '../types';
/**
* Typemap graciously provided by @gpetrov
@@ -81,13 +81,17 @@ const localTypeMap: Record<string, { type: typeof types[number]; useTimezone?: b
};
export default function getLocalType(
column: Column,
column: SchemaOverview[string]['columns'][string] | Column,
field?: FieldMeta
): typeof types[number] | 'unknown' {
const type = localTypeMap[column.type.toLowerCase().split('(')[0]];
const type = localTypeMap[column.data_type.toLowerCase().split('(')[0]];
/** Handle Postgres numeric decimals */
if (column.type === 'numeric' && column.precision !== null && column.scale !== null) {
if (
column.data_type === 'numeric' &&
column.numeric_precision !== null &&
column.numeric_scale !== null
) {
return 'decimal';
}

View File

@@ -1,45 +0,0 @@
import database, { schemaInspector } from '../database';
import { uniq } from 'lodash';
import { systemFieldRows } from '../database/system-data/fields';
export default async function hasFields(fields: { collection: string; field: string }[]) {
const fieldsObject: { [collection: string]: string[] } = {};
fields.forEach(({ field, collection }) => {
if (fieldsObject.hasOwnProperty(collection) === false) {
fieldsObject[collection] = [];
}
fieldsObject[collection].push(field);
});
await Promise.all(
Object.entries(fieldsObject).map(([collection, fields]) =>
collectionHasFields(collection, fields)
)
);
return true;
}
export async function collectionHasFields(collection: string, fieldKeys: string[]) {
const [columns, fields] = await Promise.all([
schemaInspector.columns(collection),
database
.select('field')
.from('directus_fields')
.where({ collection })
.whereIn('field', fieldKeys),
]);
const existingFields = uniq([
...columns.map(({ column }) => column),
...fields.map(({ field }) => field),
...systemFieldRows
.filter((fieldMeta) => fieldMeta.collection === collection)
.map((fieldMeta) => fieldMeta.field),
]);
for (const key of fieldKeys) {
if (existingFields.includes(key) === false) throw new Error(key);
}
}

63
api/src/webhooks.ts Normal file
View File

@@ -0,0 +1,63 @@
import { Webhook } from './types';
import emitter from './emitter';
import database from './database';
import { ListenerFn } from 'eventemitter2';
import axios from 'axios';
import logger from './logger';
// Listeners currently attached to the emitter for active webhooks; tracked so
// they can be detached again on the next (re-)registration — see unregister().
let registered: { event: string; handler: ListenerFn }[] = [];

/**
 * Attach an emitter listener for every active webhook in `directus_webhooks`.
 *
 * Previously registered listeners are removed first, so calling this after a
 * webhook is created / updated / deleted refreshes the full listener set
 * without duplicating handlers.
 */
export async function register() {
	// Drop any existing listeners to avoid double-firing on re-registration
	unregister();

	// Only webhooks whose status is 'active' get listeners
	const webhooks = await database
		.select<Webhook[]>('*')
		.from('directus_webhooks')
		.where({ status: 'active' });

	for (const webhook of webhooks) {
		if (webhook.actions === '*') {
			// Wildcard subscribes to every item event
			const event = 'items.*';
			const handler = createHandler(webhook);
			emitter.on(event, handler);
			registered.push({ event, handler });
		} else {
			// actions is a comma-separated list, e.g. "create,update"
			for (const action of webhook.actions.split(',')) {
				const event = `items.${action}`;
				const handler = createHandler(webhook);
				emitter.on(event, handler);
				registered.push({ event, handler });
			}
		}
	}
}
/**
 * Detach every webhook listener previously attached by register() and clear
 * the bookkeeping list.
 */
export function unregister() {
	registered.forEach(({ event, handler }) => emitter.off(event, handler));
	registered = [];
}
/**
 * Build the emitter listener for a single webhook: when an item event fires
 * for a collection the webhook subscribes to, POST/GET/etc. the webhook URL.
 * Delivery failures are logged but never thrown, so one failing webhook can't
 * break event processing.
 */
function createHandler(webhook: Webhook): ListenerFn {
	return async (data) => {
		// The webhook's collections field is a comma-separated allow list;
		// '*' means "all collections".
		const allowedCollections = webhook.collections.split(',');

		const collectionAllowed =
			allowedCollections.includes('*') || allowedCollections.includes(data.collection);

		if (collectionAllowed === false) return;

		try {
			await axios({
				url: webhook.url,
				method: webhook.method,
				// Only forward the event payload when the webhook opted in via its data flag
				data: webhook.data ? data : null,
			});
		} catch (error) {
			logger.warn(`Webhook "${webhook.name}" (id: ${webhook.id}) failed`);
			logger.warn(error);
		}
	};
}

View File

@@ -215,11 +215,11 @@ export default class MSSQL implements Schema {
return {
name: rawColumn.COLUMN_NAME,
table: rawColumn.TABLE_NAME,
type: rawColumn.DATA_TYPE,
data_type: rawColumn.DATA_TYPE,
default_value: parseDefault(rawColumn.COLUMN_DEFAULT),
max_length: rawColumn.CHARACTER_MAXIMUM_LENGTH,
precision: rawColumn.NUMERIC_PRECISION,
scale: rawColumn.NUMERIC_SCALE,
numeric_precision: rawColumn.NUMERIC_PRECISION,
numeric_scale: rawColumn.NUMERIC_SCALE,
is_nullable: rawColumn.IS_NULLABLE === 'YES',
is_primary_key: rawColumn.PK_SET === 'PRIMARY',
has_auto_increment: rawColumn.EXTRA === '1',
@@ -235,11 +235,11 @@ export default class MSSQL implements Schema {
return {
name: rawColumn.COLUMN_NAME,
table: rawColumn.TABLE_NAME,
type: rawColumn.DATA_TYPE,
data_type: rawColumn.DATA_TYPE,
default_value: parseDefault(rawColumn.COLUMN_DEFAULT),
max_length: rawColumn.CHARACTER_MAXIMUM_LENGTH,
precision: rawColumn.NUMERIC_PRECISION,
scale: rawColumn.NUMERIC_SCALE,
numeric_precision: rawColumn.NUMERIC_PRECISION,
numeric_scale: rawColumn.NUMERIC_SCALE,
is_nullable: rawColumn.IS_NULLABLE === 'YES',
is_primary_key: rawColumn.PK_SET === 'PRIMARY',
has_auto_increment: rawColumn.EXTRA === '1',

View File

@@ -195,11 +195,11 @@ export default class MySQL implements Schema {
return {
name: rawColumn.COLUMN_NAME,
table: rawColumn.TABLE_NAME,
type: rawColumn.DATA_TYPE,
data_type: rawColumn.DATA_TYPE,
default_value: parseDefault(rawColumn.COLUMN_DEFAULT),
max_length: rawColumn.CHARACTER_MAXIMUM_LENGTH,
precision: rawColumn.NUMERIC_PRECISION,
scale: rawColumn.NUMERIC_SCALE,
numeric_precision: rawColumn.NUMERIC_PRECISION,
numeric_scale: rawColumn.NUMERIC_SCALE,
is_nullable: rawColumn.IS_NULLABLE === 'YES',
is_primary_key: rawColumn.CONSTRAINT_NAME === 'PRIMARY',
has_auto_increment: rawColumn.EXTRA === 'auto_increment',
@@ -218,11 +218,11 @@ export default class MySQL implements Schema {
return {
name: rawColumn.COLUMN_NAME,
table: rawColumn.TABLE_NAME,
type: rawColumn.DATA_TYPE,
data_type: rawColumn.DATA_TYPE,
default_value: parseDefault(rawColumn.COLUMN_DEFAULT),
max_length: rawColumn.CHARACTER_MAXIMUM_LENGTH,
precision: rawColumn.NUMERIC_PRECISION,
scale: rawColumn.NUMERIC_SCALE,
numeric_precision: rawColumn.NUMERIC_PRECISION,
numeric_scale: rawColumn.NUMERIC_SCALE,
is_nullable: rawColumn.IS_NULLABLE === 'YES',
is_primary_key: rawColumn.CONSTRAINT_NAME === 'PRIMARY',
has_auto_increment: rawColumn.EXTRA === 'auto_increment',

View File

@@ -180,11 +180,11 @@ export default class oracleDB implements Schema {
return {
name: rawColumn.COLUMN_NAME,
table: rawColumn.TABLE_NAME,
type: rawColumn.DATA_TYPE,
data_type: rawColumn.DATA_TYPE,
default_value: rawColumn.DATA_DEFAULT,
max_length: rawColumn.DATA_LENGTH,
precision: rawColumn.DATA_PRECISION,
scale: rawColumn.DATA_SCALE,
numeric_precision: rawColumn.DATA_PRECISION,
numeric_scale: rawColumn.DATA_SCALE,
is_nullable: rawColumn.NULLABLE === 'YES',
is_primary_key: rawColumn.CONSTRAINT_TYPE === 'P',
foreign_key_column: rawColumn.REFERENCED_COLUMN_NAME,
@@ -200,11 +200,11 @@ export default class oracleDB implements Schema {
return {
name: rawColumn.COLUMN_NAME,
table: rawColumn.TABLE_NAME,
type: rawColumn.DATA_TYPE,
data_type: rawColumn.DATA_TYPE,
default_value: rawColumn.DATA_DEFAULT,
max_length: rawColumn.DATA_DEFAULT,
precision: rawColumn.DATA_PRECISION,
scale: rawColumn.DATA_SCALE,
numeric_precision: rawColumn.DATA_PRECISION,
numeric_scale: rawColumn.DATA_SCALE,
is_nullable: rawColumn.NULLABLE === 'YES',
is_primary_key: rawColumn.CONSTRAINT_TYPE === 'P',
has_auto_increment: rawColumn.DATA_DEFAULT,

View File

@@ -92,7 +92,7 @@ export default class Postgres implements Schema {
SELECT
table_name,
column_name,
column_default,
column_default as default_value,
is_nullable,
data_type
FROM
@@ -334,13 +334,13 @@ export default class Postgres implements Schema {
return {
name: rawColumn.column_name,
table: rawColumn.table_name,
type: rawColumn.data_type,
data_type: rawColumn.data_type,
default_value: rawColumn.column_default
? this.parseDefaultValue(rawColumn.column_default)
: null,
max_length: rawColumn.character_maximum_length,
precision: rawColumn.numeric_precision,
scale: rawColumn.numeric_scale,
numeric_precision: rawColumn.numeric_precision,
numeric_scale: rawColumn.numeric_scale,
is_nullable: rawColumn.is_nullable === 'YES',
is_primary_key: rawColumn.is_primary === 'YES',
has_auto_increment: rawColumn.serial !== null,
@@ -359,13 +359,13 @@ export default class Postgres implements Schema {
return {
name: rawColumn.column_name,
table: rawColumn.table_name,
type: rawColumn.data_type,
data_type: rawColumn.data_type,
default_value: rawColumn.column_default
? this.parseDefaultValue(rawColumn.column_default)
: null,
max_length: rawColumn.character_maximum_length,
precision: rawColumn.numeric_precision,
scale: rawColumn.numeric_scale,
numeric_precision: rawColumn.numeric_precision,
numeric_scale: rawColumn.numeric_scale,
is_nullable: rawColumn.is_nullable === 'YES',
is_primary_key: rawColumn.is_primary === 'YES',
has_auto_increment: rawColumn.serial !== null,

View File

@@ -129,12 +129,12 @@ export default class SQLite implements Schema {
return {
name: raw.name,
table: table,
type: raw.type,
data_type: raw.type,
default_value: raw.dflt_value,
max_length: extractMaxLength(raw.dflt_value),
/** @NOTE SQLite3 doesn't support precision/scale */
precision: null,
scale: null,
numeric_precision: null,
numeric_scale: null,
is_nullable: raw.notnull === 0,
is_primary_key: raw.pk === 1,
has_auto_increment: raw.pk === 1 && tablesWithAutoIncrementPrimaryKeys.includes(table),

View File

@@ -1,22 +1,22 @@
/**
 * Normalized description of a single database column, as returned by the
 * per-vendor schema adapters (MySQL, Postgres, MSSQL, Oracle, SQLite).
 *
 * NOTE(review): this span contained old and new diff lines interleaved
 * (duplicate members — invalid TypeScript); reconstructed from the
 * post-commit additions (`data_type`, `numeric_precision`, `numeric_scale`).
 */
export interface Column {
	name: string;
	table: string;
	data_type: string;
	default_value: any | null;
	max_length: number | null;
	numeric_precision: number | null;
	numeric_scale: number | null;
	is_nullable: boolean;
	is_primary_key: boolean;
	has_auto_increment: boolean;
	foreign_key_column: string | null;
	foreign_key_table: string | null;

	// Not supported in SQLite or MSSQL
	comment?: string | null;

	// Postgres Only
	schema?: string;
	foreign_key_schema?: string | null;
}

View File

@@ -3,7 +3,9 @@ export type SchemaOverview = {
primary: string;
columns: {
[column: string]: {
column_default: any;
table_name: string;
column_name: string;
default_value: any;
is_nullable: boolean;
data_type: string;
numeric_precision: number | null;