mirror of https://github.com/directus/directus.git (synced 2026-02-03 12:45:08 -05:00)
@@ -2,7 +2,7 @@
 # General
 
 PORT=41201
-PUBLIC_URL="http://localhost:3000"
+PUBLIC_URL="http://localhost:41201"
 LOG_LEVEL="info"
 
 ####################################################################################################
@@ -24,7 +24,7 @@ DB_PASSWORD="psql1234"
 
 STORAGE_LOCATIONS="finder, digitalocean"
 
-STORAGE_FINDER_PUBLIC_URL="http://localhost:3000/uploads"
+STORAGE_FINDER_PUBLIC_URL="http://localhost:41201/uploads"
 STORAGE_FINDER_DRIVER="local"
 STORAGE_FINDER_ROOT="./uploads"
 
@@ -239,7 +239,6 @@ tables:
     filename_disk:
       type: string
       length: 255
-      nullable: false
     filename_download:
       type: string
       length: 255
@@ -2,9 +2,9 @@ import express from 'express';
 import asyncHandler from 'express-async-handler';
 import Busboy from 'busboy';
 import sanitizeQuery from '../middleware/sanitize-query';
-import * as FilesService from '../services/files';
+import FilesService from '../services/files';
 import useCollection from '../middleware/use-collection';
-import { Item } from '../types';
+import { File, PrimaryKey } from '../types';
 import path from 'path';
 import formatTitle from '@directus/format-title';
 import env from '../env';
@@ -13,114 +13,102 @@ const router = express.Router();
 
 router.use(useCollection('directus_files'));
 
-const multipartHandler = (operation: 'create' | 'update') =>
-	asyncHandler(async (req, res, next) => {
-		const busboy = new Busboy({ headers: req.headers });
-		const savedFiles: Item[] = [];
-
-		/**
-		 * The order of the fields in multipart/form-data is important. We require that all fields
-		 * are provided _before_ the files. This allows us to set the storage location, and create
-		 * the row in directus_files async during the upload of the actual file.
-		 */
-
-		let disk: string = (env.STORAGE_LOCATIONS as string).split(',')[0].trim();
-		let payload: Partial<Item> = {};
-		let fileCount = 0;
-
-		busboy.on('field', (fieldname, val) => {
-			if (fieldname === 'storage') {
-				disk = val;
-			}
-
-			payload[fieldname] = val;
-		});
-
-		busboy.on('file', async (fieldname, fileStream, filename, encoding, mimetype) => {
-			fileCount++;
-
-			payload = {
-				...payload,
-				filename_download: filename,
-				type: mimetype,
-			};
-
-			if (!payload.storage) {
-				payload.storage = disk;
-			}
-
-			if (!payload.title) {
-				payload.title = formatTitle(path.parse(filename).name);
-			}
-
-			if (req.accountability?.user) {
-				payload.uploaded_by = req.accountability.user;
-			}
-
-			try {
-				if (operation === 'create') {
-					const pk = await FilesService.createFile(
-						payload,
-						fileStream,
-						req.accountability
-					);
-					const file = await FilesService.readFile(
-						pk,
-						req.sanitizedQuery,
-						req.accountability
-					);
-
-					savedFiles.push(file);
-					tryDone();
-				} else {
-					const pk = await FilesService.updateFile(
-						req.params.pk,
-						payload,
-						req.accountability,
-						fileStream
-					);
-					const file = await FilesService.readFile(
-						pk,
-						req.sanitizedQuery,
-						req.accountability
-					);
-
-					savedFiles.push(file);
-					tryDone();
-				}
-			} catch (err) {
-				busboy.emit('error', err);
-			}
-		});
-
-		busboy.on('error', (error: Error) => {
-			next(error);
-		});
-
-		busboy.on('finish', () => {
-			tryDone();
-		});
-
-		req.pipe(busboy);
-
-		function tryDone() {
-			if (savedFiles.length === fileCount) {
-				if (fileCount === 1) {
-					return res.status(200).json({ data: savedFiles[0] });
-				} else {
-					return res.status(200).json({ data: savedFiles });
-				}
-			}
-		}
-	});
-
-router.post('/', sanitizeQuery, multipartHandler('create'));
+const multipartHandler = asyncHandler(async (req, res, next) => {
+	if (req.is('multipart/form-data') === false) return next();
+
+	/**
+	 * The order of the fields in multipart/form-data is important. We require that all fields
+	 * are provided _before_ the files. This allows us to set the storage location, and create
+	 * the row in directus_files async during the upload of the actual file.
+	 */
+	const busboy = new Busboy({ headers: req.headers });
+	const savedFiles: PrimaryKey[] = [];
+	const service = new FilesService({ accountability: req.accountability });
+
+	let disk: string = (env.STORAGE_LOCATIONS as string).split(',')[0].trim();
+	let payload: Partial<File> = {};
+	let fileCount = 0;
+
+	busboy.on('field', (fieldname: keyof File, val) => {
+		if (fieldname === 'storage') {
+			disk = val;
+		}
+
+		payload[fieldname] = val;
+	});
+
+	busboy.on('file', async (fieldname, fileStream, filename, encoding, mimetype) => {
+		fileCount++;
+
+		if (!payload.title) {
+			payload.title = formatTitle(path.parse(filename).name);
+		}
+
+		if (req.accountability?.user) {
+			payload.uploaded_by = req.accountability.user;
+		}
+
+		const payloadWithRequiredFields: Partial<File> & {
+			filename_download: string;
+			type: string;
+			storage: string;
+		} = {
+			...payload,
+			filename_download: filename,
+			type: mimetype,
+			storage: payload.storage || disk,
+		};
+
+		const primaryKey = await service.upload(fileStream, payloadWithRequiredFields);
+		savedFiles.push(primaryKey);
+		tryDone();
+	});
+
+	busboy.on('error', (error: Error) => {
+		next(error);
+	});
+
+	busboy.on('finish', () => {
+		tryDone();
+	});
+
+	req.pipe(busboy);
+
+	function tryDone() {
+		if (savedFiles.length === fileCount) {
+			res.locals.savedFiles = savedFiles;
+			return next();
+		}
+	}
+});
+
+router.post(
+	'/',
+	sanitizeQuery,
+	multipartHandler,
+	asyncHandler(async (req, res) => {
+		const service = new FilesService({ accountability: req.accountability });
+		let keys: PrimaryKey | PrimaryKey[] = [];
+
+		if (req.is('multipart/form-data')) {
+			keys = res.locals.savedFiles;
+		} else {
+			// @TODO is this ever used in real life? Wouldn't you always upload a file on create?
+			keys = await service.create(req.body);
+		}
+
+		const record = await service.readByKey(keys as any, req.sanitizedQuery);
+		return res.json({ data: record || null });
+	})
+);
 
 router.get(
 	'/',
 	sanitizeQuery,
 	asyncHandler(async (req, res) => {
-		const records = await FilesService.readFiles(req.sanitizedQuery, req.accountability);
+		const service = new FilesService({ accountability: req.accountability });
+		const records = await service.readByQuery(req.sanitizedQuery);
 		return res.json({ data: records || null });
 	})
 );
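A minimal client-side sketch of the field-ordering rule the new multipartHandler documents: plain fields first, file parts last. This is not part of the diff; it assumes the `axios` and `form-data` packages and that the router above is mounted at `/files`, and the storage location and file are placeholders.

import axios from 'axios';
import FormData from 'form-data';
import { createReadStream } from 'fs';

const form = new FormData();

// Plain fields are appended before the file, so `storage` and `title` are
// already on the payload when the file stream starts and the
// directus_files row is created.
form.append('storage', 'finder');
form.append('title', 'Example Upload');
form.append('file', createReadStream('./example.jpg'));

axios
	.post('http://localhost:41201/files', form, { headers: form.getHeaders() })
	.then((res) => console.log(res.data));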
@@ -129,11 +117,9 @@ router.get(
 	'/:pk',
 	sanitizeQuery,
 	asyncHandler(async (req, res) => {
-		const record = await FilesService.readFile(
-			req.params.pk,
-			req.sanitizedQuery,
-			req.accountability
-		);
+		const keys = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
+		const service = new FilesService({ accountability: req.accountability });
+		const record = await service.readByKey(keys as any, req.sanitizedQuery);
 		return res.json({ data: record || null });
 	})
 );
@@ -141,22 +127,29 @@
 router.patch(
 	'/:pk',
 	sanitizeQuery,
-	asyncHandler(async (req, res, next) => {
-		if (req.is('multipart/form-data')) {
-			return multipartHandler('update')(req, res, next);
-		} else {
-			const pk = await FilesService.updateFile(req.params.pk, req.body, req.accountability);
-			const file = await FilesService.readFile(pk, req.sanitizedQuery, req.accountability);
-
-			return res.status(200).json({ data: file || null });
-		}
+	multipartHandler,
+	asyncHandler(async (req, res) => {
+		const service = new FilesService({ accountability: req.accountability });
+		let keys: PrimaryKey | PrimaryKey[] = [];
+
+		if (req.is('multipart/form-data')) {
+			keys = res.locals.savedFiles;
+		} else {
+			keys = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
+			await service.update(req.body, keys as any);
+		}
+
+		const record = await service.readByKey(keys as any, req.sanitizedQuery);
+		return res.json({ data: record || null });
 	})
 );
 
 router.delete(
 	'/:pk',
 	asyncHandler(async (req, res) => {
-		await FilesService.deleteFile(req.params.pk, req.accountability);
+		const keys = req.params.pk.includes(',') ? req.params.pk.split(',') : req.params.pk;
+		const service = new FilesService({ accountability: req.accountability });
+		await service.delete(keys as any);
 		return res.status(200).end();
 	})
 );
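The `/:pk` routes above treat a comma in the key segment as a batch request. A short sketch of both shapes, assuming `axios`, the `/files` mount point, and placeholder keys:

import axios from 'axios';

const api = axios.create({ baseURL: 'http://localhost:41201' });

async function batchExample() {
	// Single key: the response `data` is a single record
	await api.patch('/files/aaa', { title: 'Renamed' });

	// Comma-separated keys: the same body is applied to every key,
	// and the response `data` is an array of records
	await api.patch('/files/aaa,bbb', { title: 'Renamed' });
	await api.delete('/files/aaa,bbb');
}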
@@ -1,145 +1,100 @@
-import { Query } from '../types/query';
 import ItemsService from './items';
 import storage from '../storage';
-import database from '../database';
-import logger from '../logger';
 import sharp from 'sharp';
 import { parse as parseICC } from 'icc';
 import parseEXIF from 'exif-reader';
 import parseIPTC from '../utils/parse-iptc';
 import path from 'path';
-import { v4 as uuidv4 } from 'uuid';
-import { Accountability, Item } from '../types';
-import { Readable } from 'stream';
+import { AbstractServiceOptions, File, PrimaryKey } from '../types';
+import { clone } from 'lodash';
 
-/**
- * @todo turn into class
- */
-
-export const createFile = async (
-	data: Partial<Item>,
-	stream: NodeJS.ReadableStream,
-	accountability?: Accountability
-) => {
-	const id = uuidv4();
-	const itemsService = new ItemsService('directus_files', { accountability });
-
-	const payload: Partial<Item> = {
-		...data,
-		id,
-	};
-
-	payload.filename_disk = payload.id + path.extname(payload.filename_download);
-
-	/**
-	 * @note
-	 * We save a subset first. This allows the permissions check to run and the file to be created with
-	 * proper accountability and revisions.
-	 * Afterwards, we'll save the file to disk. During this process, we extract the metadata of the
-	 * file itself. After the file is saved to disk, we'll update the just created item with the
-	 * updated values to ensure we save the filesize etc. We explicitly save this without accountability
-	 * in order to prevent update permissions from kicking in and to prevent an extra revision from being created
-	 */
-	const pk = await itemsService.create(payload);
-
-	if (['image/jpeg', 'image/png', 'image/webp'].includes(payload.type)) {
-		const pipeline = sharp();
-
-		pipeline.metadata().then((meta) => {
-			payload.width = meta.width;
-			payload.height = meta.height;
-			payload.filesize = meta.size;
-			payload.metadata = {};
-
-			if (meta.icc) {
-				payload.metadata.icc = parseICC(meta.icc);
-			}
-
-			if (meta.exif) {
-				payload.metadata.exif = parseEXIF(meta.exif);
-			}
-
-			if (meta.iptc) {
-				payload.metadata.iptc = parseIPTC(meta.iptc);
-
-				payload.title = payload.title || payload.metadata.iptc.headline;
-				payload.description = payload.description || payload.metadata.iptc.caption;
-			}
-		});
-
-		await storage.disk(data.storage).put(payload.filename_disk, stream.pipe(pipeline));
-
-		await itemsService.update(payload, pk);
-	} else {
-		await storage.disk(data.storage).put(payload.filename_disk, stream);
-	}
-
-	return pk;
-};
-
-export const readFiles = async (query: Query, accountability?: Accountability) => {
-	const itemsService = new ItemsService('directus_files', { accountability });
-	return await itemsService.readByQuery(query);
-};
-
-export const readFile = async (
-	pk: string | number,
-	query: Query,
-	accountability?: Accountability
-) => {
-	const itemsService = new ItemsService('directus_files', { accountability });
-	return await itemsService.readByKey(pk, query);
-};
-
-export const updateFile = async (
-	pk: string | number,
-	data: Partial<Item>,
-	accountability?: Accountability,
-	stream?: NodeJS.ReadableStream
-) => {
-	const itemsService = new ItemsService('directus_files', { accountability });
-
-	if (stream) {
-		const file = await database
-			.select('storage', 'filename_disk')
-			.from('directus_files')
-			.where({ id: pk })
-			.first();
-
-		await storage.disk(file.storage).put(file.filename_disk, stream as Readable);
-	}
-
-	return await itemsService.update(data, pk);
-};
-
-export const deleteFile = async (pk: string, accountability?: Accountability) => {
-	/** @todo use ItemsService */
-	const file = await database
-		.select('storage', 'filename_disk')
-		.from('directus_files')
-		.where({ id: pk })
-		.first();
-
-	/** @todo delete thumbnails here. should be able to use storage.disk().flatList(prefix: string) */
-	const { wasDeleted } = await storage.disk(file.storage).delete(file.filename_disk);
-
-	logger.info(`File ${file.filename_download} deleted: ${wasDeleted}`);
-
-	/** @todo use itemsService */
-	await database.delete().from('directus_files').where({ id: pk });
-};
+export default class FilesService extends ItemsService {
+	constructor(options?: AbstractServiceOptions) {
+		super('directus_files', options);
+	}
+
+	async upload(
+		stream: NodeJS.ReadableStream,
+		data: Partial<File> & { filename_download: string; storage: string },
+		primaryKey?: PrimaryKey
+	) {
+		const payload = clone(data);
+
+		if (primaryKey !== undefined) {
+			// If the file you're uploading already exists, we'll consider this upload a replace. In that case, we'll
+			// delete the previously saved file and thumbnails to ensure they're generated fresh
+			const disk = storage.disk(payload.storage);
+
+			for await (const file of disk.flatList(String(primaryKey))) {
+				await disk.delete(file.path);
+			}
+
+			await this.update(payload, primaryKey);
+		} else {
+			primaryKey = await this.create(payload);
+		}
+
+		/**
+		 * @TODO
+		 * Handle changes in storage adapter -> going from local to S3 needs to delete from one, upload to the other
+		 */
+		payload.filename_disk = primaryKey + path.extname(payload.filename_download);
+
+		/**
+		 * @TODO
+		 * Remove old thumbnails
+		 */
+		if (!payload.type) {
+			payload.type = 'application/octet-stream';
+		}
+
+		/**
+		 * @TODO
+		 * Extract metadata here too
+		 */
+		if (['image/jpeg', 'image/png', 'image/webp'].includes(payload.type)) {
+			const pipeline = sharp();
+
+			pipeline.metadata().then((meta) => {
+				payload.width = meta.width;
+				payload.height = meta.height;
+				payload.filesize = meta.size;
+				payload.metadata = {};
+
+				if (meta.icc) {
+					payload.metadata.icc = parseICC(meta.icc);
+				}
+
+				if (meta.exif) {
+					payload.metadata.exif = parseEXIF(meta.exif);
+				}
+
+				if (meta.iptc) {
+					payload.metadata.iptc = parseIPTC(meta.iptc);
+
+					payload.title = payload.title || payload.metadata.iptc.headline;
+					payload.description = payload.description || payload.metadata.iptc.caption;
+				}
+			});
+
+			await storage.disk(data.storage).put(payload.filename_disk, stream.pipe(pipeline));
+		} else {
+			await storage.disk(data.storage).put(payload.filename_disk, stream);
+		}
+
+		// We do this in a service without accountability. Even if you don't have update permissions to the file,
+		// we still want to be able to set the extracted values from the file on create
+		const sudoService = new ItemsService('directus_files');
+		await sudoService.update(payload, primaryKey);
+
+		return primaryKey;
+	}
+
+	delete(key: PrimaryKey): Promise<PrimaryKey>;
+	delete(keys: PrimaryKey[]): Promise<PrimaryKey[]>;
+	async delete(key: PrimaryKey | PrimaryKey[]): Promise<PrimaryKey | PrimaryKey[]> {
+		const keys = Array.isArray(key) ? key : [key];
+		const files = await super.readByKey(keys, { fields: ['id', 'storage'] });
+
+		for (const file of files) {
+			const disk = storage.disk(file.storage);
+
+			// Delete file + thumbnails
+			for await (const { path } of disk.flatList(file.id)) {
+				await disk.delete(path);
+			}
+		}
+
+		await super.delete(keys);
+
+		return key;
+	}
+}
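A sketch of calling the new FilesService.upload() directly from server-side code, under the signature in the diff. The import path, file name, and storage location are placeholders, and constructing the service without accountability is assumed to run unrestricted, like the sudoService inside upload().

import { createReadStream } from 'fs';
import FilesService from '../services/files';

async function importLocalFile() {
	const service = new FilesService();

	// filename_download and storage are the only required payload fields;
	// passing an existing primary key as the third argument would make
	// this a replace instead of a create.
	const primaryKey = await service.upload(createReadStream('./report.pdf'), {
		filename_download: 'report.pdf',
		storage: 'finder',
		title: 'Report',
	});

	return primaryKey;
}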
@@ -1,6 +1,22 @@
 /** @todo finalize */
 export type File = {
 	id: string; // uuid
-	filename_disk: string;
+	storage: string;
+	filename_disk: string;
 	filename_download: string;
+	title: string | null;
+	type: string | null;
+	folder: string | null; // uuid
+	uploaded_by: string | null; // uuid
+	uploaded_on: Date;
+	charset: string | null;
+	filesize: number;
+	width: number | null;
+	height: number | null;
+	duration: number | null;
+	embed: string | null;
+	description: string | null;
+	location: string | null;
+	tags: string | null;
+	metadata: Record<string, any> | null;
 };
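The controller builds its upload payload as Partial<File> intersected with the fields it must guarantee. A minimal illustration of why that intersection type-checks, with placeholder values:

import { File } from '../types';

// Every File field stays optional except the three named in the intersection.
const payload: Partial<File> & { filename_download: string; type: string; storage: string } = {
	filename_download: 'photo.jpg',
	type: 'image/jpeg',
	storage: 'finder',
	title: 'Photo',
};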