Parse image file info if available

This commit is contained in:
rijkvanzanten
2020-06-29 15:42:46 -04:00
parent 58c644093c
commit d25417c383
7 changed files with 116 additions and 8 deletions

10
package-lock.json generated
View File

@@ -1952,6 +1952,11 @@
"strip-final-newline": "^2.0.0"
}
},
"exif-reader": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/exif-reader/-/exif-reader-1.0.3.tgz",
"integrity": "sha512-tWMBj1+9jUSibgR/kv/GQ/fkR0biaN9GEZ5iPdf7jFeH//d2bSzgPoaWf1OfMv4MXFD4upwvpCCyeMvSyLWSfA=="
},
"expand-brackets": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz",
@@ -3185,6 +3190,11 @@
}
}
},
"icc": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/icc/-/icc-2.0.0.tgz",
"integrity": "sha512-VSTak7UAcZu1E24YFvcoHVpVg/ZUVyb0G1v0wUIibfz5mHvcFeI/Gpn8C0cAUKw5jCCGx5JBcV4gULu6hX97mA=="
},
"iconv-lite": {
"version": "0.4.24",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",

View File

@@ -74,12 +74,14 @@
"busboy": "^0.3.1",
"camelcase": "^6.0.0",
"dotenv": "^8.2.0",
"exif-reader": "^1.0.3",
"express": "^4.17.1",
"express-async-handler": "^1.1.4",
"express-pino-logger": "^5.0.0",
"express-session": "^1.17.1",
"get-port": "^5.1.1",
"grant": "^5.2.0",
"icc": "^2.0.0",
"jsonwebtoken": "^8.5.1",
"knex": "^0.21.1",
"knex-schema-inspector": "github:knex/knex-schema-inspector",

View File

@@ -5,6 +5,7 @@ import sanitizeQuery from '../middleware/sanitize-query';
import validateQuery from '../middleware/validate-query';
import * as FilesService from '../services/files';
import logger from '../logger';
import { InvalidPayloadException } from '../exceptions';
const router = express.Router();
@@ -31,8 +32,7 @@ const multipartHandler = (operation: 'create' | 'update') =>
busboy.on('file', async (fieldname, fileStream, filename, encoding, mimetype) => {
if (!disk) {
// @todo error
return busboy.emit('error', new Error('no storage provided'));
return busboy.emit('error', new InvalidPayloadException('No storage provided.'));
}
payload = {

View File

@@ -4,6 +4,10 @@ import storage from '../storage';
import * as PayloadService from './payload';
import database from '../database';
import logger from '../logger';
import sharp from 'sharp';
import { parse as parseICC } from 'icc';
import parseEXIF from 'exif-reader';
import parseIPTC from '../utils/parse-iptc';
export const createFile = async (
data: Record<string, any>,
@@ -12,10 +16,36 @@ export const createFile = async (
) => {
const payload = await PayloadService.processValues('create', 'directus_files', data);
await ItemsService.createItem('directus_files', payload, query);
if (payload.type?.startsWith('image')) {
const pipeline = sharp();
pipeline.metadata().then((meta) => {
payload.width = meta.width;
payload.height = meta.height;
payload.filesize = meta.size;
payload.metadata = {};
if (meta.icc) {
payload.metadata.icc = parseICC(meta.icc);
}
if (meta.exif) {
payload.metadata.exif = parseEXIF(meta.exif);
}
if (meta.iptc) {
payload.metadata.iptc = parseIPTC(meta.iptc);
payload.title = payload.title || payload.metadata.iptc.headline;
payload.description = payload.description || payload.metadata.iptc.caption;
}
});
stream.pipe(pipeline);
}
// @todo type of stream in flydrive is wrong: https://github.com/Slynova-Org/flydrive/issues/145
await storage.disk(data.storage).put(data.filename_disk, stream as any);
await ItemsService.createItem('directus_files', payload, query);
};
export const readFiles = async (query: Query) => {

View File

@@ -1,4 +0,0 @@
declare module 'grant' {
const grant: any;
export default grant;
}

14
src/types/shims.d.ts vendored Normal file
View File

@@ -0,0 +1,14 @@
// Ambient module shims for third-party packages that ship without their own
// TypeScript declarations.

// 'grant' (OAuth middleware) has no published typings; expose its default export as any.
declare module 'grant' {
const grant: any;
export default grant;
}
// 'icc' parses a raw ICC color-profile buffer (e.g. sharp metadata.icc) into a
// flat key/value map.
// NOTE(review): assumes all parsed profile fields are strings — verify against
// the icc package; Record<string, string> may be an approximation.
declare module 'icc' {
const parse: (buf: Buffer) => Record<string, string>;
export { parse };
}
// 'exif-reader' decodes a raw EXIF buffer (e.g. sharp metadata.exif) into a
// nested object of EXIF tags.
declare module 'exif-reader' {
const exifReader: (buf: Buffer) => Record<string, any>;
export default exifReader;
}

56
src/utils/parse-iptc.ts Normal file
View File

@@ -0,0 +1,56 @@
// Dataset-number -> friendly key for the IPTC record-2 entries we extract.
const IPTC_ENTRY_TYPES = new Map<number, string>([
	[0x78, 'caption'],
	[0x6e, 'credit'],
	[0x19, 'keywords'],
	[0x37, 'dateCreated'],
	[0x50, 'byline'],
	[0x55, 'bylineTitle'],
	[0x7a, 'captionWriter'],
	[0x69, 'headline'],
	[0x74, 'copyright'],
	[0x0f, 'category'],
]);

// Every IPTC IIM dataset starts with the tag marker 0x1C followed by the
// record number (0x02 = application record, the one holding editorial fields).
const IPTC_ENTRY_MARKER = Buffer.from([0x1c, 0x02]);

/**
 * Parse raw IPTC (IIM) metadata into a plain object.
 *
 * Walks every `0x1C 0x02` marker in the buffer; each marker is followed by a
 * 1-byte dataset number and a 2-byte big-endian payload length. Recognized
 * datasets are collected under friendly keys; a key that occurs more than once
 * (e.g. keywords) is collected into an array of strings.
 *
 * @param buffer - Raw IPTC block (e.g. sharp's `metadata.iptc`). Non-Buffer
 *   input yields an empty object.
 * @returns Map of recognized IPTC fields; `{}` when nothing was found.
 */
export default function parseIPTC(buffer: Buffer): Record<string, string | string[]> {
	if (!Buffer.isBuffer(buffer)) return {};

	const iptc: Record<string, string | string[]> = {};

	// BUGFIX: read the entry at the marker we are standing on and only then
	// advance — the previous version advanced first, which skipped the first
	// entry and, after the last marker, read garbage at offsets computed from -1.
	let entryPos = buffer.indexOf(IPTC_ENTRY_MARKER);

	while (entryPos !== -1) {
		const typePos = entryPos + IPTC_ENTRY_MARKER.byteLength;
		const sizePos = typePos + 1;
		const dataPos = sizePos + 2;

		// Truncated trailing entry: not enough bytes for the type + size header.
		if (dataPos > buffer.length) break;

		const blockType = buffer.readUInt8(typePos);
		const blockSize = buffer.readUInt16BE(sizePos);
		const key = IPTC_ENTRY_TYPES.get(blockType);

		if (key !== undefined) {
			const data = buffer.slice(dataPos, dataPos + blockSize).toString();
			const existing = iptc[key];

			if (existing === undefined) {
				iptc[key] = data;
			} else if (Array.isArray(existing)) {
				existing.push(data);
			} else {
				// Second occurrence of this key: promote to an array.
				iptc[key] = [existing, data];
			}
		}

		entryPos = buffer.indexOf(IPTC_ENTRY_MARKER, entryPos + IPTC_ENTRY_MARKER.byteLength);
	}

	return iptc;
}