diff --git a/.changeset/dry-shrimps-reply.md b/.changeset/dry-shrimps-reply.md deleted file mode 100644 index aaaa21f0c7..0000000000 --- a/.changeset/dry-shrimps-reply.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@directus/app': minor ---- - -Added a default preset for bookmarks to display the relevant fields by default diff --git a/.changeset/eight-clocks-attack.md b/.changeset/eight-clocks-attack.md new file mode 100644 index 0000000000..fe0b04ad5e --- /dev/null +++ b/.changeset/eight-clocks-attack.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Fixed reliance on `tus_id` field when reading files, even if TUS is not enabled diff --git a/.changeset/fair-bikes-smoke.md b/.changeset/fair-bikes-smoke.md new file mode 100644 index 0000000000..a586a5ac9b --- /dev/null +++ b/.changeset/fair-bikes-smoke.md @@ -0,0 +1,5 @@ +--- +"@directus/app": patch +--- + +Fixed an issue causing the TUS uploads not to respect the relative path of the app diff --git a/.changeset/hot-cherries-tap.md b/.changeset/hot-cherries-tap.md deleted file mode 100644 index 582d0f8efc..0000000000 --- a/.changeset/hot-cherries-tap.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@directus/api': major ---- - -Ensured service integrity, by calling corresponding specified services out of other services diff --git a/.changeset/moody-bees-pay.md b/.changeset/moody-bees-pay.md new file mode 100644 index 0000000000..b6698c4a4d --- /dev/null +++ b/.changeset/moody-bees-pay.md @@ -0,0 +1,5 @@ +--- +"@directus/api": patch +--- + +Prioritized access_token in query over cookies for websocket authentication diff --git a/.changeset/new-squids-argue.md b/.changeset/new-squids-argue.md deleted file mode 100644 index 7f7d88b127..0000000000 --- a/.changeset/new-squids-argue.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@directus/storage-driver-cloudinary': patch ---- - -Fixed upload resource type guessing to consider the file extension in a case insensitive manner diff --git a/.changeset/nine-weeks-pay.md 
b/.changeset/nine-weeks-pay.md deleted file mode 100644 index 4cf6c5747d..0000000000 --- a/.changeset/nine-weeks-pay.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@directus/app': patch ---- - -Improved bookmark edit view, arranging fields by purpose and giving more space to the filter diff --git a/.changeset/polite-elephants-juggle.md b/.changeset/polite-elephants-juggle.md deleted file mode 100644 index c3063746af..0000000000 --- a/.changeset/polite-elephants-juggle.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@directus/api': major ---- - -Included admin users under the app access limit diff --git a/.changeset/popular-bees-press.md b/.changeset/popular-bees-press.md deleted file mode 100644 index ab29acd98e..0000000000 --- a/.changeset/popular-bees-press.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"@directus/sdk": patch ---- - -Fixed an issue where request could sometimes fail when using a custom storage implementation with async setter - diff --git a/.changeset/selfish-countries-check.md b/.changeset/selfish-countries-check.md deleted file mode 100644 index ba3c8082d4..0000000000 --- a/.changeset/selfish-countries-check.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@directus/api': patch ---- - -Fixed an issue where copying-to-clipboard displayed a success notification even though it failed diff --git a/.changeset/seven-suits-live.md b/.changeset/seven-suits-live.md new file mode 100644 index 0000000000..30db71a84a --- /dev/null +++ b/.changeset/seven-suits-live.md @@ -0,0 +1,5 @@ +--- +"@directus/api": patch +--- + +Fixed cookie authentication in websocket strict mode diff --git a/.changeset/sharp-swans-try.md b/.changeset/sharp-swans-try.md deleted file mode 100644 index b0d529cbfb..0000000000 --- a/.changeset/sharp-swans-try.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@directus/api': patch ---- - -Added total file size to the telemetry report diff --git a/.changeset/silly-emus-kick.md b/.changeset/silly-emus-kick.md deleted file mode 100644 index 37233478af..0000000000 --- 
a/.changeset/silly-emus-kick.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@directus/app': patch ---- - -Fixed the input pattern check in the filter component diff --git a/.changeset/silly-ladybugs-carry.md b/.changeset/silly-ladybugs-carry.md new file mode 100644 index 0000000000..c52bb3410b --- /dev/null +++ b/.changeset/silly-ladybugs-carry.md @@ -0,0 +1,7 @@ +--- +'@directus/env': minor +'docs': minor +'@directus/api': minor +--- + +Added support for the ADMIN_TOKEN environment variable diff --git a/.changeset/slimy-jeans-shout.md b/.changeset/slimy-jeans-shout.md deleted file mode 100644 index e49478b732..0000000000 --- a/.changeset/slimy-jeans-shout.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@directus/api': patch ---- - -Fixed error extraction for MySQL unique primary key constraints diff --git a/.changeset/soft-chefs-battle.md b/.changeset/soft-chefs-battle.md new file mode 100644 index 0000000000..509f90230b --- /dev/null +++ b/.changeset/soft-chefs-battle.md @@ -0,0 +1,5 @@ +--- +'@directus/api': patch +--- + +Optimized the type signature of the items service collection parameter diff --git a/.changeset/strong-otters-bake.md b/.changeset/strong-otters-bake.md deleted file mode 100644 index 6e0ccd1f02..0000000000 --- a/.changeset/strong-otters-bake.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@directus/api': patch ---- - -Fixed an issue that could cause time type fields to be treated as a string in GraphQL diff --git a/.changeset/twenty-seahorses-drop.md b/.changeset/twenty-seahorses-drop.md deleted file mode 100644 index de2403f9d8..0000000000 --- a/.changeset/twenty-seahorses-drop.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@directus/errors': patch ---- - -Added `primaryKey` flag in `RecordNotUniqueError` extensions diff --git a/.changeset/witty-papayas-promise.md b/.changeset/witty-papayas-promise.md deleted file mode 100644 index ea14ecde39..0000000000 --- a/.changeset/witty-papayas-promise.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@directus/api": patch ---- - -Fixed the 
interpretation of CORS config options, allowing to use "falsy" values like `CORS_ORIGIN: false` and `CORS_MAX_AGE: 0` diff --git a/.github/DISCUSSION_TEMPLATE/feature-requests.yml b/.github/DISCUSSION_TEMPLATE/draft-feature-requests.yml similarity index 100% rename from .github/DISCUSSION_TEMPLATE/feature-requests.yml rename to .github/DISCUSSION_TEMPLATE/draft-feature-requests.yml diff --git a/api/package.json b/api/package.json index 272ba04f62..8a45a22f25 100644 --- a/api/package.json +++ b/api/package.json @@ -1,6 +1,6 @@ { "name": "@directus/api", - "version": "20.0.0-rc.1", + "version": "21.0.0-rc.0", "description": "Directus is a real-time API and App dashboard for managing SQL database content", "keywords": [ "directus", @@ -66,7 +66,7 @@ }, "dependencies": { "@authenio/samlify-node-xmllint": "2.0.0", - "@aws-sdk/client-ses": "3.568.0", + "@aws-sdk/client-ses": "3.600.0", "@directus/app": "workspace:*", "@directus/constants": "workspace:*", "@directus/env": "workspace:*", @@ -93,6 +93,9 @@ "@rollup/plugin-alias": "5.1.0", "@rollup/plugin-node-resolve": "15.2.3", "@rollup/plugin-virtual": "3.0.2", + "@tus/file-store": "1.3.3", + "@tus/server": "1.6.0", + "@tus/utils": "0.2.0", "@types/cookie": "0.6.0", "argon2": "0.40.3", "async": "3.2.5", @@ -121,16 +124,16 @@ "flat": "6.0.1", "fs-extra": "11.2.0", "glob-to-regexp": "0.4.1", - "graphql": "16.8.2", + "graphql": "16.9.0", "graphql-compose": "9.0.11", "graphql-ws": "5.16.0", "helmet": "7.1.0", "icc": "3.0.0", - "inquirer": "9.2.23", + "inquirer": "9.3.2", "ioredis": "5.4.1", "ip-matching": "2.1.2", "isolated-vm": "4.7.2", - "joi": "17.13.1", + "joi": "17.13.3", "js-yaml": "4.1.0", "js2xmlparser": "5.0.0", "json2csv": "5.0.7", @@ -138,7 +141,7 @@ "keyv": "4.5.4", "knex": "3.1.0", "ldapjs": "2.3.3", - "liquidjs": "10.13.1", + "liquidjs": "10.14.0", "lodash-es": "4.17.21", "marked": "12.0.2", "micromustache": "8.0.3", @@ -162,7 +165,7 @@ "pino-http": "9.0.0", "pino-http-print": "3.1.0", "pino-pretty": "11.2.1", - 
"qs": "6.12.1", + "qs": "6.12.2", "rate-limiter-flexible": "5.0.3", "rollup": "4.17.2", "samlify": "2.8.10", @@ -170,10 +173,10 @@ "sharp": "0.33.4", "snappy": "7.2.2", "stream-json": "1.8.0", - "tar": "7.2.0", + "tar": "7.4.0", "tsx": "4.12.0", "wellknown": "0.5.0", - "ws": "8.17.0", + "ws": "8.18.0", "zod": "3.23.8", "zod-validation-error": "3.3.0" }, @@ -192,7 +195,7 @@ "@types/destroy": "1.0.3", "@types/encodeurl": "1.0.2", "@types/express": "4.17.21", - "@types/express-serve-static-core": "4.19.3", + "@types/express-serve-static-core": "4.19.5", "@types/fs-extra": "11.0.4", "@types/glob-to-regexp": "0.4.4", "@types/inquirer": "9.0.7", @@ -221,12 +224,12 @@ "vitest": "1.5.3" }, "optionalDependencies": { - "@keyv/redis": "2.8.4", + "@keyv/redis": "2.8.5", "mysql2": "3.10.0", "nodemailer-mailgun-transport": "2.1.5", "nodemailer-sendgrid": "1.0.3", "oracledb": "6.5.1", - "pg": "8.11.5", + "pg": "8.12.0", "sqlite3": "5.1.7", "tedious": "18.2.0" }, diff --git a/api/src/app.ts b/api/src/app.ts index fcbba2f715..5050347add 100644 --- a/api/src/app.ts +++ b/api/src/app.ts @@ -39,6 +39,7 @@ import serverRouter from './controllers/server.js'; import settingsRouter from './controllers/settings.js'; import sharesRouter from './controllers/shares.js'; import translationsRouter from './controllers/translations.js'; +import { default as tusRouter, scheduleTusCleanup } from './controllers/tus.js'; import usersRouter from './controllers/users.js'; import utilsRouter from './controllers/utils.js'; import versionsRouter from './controllers/versions.js'; @@ -280,6 +281,11 @@ export default async function createApp(): Promise { app.use('/dashboards', dashboardsRouter); app.use('/extensions', extensionsRouter); app.use('/fields', fieldsRouter); + + if (env['TUS_ENABLED'] === true) { + app.use('/files/tus', tusRouter); + } + app.use('/files', filesRouter); app.use('/flows', flowsRouter); app.use('/folders', foldersRouter); @@ -314,6 +320,7 @@ export default async function 
createApp(): Promise { await emitter.emitInit('routes.after', { app }); initTelemetry(); + scheduleTusCleanup(); await emitter.emitInit('app.after', { app }); diff --git a/api/src/cli/commands/bootstrap/index.ts b/api/src/cli/commands/bootstrap/index.ts index b45f1f8cfd..60a3108e4c 100644 --- a/api/src/cli/commands/bootstrap/index.ts +++ b/api/src/cli/commands/bootstrap/index.ts @@ -110,5 +110,7 @@ async function createDefaultAdmin(schema: SchemaOverview) { logger.info(`No admin password provided. Defaulting to "${adminPassword}"`); } - await usersService.createOne({ ...defaultAdminUser, email: adminEmail, password: adminPassword, role }); + const token = env['ADMIN_TOKEN'] ?? null; + + await usersService.createOne({ ...defaultAdminUser, email: adminEmail, password: adminPassword, token, role }); } diff --git a/api/src/constants.ts b/api/src/constants.ts index aeb60c1c01..d88e1f0983 100644 --- a/api/src/constants.ts +++ b/api/src/constants.ts @@ -2,6 +2,8 @@ import type { CookieOptions } from 'express'; import type { TransformationParams } from './types/index.js'; import { getMilliseconds } from './utils/get-milliseconds.js'; import { useEnv } from '@directus/env'; +import { toBoolean } from '@directus/utils'; +import bytes from 'bytes'; const env = useEnv(); @@ -93,3 +95,12 @@ export const SUPPORTED_IMAGE_METADATA_FORMATS = [ 'image/tiff', 'image/avif', ]; + +/** Resumable uploads */ +export const RESUMABLE_UPLOADS = { + ENABLED: toBoolean(env['TUS_ENABLED']), + CHUNK_SIZE: bytes(env['TUS_CHUNK_SIZE'] as string), + MAX_SIZE: bytes(env['FILES_MAX_UPLOAD_SIZE'] as string), + EXPIRATION_TIME: getMilliseconds(env['TUS_UPLOAD_EXPIRATION'], 600_000 /* 10min */), + SCHEDULE: String(env['TUS_CLEANUP_SCHEDULE'] as string), +}; diff --git a/api/src/controllers/tus.ts b/api/src/controllers/tus.ts new file mode 100644 index 0000000000..07b327fbd5 --- /dev/null +++ b/api/src/controllers/tus.ts @@ -0,0 +1,86 @@ +import { Router } from 'express'; +import { getSchema } from 
'../utils/get-schema.js'; +import { scheduleSynchronizedJob, validateCron } from '../utils/schedule.js'; +import { createTusServer } from '../services/tus/index.js'; +import { AuthorizationService } from '../services/authorization.js'; +import asyncHandler from '../utils/async-handler.js'; +import type { PermissionsAction } from '@directus/types'; +import { ForbiddenError } from '@directus/errors'; +import { RESUMABLE_UPLOADS } from '../constants.js'; + +const mapAction = (method: string): PermissionsAction => { + switch (method) { + case 'POST': + return 'create'; + case 'PATCH': + return 'update'; + case 'DELETE': + return 'delete'; + default: + return 'read'; + } +}; + +const checkFileAccess = asyncHandler(async (req, _res, next) => { + const auth = new AuthorizationService({ + accountability: req.accountability, + schema: req.schema, + }); + + if (!req.accountability?.admin) { + const action = mapAction(req.method); + + if (action === 'create') { + // checkAccess doesnt seem to work as expected for "create" actions + const hasPermission = Boolean( + req.accountability?.permissions?.find((permission) => { + return permission.collection === 'directus_files' && permission.action === action; + }), + ); + + if (!hasPermission) throw new ForbiddenError(); + } else { + try { + await auth.checkAccess(action, 'directus_files'); + } catch (e) { + throw new ForbiddenError(); + } + } + } + + return next(); +}); + +const handler = asyncHandler(async (req, res) => { + const tusServer = await createTusServer({ + schema: req.schema, + accountability: req.accountability, + }); + + await tusServer.handle(req, res); +}); + +export function scheduleTusCleanup() { + if (!RESUMABLE_UPLOADS.ENABLED) return; + + if (validateCron(RESUMABLE_UPLOADS.SCHEDULE)) { + scheduleSynchronizedJob('tus-cleanup', RESUMABLE_UPLOADS.SCHEDULE, async () => { + const tusServer = await createTusServer({ + schema: await getSchema(), + }); + + await tusServer.cleanUpExpiredUploads(); + }); + } +} + +const 
router = Router(); + +router.post('/', checkFileAccess, handler); +router.patch('/:id', checkFileAccess, handler); +router.delete('/:id', checkFileAccess, handler); + +router.options('/:id', checkFileAccess, handler); +router.head('/:id', checkFileAccess, handler); + +export default router; diff --git a/api/src/database/migrations/20240701A-add-tus-data.ts b/api/src/database/migrations/20240701A-add-tus-data.ts new file mode 100644 index 0000000000..0736af3e3d --- /dev/null +++ b/api/src/database/migrations/20240701A-add-tus-data.ts @@ -0,0 +1,15 @@ +import type { Knex } from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.schema.alterTable('directus_files', (table) => { + table.string('tus_id', 64).nullable(); + table.json('tus_data').nullable(); + }); +} + +export async function down(knex: Knex): Promise { + await knex.schema.alterTable('directus_files', (table) => { + table.dropColumn('tus_id'); + table.dropColumn('tus_data'); + }); +} diff --git a/api/src/services/authentication.ts b/api/src/services/authentication.ts index efebb061c8..a82112c55f 100644 --- a/api/src/services/authentication.ts +++ b/api/src/services/authentication.ts @@ -5,7 +5,6 @@ import { useEnv } from '@directus/env'; import { InvalidCredentialsError, InvalidOtpError, - InvalidProviderError, ServiceUnavailableError, UserSuspendedError, } from '@directus/errors'; @@ -124,19 +123,10 @@ export class AuthenticationService { ); }; - if (user?.status !== 'active') { + if (user?.status !== 'active' || user?.provider !== providerName) { emitStatus('fail'); - - if (user?.status === 'suspended') { - await stall(STALL_TIME, timeStart); - throw new UserSuspendedError(); - } else { - await stall(STALL_TIME, timeStart); - throw new InvalidCredentialsError(); - } - } else if (user.provider !== providerName) { await stall(STALL_TIME, timeStart); - throw new InvalidProviderError(); + throw new InvalidCredentialsError(); } const settingsService = new SettingsService({ @@ -435,7 
+425,10 @@ export class AuthenticationService { // Clear expired sessions for the current user await this.knex('directus_sessions') .delete() - .where('user', '=', record.user_id) + .where({ + user: record.user_id, + share: record.share_id, + }) .andWhere('expires', '<', new Date()); return { @@ -494,6 +487,7 @@ export class AuthenticationService { await this.knex('directus_sessions').insert({ token: newSessionToken, user: sessionRecord['user_id'], + share: sessionRecord['share_id'], expires: sessionExpiration, ip: this.accountability?.ip, user_agent: this.accountability?.userAgent, diff --git a/api/src/services/files.ts b/api/src/services/files.ts index e39d661306..bae208942f 100644 --- a/api/src/services/files.ts +++ b/api/src/services/files.ts @@ -1,38 +1,31 @@ import { useEnv } from '@directus/env'; import { ContentTooLargeError, ForbiddenError, InvalidPayloadError, ServiceUnavailableError } from '@directus/errors'; import formatTitle from '@directus/format-title'; -import type { BusboyFileStream, File, PrimaryKey } from '@directus/types'; +import type { BusboyFileStream, File, PrimaryKey, Query } from '@directus/types'; import { toArray } from '@directus/utils'; import type { AxiosResponse } from 'axios'; import encodeURL from 'encodeurl'; -import exif, { type GPSInfoTags, type ImageTags, type IopTags, type PhotoTags } from 'exif-reader'; -import type { IccProfile } from 'icc'; -import { parse as parseIcc } from 'icc'; -import { clone, pick } from 'lodash-es'; +import { clone, cloneDeep } from 'lodash-es'; import { extension } from 'mime-types'; import type { Readable } from 'node:stream'; import { PassThrough as PassThroughStream, Transform as TransformStream } from 'node:stream'; -import { pipeline } from 'node:stream/promises'; import zlib from 'node:zlib'; import path from 'path'; -import sharp from 'sharp'; import url from 'url'; -import { SUPPORTED_IMAGE_METADATA_FORMATS } from '../constants.js'; +import { RESUMABLE_UPLOADS } from '../constants.js'; 
import emitter from '../emitter.js'; import { useLogger } from '../logger.js'; import { validateAccess } from '../permissions/modules/validate-access/validate-access.js'; import { getAxios } from '../request/index.js'; import { getStorage } from '../storage/index.js'; import type { AbstractServiceOptions, MutationOptions } from '../types/index.js'; -import { parseIptc, parseXmp } from '../utils/parse-image-metadata.js'; -import { ItemsService } from './items.js'; +import { extractMetadata } from './files/lib/extract-metadata.js'; +import { ItemsService, type QueryOptions } from './items.js'; const env = useEnv(); const logger = useLogger(); -type Metadata = Partial>; - -export class FilesService extends ItemsService { +export class FilesService extends ItemsService { constructor(options: AbstractServiceOptions) { super('directus_files', options); } @@ -87,7 +80,7 @@ export class FilesService extends ItemsService { path.extname(payload.filename_download!) || (payload.type && '.' + extension(payload.type)) || ''; // The filename_disk is the FINAL filename on disk - payload.filename_disk = primaryKey + (fileExtension || ''); + payload.filename_disk ||= primaryKey + (fileExtension || ''); // Temp filename is used for replacements const tempFilenameDisk = 'temp_' + payload.filename_disk; @@ -163,37 +156,7 @@ export class FilesService extends ItemsService { const { size } = await storage.location(data.storage).stat(payload.filename_disk); payload.filesize = size; - if (SUPPORTED_IMAGE_METADATA_FORMATS.includes(payload.type)) { - const stream = await storage.location(data.storage).read(payload.filename_disk); - const { height, width, description, title, tags, metadata } = await this.getMetadata(stream); - - if (!payload.height && height) { - payload.height = height; - } - - if (!payload.width && width) { - payload.width = width; - } - - if (!payload.metadata && metadata) { - payload.metadata = metadata; - } - - // Note that if this is a replace file upload, the below 
properties are fetched and included in the payload above - // in the `existingFile` variable... so this will ONLY set the values if they're not already set - - if (!payload.description && description) { - payload.description = description; - } - - if (!payload.title && title) { - payload.title = title; - } - - if (!payload.tags && tags) { - payload.tags = tags; - } - } + const metadata = await extractMetadata(data.storage, payload as Parameters[1]); // We do this in a service without accountability. Even if you don't have update permissions to the file, // we still want to be able to set the extracted values from the file on create @@ -202,7 +165,7 @@ export class FilesService extends ItemsService { schema: this.schema, }); - await sudoService.updateOne(primaryKey, payload, { emitEvents: false }); + await sudoService.updateOne(primaryKey, { ...payload, ...metadata }, { emitEvents: false }); if (opts?.emitEvents !== false) { emitter.emitAction( @@ -226,132 +189,6 @@ export class FilesService extends ItemsService { /** * Extract metadata from a buffer's content */ - async getMetadata( - stream: Readable, - allowList: string | string[] = env['FILE_METADATA_ALLOW_LIST'] as string[], - ): Promise { - return new Promise((resolve, reject) => { - pipeline( - stream, - sharp().metadata(async (err, sharpMetadata) => { - if (err) { - reject(err); - return; - } - - const metadata: Metadata = {}; - - if (sharpMetadata.orientation && sharpMetadata.orientation >= 5) { - metadata.height = sharpMetadata.width ?? null; - metadata.width = sharpMetadata.height ?? null; - } else { - metadata.width = sharpMetadata.width ?? null; - metadata.height = sharpMetadata.height ?? 
null; - } - - // Backward-compatible layout as it used to be with 'exifr' - const fullMetadata: { - ifd0?: Partial; - ifd1?: Partial; - exif?: Partial; - gps?: Partial; - interop?: Partial; - icc?: IccProfile; - iptc?: Record; - xmp?: Record; - } = {}; - - if (sharpMetadata.exif) { - try { - const { Image, ThumbnailTags, Iop, GPSInfo, Photo } = (exif as unknown as typeof exif.default)( - sharpMetadata.exif, - ); - - if (Image) { - fullMetadata.ifd0 = Image; - } - - if (ThumbnailTags) { - fullMetadata.ifd1 = ThumbnailTags; - } - - if (Iop) { - fullMetadata.interop = Iop; - } - - if (GPSInfo) { - fullMetadata.gps = GPSInfo; - } - - if (Photo) { - fullMetadata.exif = Photo; - } - } catch (err) { - logger.warn(`Couldn't extract Exif metadata from file`); - logger.warn(err); - } - } - - if (sharpMetadata.icc) { - try { - fullMetadata.icc = parseIcc(sharpMetadata.icc); - } catch (err) { - logger.warn(`Couldn't extract ICC profile data from file`); - logger.warn(err); - } - } - - if (sharpMetadata.iptc) { - try { - fullMetadata.iptc = parseIptc(sharpMetadata.iptc); - } catch (err) { - logger.warn(`Couldn't extract IPTC Photo Metadata from file`); - logger.warn(err); - } - } - - if (sharpMetadata.xmp) { - try { - fullMetadata.xmp = parseXmp(sharpMetadata.xmp); - } catch (err) { - logger.warn(`Couldn't extract XMP data from file`); - logger.warn(err); - } - } - - if (fullMetadata?.iptc?.['Caption'] && typeof fullMetadata.iptc['Caption'] === 'string') { - metadata.description = fullMetadata.iptc?.['Caption']; - } - - if (fullMetadata?.iptc?.['Headline'] && typeof fullMetadata.iptc['Headline'] === 'string') { - metadata.title = fullMetadata.iptc['Headline']; - } - - if (fullMetadata?.iptc?.['Keywords']) { - metadata.tags = fullMetadata.iptc['Keywords'] as string; - } - - if (allowList === '*' || allowList?.[0] === '*') { - metadata.metadata = fullMetadata; - } else { - metadata.metadata = pick(fullMetadata, allowList); - } - - // Fix (incorrectly parsed?) 
values starting / ending with spaces, - // limited to one level and string values only - for (const section of Object.keys(metadata.metadata)) { - for (const [key, value] of Object.entries(metadata.metadata[section])) { - if (typeof value === 'string') { - metadata.metadata[section][key] = value.trim(); - } - } - } - - resolve(metadata); - }), - ); - }); - } /** * Import a single file from an external URL @@ -442,6 +279,26 @@ export class FilesService extends ItemsService { return keys; } + + override async readByQuery(query: Query, opts?: QueryOptions | undefined) { + const filteredQuery = cloneDeep(query); + + if (RESUMABLE_UPLOADS.ENABLED === true) { + const filterPartialUploads = { tus_id: { _null: true } }; + + if (!filteredQuery.filter) { + filteredQuery.filter = filterPartialUploads; + } else if ('_and' in filteredQuery.filter && Array.isArray(filteredQuery.filter['_and'])) { + filteredQuery.filter['_and'].push(filterPartialUploads); + } else { + filteredQuery.filter = { + _and: [filteredQuery.filter, filterPartialUploads], + }; + } + } + + return super.readByQuery(filteredQuery, opts); + } } function decompressResponse(stream: Readable, headers: AxiosResponse['headers']) { diff --git a/api/src/services/files/lib/extract-metadata.ts b/api/src/services/files/lib/extract-metadata.ts new file mode 100644 index 0000000000..1cef6f4c63 --- /dev/null +++ b/api/src/services/files/lib/extract-metadata.ts @@ -0,0 +1,46 @@ +import type { File } from '@directus/types'; +import { SUPPORTED_IMAGE_METADATA_FORMATS } from '../../../constants.js'; +import { getStorage } from '../../../storage/index.js'; +import { getMetadata, type Metadata } from '../utils/get-metadata.js'; + +export async function extractMetadata( + storageLocation: string, + data: Partial & Pick, +): Promise { + const storage = await getStorage(); + const fileMeta: Metadata = {}; + + if (data.type && SUPPORTED_IMAGE_METADATA_FORMATS.includes(data.type)) { + const stream = await 
storage.location(storageLocation).read(data.filename_disk); + const { height, width, description, title, tags, metadata } = await getMetadata(stream); + + // Note that if this is a replace file upload, the below properties are fetched and included in the data above + // in the `existingFile` variable... so this will ONLY set the values if they're not already set + + if (!data.height && height) { + fileMeta.height = height; + } + + if (!data.width && width) { + fileMeta.width = width; + } + + if (!data.metadata && metadata) { + fileMeta.metadata = metadata; + } + + if (!data.description && description) { + fileMeta.description = description; + } + + if (!data.title && title) { + fileMeta.title = title; + } + + if (!data.tags && tags) { + fileMeta.tags = tags; + } + } + + return fileMeta; +} diff --git a/api/src/services/files/utils/get-metadata.ts b/api/src/services/files/utils/get-metadata.ts new file mode 100644 index 0000000000..bf2ef3ef1b --- /dev/null +++ b/api/src/services/files/utils/get-metadata.ts @@ -0,0 +1,142 @@ +import type { File } from '@directus/types'; +import exif, { type GPSInfoTags, type ImageTags, type IopTags, type PhotoTags } from 'exif-reader'; +import { type IccProfile, parse as parseIcc } from 'icc'; +import { pick } from 'lodash-es'; +import type { Readable } from 'node:stream'; +import { pipeline } from 'node:stream/promises'; +import sharp from 'sharp'; +import { useEnv } from '@directus/env'; +import { useLogger } from '../../../logger.js'; +import { parseIptc, parseXmp } from './parse-image-metadata.js'; + +const env = useEnv(); +const logger = useLogger(); + +export type Metadata = Partial>; + +export async function getMetadata( + stream: Readable, + allowList: string | string[] = env['FILE_METADATA_ALLOW_LIST'] as string[], +): Promise { + return new Promise((resolve, reject) => { + pipeline( + stream, + sharp().metadata(async (err, sharpMetadata) => { + if (err) { + reject(err); + return; + } + + const metadata: Metadata = {}; + + 
if (sharpMetadata.orientation && sharpMetadata.orientation >= 5) { + metadata.height = sharpMetadata.width ?? null; + metadata.width = sharpMetadata.height ?? null; + } else { + metadata.width = sharpMetadata.width ?? null; + metadata.height = sharpMetadata.height ?? null; + } + + // Backward-compatible layout as it used to be with 'exifr' + const fullMetadata: { + ifd0?: Partial; + ifd1?: Partial; + exif?: Partial; + gps?: Partial; + interop?: Partial; + icc?: IccProfile; + iptc?: Record; + xmp?: Record; + } = {}; + + if (sharpMetadata.exif) { + try { + const { Image, ThumbnailTags, Iop, GPSInfo, Photo } = (exif as unknown as typeof exif.default)( + sharpMetadata.exif, + ); + + if (Image) { + fullMetadata.ifd0 = Image; + } + + if (ThumbnailTags) { + fullMetadata.ifd1 = ThumbnailTags; + } + + if (Iop) { + fullMetadata.interop = Iop; + } + + if (GPSInfo) { + fullMetadata.gps = GPSInfo; + } + + if (Photo) { + fullMetadata.exif = Photo; + } + } catch (err) { + logger.warn(`Couldn't extract Exif metadata from file`); + logger.warn(err); + } + } + + if (sharpMetadata.icc) { + try { + fullMetadata.icc = parseIcc(sharpMetadata.icc); + } catch (err) { + logger.warn(`Couldn't extract ICC profile data from file`); + logger.warn(err); + } + } + + if (sharpMetadata.iptc) { + try { + fullMetadata.iptc = parseIptc(sharpMetadata.iptc); + } catch (err) { + logger.warn(`Couldn't extract IPTC Photo Metadata from file`); + logger.warn(err); + } + } + + if (sharpMetadata.xmp) { + try { + fullMetadata.xmp = parseXmp(sharpMetadata.xmp); + } catch (err) { + logger.warn(`Couldn't extract XMP data from file`); + logger.warn(err); + } + } + + if (fullMetadata?.iptc?.['Caption'] && typeof fullMetadata.iptc['Caption'] === 'string') { + metadata.description = fullMetadata.iptc?.['Caption']; + } + + if (fullMetadata?.iptc?.['Headline'] && typeof fullMetadata.iptc['Headline'] === 'string') { + metadata.title = fullMetadata.iptc['Headline']; + } + + if (fullMetadata?.iptc?.['Keywords']) { + 
metadata.tags = fullMetadata.iptc['Keywords'] as string; + } + + if (allowList === '*' || allowList?.[0] === '*') { + metadata.metadata = fullMetadata; + } else { + metadata.metadata = pick(fullMetadata, allowList); + } + + // Fix (incorrectly parsed?) values starting / ending with spaces, + // limited to one level and string values only + for (const section of Object.keys(metadata.metadata)) { + for (const [key, value] of Object.entries(metadata.metadata[section])) { + if (typeof value === 'string') { + metadata.metadata[section][key] = value.trim(); + } + } + } + + resolve(metadata); + }), + ); + }); +} diff --git a/api/src/utils/parse-image-metadata.ts b/api/src/services/files/utils/parse-image-metadata.ts similarity index 100% rename from api/src/utils/parse-image-metadata.ts rename to api/src/services/files/utils/parse-image-metadata.ts diff --git a/api/src/services/items.ts b/api/src/services/items.ts index af6feb8924..7c3971f433 100644 --- a/api/src/services/items.ts +++ b/api/src/services/items.ts @@ -43,15 +43,15 @@ export type MutationTracker = { getCount: () => number; }; -export class ItemsService implements AbstractService { - collection: string; +export class ItemsService implements AbstractService { + collection: Collection; knex: Knex; accountability: Accountability | null; eventScope: string; schema: SchemaOverview; cache: Keyv | null; - constructor(collection: string, options: AbstractServiceOptions) { + constructor(collection: Collection, options: AbstractServiceOptions) { this.collection = collection; this.knex = options.knex || getDatabase(); this.accountability = options.accountability || null; diff --git a/api/src/services/server.ts b/api/src/services/server.ts index 6e4557dd61..6a632a73cb 100644 --- a/api/src/services/server.ts +++ b/api/src/services/server.ts @@ -16,6 +16,7 @@ import { SERVER_ONLINE } from '../server.js'; import { getStorage } from '../storage/index.js'; import type { AbstractServiceOptions } from '../types/index.js'; 
import { SettingsService } from './settings.js'; +import { RESUMABLE_UPLOADS } from '../constants.js'; const env = useEnv(); const logger = useLogger(); @@ -113,6 +114,12 @@ export class ServerService { info['websocket'] = false; } + if (RESUMABLE_UPLOADS.ENABLED) { + info['uploads'] = { + chunkSize: RESUMABLE_UPLOADS.CHUNK_SIZE, + }; + } + info['version'] = version; } diff --git a/api/src/services/tus/data-store.ts b/api/src/services/tus/data-store.ts new file mode 100644 index 0000000000..715335ba5d --- /dev/null +++ b/api/src/services/tus/data-store.ts @@ -0,0 +1,290 @@ +import formatTitle from '@directus/format-title'; +import type { TusDriver, ChunkedUploadContext } from '@directus/storage'; +import type { Accountability, File, SchemaOverview } from '@directus/types'; +import { extension } from 'mime-types'; +import { extname } from 'node:path'; +import stream from 'node:stream'; +import { DataStore, ERRORS, Upload } from '@tus/utils'; +import { ItemsService } from '../items.js'; +import { useLogger } from '../../logger.js'; +import getDatabase from '../../database/index.js'; +import { omit } from 'lodash-es'; + +export type TusDataStoreConfig = { + constants: { + ENABLED: boolean; + CHUNK_SIZE: number; + MAX_SIZE: number; + EXPIRATION_TIME: number; + SCHEDULE: string; + }; + /** Storage location name **/ + location: string; + driver: TusDriver; + + schema: SchemaOverview; + accountability: Accountability | undefined; +}; + +export class TusDataStore extends DataStore { + protected chunkSize: number; + protected maxSize: number; + protected expirationTime: number; + protected location: string; + protected storageDriver: TusDriver; + protected schema: SchemaOverview; + protected accountability: Accountability | undefined; + + constructor(config: TusDataStoreConfig) { + super(); + + this.chunkSize = config.constants.CHUNK_SIZE; + this.maxSize = config.constants.MAX_SIZE; + this.expirationTime = config.constants.EXPIRATION_TIME; + this.location = config.location; 
+ this.storageDriver = config.driver; + this.extensions = this.storageDriver.tusExtensions; + this.schema = config.schema; + this.accountability = config.accountability; + } + + public override async create(upload: Upload): Promise { + const logger = useLogger(); + const knex = getDatabase(); + + const itemsService = new ItemsService('directus_files', { + accountability: this.accountability, + schema: this.schema, + knex, + }); + + upload.creation_date = new Date().toISOString(); + + if (!upload.size || !upload.metadata || !upload.metadata['filename_download']) { + throw ERRORS.INVALID_METADATA; + } + + if (!upload.metadata['type']) { + upload.metadata['type'] = 'application/octet-stream'; + } + + if (!upload.metadata['title']) { + upload.metadata['title'] = formatTitle(upload.metadata['filename_download']); + } + + let existingFile: Record | null = null; + + // If the payload contains a primary key, we'll check if the file already exists + if (upload.metadata['id']) { + // If the file you're uploading already exists, we'll consider this upload a replace so we'll fetch the existing file's folder and filename_download + existingFile = + (await knex + .select('folder', 'filename_download', 'filename_disk', 'title', 'description', 'metadata', 'tus_id') + .from('directus_files') + .andWhere({ id: upload.metadata['id'] }) + .first()) ?? null; + + if (existingFile && existingFile['tus_id'] !== null) { + throw ERRORS.INVALID_METADATA; + } + } + + // Is this file a replacement? 
if the file data already exists and we have a primary key + const isReplacement = existingFile !== null && !!upload.metadata['id']; + + if (isReplacement === true && upload.metadata['id']) { + upload.metadata['replace_id'] = upload.metadata['id']; + } + + const fileData: Partial = { + ...omit(upload.metadata, ['id']), + tus_id: upload.id, + tus_data: upload, + filesize: upload.size, + storage: this.location, + }; + + // If no folder is specified, we'll use the default folder from the settings if it exists + if ('folder' in fileData === false) { + const settings = await knex.select('storage_default_folder').from('directus_settings').first(); + + if (settings?.storage_default_folder) { + fileData.folder = settings.storage_default_folder; + } + } + + // If this is a new file upload, we need to generate a new primary key and DB record + const primaryKey = await itemsService.createOne(fileData, { emitEvents: false }); + + const fileExtension = + extname(upload.metadata['filename_download']) || + (upload.metadata['type'] && '.' + extension(upload.metadata['type'])) || + ''; + + // The filename_disk is the FINAL filename on disk + fileData.filename_disk ||= primaryKey + (fileExtension || ''); + + // Temp filename is used for replacements + // const tempFilenameDisk = fileData.tus_id! 
+ (fileExtension || ''); + + // if (isReplacement) { + // upload.metadata['temp_file'] = tempFilenameDisk; + // } + + try { + // If this is a replacement, we'll write the file to a temp location first to ensure we don't overwrite the existing file if something goes wrong + upload = (await this.storageDriver.createChunkedUpload(fileData.filename_disk, upload)) as Upload; + + fileData.tus_data = upload; + + await itemsService.updateOne(primaryKey!, fileData, { emitEvents: false }); + + return upload; + } catch (err) { + logger.warn(`Couldn't create chunked upload for ${fileData.filename_disk}`); + logger.warn(err); + + if (isReplacement) { + await itemsService.updateOne(primaryKey!, { tus_id: null, tus_data: null }, { emitEvents: false }); + } else { + await itemsService.deleteOne(primaryKey!, { emitEvents: false }); + } + + throw ERRORS.UNKNOWN_ERROR; + } + } + + public override async write(readable: stream.Readable, tus_id: string, offset: number): Promise { + const fileData = await this.getFileById(tus_id); + const filePath = fileData.filename_disk!; + + const sudoService = new ItemsService('directus_files', { + schema: this.schema, + }); + + try { + const newOffset = await this.storageDriver.writeChunk( + filePath, + readable, + offset, + fileData.tus_data as ChunkedUploadContext, + ); + + await sudoService.updateOne(fileData.id!, { + tus_data: { + ...fileData.tus_data, + offset: newOffset, + }, + }); + + if (Number(fileData.filesize) === newOffset) { + try { + await this.storageDriver.finishChunkedUpload(filePath, fileData.tus_data as ChunkedUploadContext); + } catch (err) { + await this.remove(fileData.tus_id!); + throw err; + } + + const isReplacement = Boolean(fileData.tus_data?.['metadata']?.['replace_id']); + + // If the file is a replacement, delete the old files, and upgrade the temp file + if (isReplacement === true) { + const replaceId = fileData.tus_data!['metadata']!['replace_id'] as string; + const replaceData = await sudoService.readOne(replaceId, { 
fields: ['filename_disk'] }); + + // delete the previously saved file and thumbnails to ensure they're generated fresh + for await (const partPath of this.storageDriver.list(replaceId)) { + await this.storageDriver.delete(partPath); + } + + // Upgrade the temp file to the final filename + await this.storageDriver.move(filePath, replaceData.filename_disk); + } + } + + return newOffset; + } catch (err: any) { + if ('status_code' in err && err.status_code === 500) { + throw err; + } + + throw ERRORS.FILE_WRITE_ERROR; + } + } + + override async remove(tus_id: string): Promise { + const sudoService = new ItemsService('directus_files', { + schema: this.schema, + }); + + const fileData = await this.getFileById(tus_id); + await this.storageDriver.deleteChunkedUpload(fileData.filename_disk!, fileData.tus_data as ChunkedUploadContext); + await sudoService.deleteOne(fileData.id!); + } + + override async deleteExpired(): Promise { + const sudoService = new ItemsService('directus_files', { + schema: this.schema, + }); + + const now = new Date(); + const toDelete: Promise[] = []; + + const uploadFiles = await sudoService.readByQuery({ + fields: ['modified_on', 'tus_id', 'tus_data'], + filter: { tus_id: { _nnull: true } }, + }); + + if (!uploadFiles) return 0; + + for (const fileData of uploadFiles) { + if ( + fileData && + fileData.tus_data && + this.getExpiration() > 0 && + fileData.tus_data['size'] !== fileData.tus_data['offset'] && + fileData.modified_on + ) { + const modified = new Date(fileData.modified_on); + const expires = new Date(modified.getTime() + this.getExpiration()); + + if (now > expires) { + toDelete.push(this.remove(fileData.tus_id!)); + } + } + } + + await Promise.allSettled(toDelete); + return toDelete.length; + } + + override getExpiration(): number { + return this.expirationTime; + } + + override async getUpload(id: string): Promise { + const fileData = await this.getFileById(id); + + return new Upload(fileData.tus_data as any); + } + + protected async 
getFileById(tus_id: string) { + const itemsService = new ItemsService('directus_files', { + schema: this.schema, + }); + + const results = await itemsService.readByQuery({ + filter: { + tus_id: { _eq: tus_id }, + storage: { _eq: this.location }, + ...(this.accountability?.user ? { uploaded_by: { _eq: this.accountability.user } } : {}), + }, + }); + + if (!results || !results[0]) { + throw ERRORS.FILE_NOT_FOUND; + } + + return results[0] as File; + } +} diff --git a/api/src/services/tus/index.ts b/api/src/services/tus/index.ts new file mode 100644 index 0000000000..88f915a015 --- /dev/null +++ b/api/src/services/tus/index.ts @@ -0,0 +1,2 @@ +export * from './server.js'; +export * from './lockers.js'; diff --git a/api/src/services/tus/lockers.ts b/api/src/services/tus/lockers.ts new file mode 100644 index 0000000000..ac9de170a1 --- /dev/null +++ b/api/src/services/tus/lockers.ts @@ -0,0 +1,99 @@ +import { ERRORS, type Lock, type Locker, type RequestRelease } from '@tus/utils'; +import { useLock } from '../../lock/index.js'; +import type { Kv } from '@directus/memory'; +import { waitTimeout } from './utils/wait-timeout.js'; + +/** + * TusLocker is an implementation of the Locker interface that manages locks in memory or using Redis. + * This class is designed for exclusive access control over resources, often used in scenarios like upload management. + * + * Locking Behavior: + * - When the `lock` method is invoked for an already locked resource, the `cancelReq` callback is called. + * This signals to the current lock holder that another process is requesting the lock, encouraging them to release it as soon as possible. + * - The lock attempt continues until the specified timeout is reached. If the timeout expires and the lock is still not + * available, an error is thrown to indicate lock acquisition failure. 
+ * + * Lock Acquisition and Release: + * - The `lock` method implements a wait mechanism, allowing a lock request to either succeed when the lock becomes available, + * or fail after the timeout period. + * - The `unlock` method releases a lock, making the resource available for other requests. + */ +export class TusLocker implements Locker { + lockTimeout: number; + acquireTimeout: number; + + constructor(options?: { acquireLockTimeout: number; lockTimeout: number }) { + this.acquireTimeout = options?.acquireLockTimeout ?? 1000 * 30; + this.lockTimeout = options?.lockTimeout ?? 1000 * 60; + } + + newLock(id: string) { + return new KvLock(id, this.lockTimeout, this.acquireTimeout); + } +} + +export class KvLock implements Lock { + private kv: Kv; + constructor( + private id: string, + private lockTimeout: number = 1000 * 60, + private acquireTimeout: number = 1000 * 30, + ) { + this.kv = useLock(); + } + + async lock(cancelReq: RequestRelease) { + const abortController = new AbortController(); + + const lock = await Promise.race([ + waitTimeout(this.acquireTimeout, abortController.signal), + this.acquireLock(this.id, cancelReq, abortController.signal), + ]); + + abortController.abort(); + + if (!lock) { + throw ERRORS.ERR_LOCK_TIMEOUT; + } + } + + protected async acquireLock(id: string, requestRelease: RequestRelease, signal: AbortSignal): Promise { + if (signal.aborted) { + return false; + } + + const lockTime = await this.kv.get(id); + const now = Date.now(); + + if (!lockTime || Number(lockTime) < now - this.lockTimeout) { + await this.kv.set(id, now); + return true; + } + + await requestRelease(); + + return await new Promise((resolve, reject) => { + // Using setImmediate to: + // 1. Prevent stack overflow by deferring recursive calls to the next event loop iteration. + // 2. Allow event loop to process other pending events, maintaining server responsiveness. + // 3. Ensure fairness in lock acquisition by giving other requests a chance to acquire the lock. 
+ setImmediate(() => { + this.acquireLock(id, requestRelease, signal).then(resolve).catch(reject); + }); + }); + } + + async unlock() { + await this.kv.delete(this.id); + } +} + +let _locker: Locker | undefined = undefined; + +export function getTusLocker() { + if (!_locker) { + _locker = new TusLocker(); + } + + return _locker; +} diff --git a/api/src/services/tus/server.ts b/api/src/services/tus/server.ts new file mode 100644 index 0000000000..05a67be8cf --- /dev/null +++ b/api/src/services/tus/server.ts @@ -0,0 +1,100 @@ +/** + * TUS implementation for resumable uploads + * + * https://tus.io/ + */ +import { useEnv } from '@directus/env'; +import type { Driver, TusDriver } from '@directus/storage'; +import { supportsTus } from '@directus/storage'; +import type { Accountability, File, SchemaOverview } from '@directus/types'; +import { toArray } from '@directus/utils'; +import { Server } from '@tus/server'; +import { RESUMABLE_UPLOADS } from '../../constants.js'; +import { getStorage } from '../../storage/index.js'; +import { extractMetadata } from '../files/lib/extract-metadata.js'; +import { ItemsService } from '../index.js'; +import { TusDataStore } from './data-store.js'; +import { getTusLocker } from './lockers.js'; +import { pick } from 'lodash-es'; + +type Context = { + schema: SchemaOverview; + accountability?: Accountability | undefined; +}; + +async function createTusStore(context: Context) { + const env = useEnv(); + const storage = await getStorage(); + const location = toArray(env['STORAGE_LOCATIONS'] as string)[0]!; + const driver: Driver | TusDriver = storage.location(location); + + if (!supportsTus(driver)) { + throw new Error(`Storage location ${location} does not support the TUS protocol`); + } + + return new TusDataStore({ + constants: RESUMABLE_UPLOADS, + accountability: context.accountability, + schema: context.schema, + location, + driver, + }); +} + +export async function createTusServer(context: Context) { + const env = useEnv(); + const 
store = await createTusStore(context); + + return new Server({ + path: '/files/tus', + datastore: store, + locker: getTusLocker(), + maxSize: RESUMABLE_UPLOADS.MAX_SIZE, + async onUploadFinish(req: any, res, upload) { + const service = new ItemsService('directus_files', { + schema: req.schema, + }); + + const file = ( + await service.readByQuery({ + filter: { tus_id: { _eq: upload.id } }, + limit: 1, + }) + )[0]; + + if (!file) return res; + + // update metadata when file is replaced + if (file.tus_data?.['metadata']?.['replace_id']) { + const newFile = await service.readOne(file.tus_data['metadata']['replace_id']); + const updateFields = pick(file, ['filename_download', 'filesize', 'type']); + + const metadata = await extractMetadata(newFile.storage, { + ...newFile, + ...updateFields, + }); + + await service.updateOne(file.tus_data['metadata']['replace_id'], { + ...updateFields, + ...metadata, + }); + + await service.deleteOne(file.id); + } else { + const metadata = await extractMetadata(file.storage, file); + + await service.updateOne(file.id, { + ...metadata, + tus_id: null, + tus_data: null, + }); + } + + return res; + }, + generateUrl(_req, opts) { + return env['PUBLIC_URL'] + '/files/tus/' + opts.id; + }, + relativeLocation: String(env['PUBLIC_URL']).startsWith('http'), + }); +} diff --git a/api/src/services/tus/utils/wait-timeout.ts b/api/src/services/tus/utils/wait-timeout.ts new file mode 100644 index 0000000000..ddd852103a --- /dev/null +++ b/api/src/services/tus/utils/wait-timeout.ts @@ -0,0 +1,15 @@ +export function waitTimeout(timeout: number, signal: AbortSignal) { + return new Promise((resolve) => { + const handler = setTimeout(() => { + resolve(false); + }, timeout); + + const abortListener = () => { + clearTimeout(handler); + signal.removeEventListener('abort', abortListener); + resolve(false); + }; + + signal.addEventListener('abort', abortListener); + }); +} diff --git a/api/src/storage/register-locations.test.ts 
b/api/src/storage/register-locations.test.ts index 986df8019f..ecdd4a1112 100644 --- a/api/src/storage/register-locations.test.ts +++ b/api/src/storage/register-locations.test.ts @@ -9,10 +9,12 @@ vi.mock('@directus/env'); vi.mock('../utils/get-config-from-env.js'); +vi.mock('../constants.js', () => ({ RESUMABLE_UPLOADS: { CHUNK_SIZE: 9999 } })); + let sample: { options: { [location: string]: { - [key: string]: string; + [key: string]: any; }; }; locations: string[]; @@ -32,6 +34,7 @@ beforeEach(() => { sample.options[`STORAGE_${location.toUpperCase()}_`] = { driver: randWord(), + tus: { chunkSize: 9999 }, }; keys.forEach((key, index) => (sample.options[`STORAGE_${location.toUpperCase()}_`]![key] = values[index]!)); diff --git a/api/src/storage/register-locations.ts b/api/src/storage/register-locations.ts index b08a85ff5f..3bf59fb081 100644 --- a/api/src/storage/register-locations.ts +++ b/api/src/storage/register-locations.ts @@ -1,6 +1,7 @@ import { useEnv } from '@directus/env'; import type { StorageManager } from '@directus/storage'; import { toArray } from '@directus/utils'; +import { RESUMABLE_UPLOADS } from '../constants.js'; import { getConfigFromEnv } from '../utils/get-config-from-env.js'; export const registerLocations = async (storage: StorageManager) => { @@ -8,10 +9,14 @@ export const registerLocations = async (storage: StorageManager) => { const locations = toArray(env['STORAGE_LOCATIONS'] as string); + const tus = { + chunkSize: RESUMABLE_UPLOADS.CHUNK_SIZE, + }; + locations.forEach((location: string) => { location = location.trim(); const driverConfig = getConfigFromEnv(`STORAGE_${location.toUpperCase()}_`); const { driver, ...options } = driverConfig; - storage.registerLocation(location, { driver, options }); + storage.registerLocation(location, { driver, options: { ...options, tus } }); }); }; diff --git a/api/src/utils/verify-session-jwt.ts b/api/src/utils/verify-session-jwt.ts index 720df4c626..2ef423488c 100644 --- 
a/api/src/utils/verify-session-jwt.ts +++ b/api/src/utils/verify-session-jwt.ts @@ -15,7 +15,8 @@ export async function verifySessionJWT(payload: DirectusTokenPayload) { .from('directus_sessions') .where({ token: payload['session'], - user: payload['id'], + user: payload['id'] || null, + share: payload['share'] || null, }) .andWhere('expires', '>=', new Date()) .first(); diff --git a/api/src/websocket/controllers/base.ts b/api/src/websocket/controllers/base.ts index 3798549fdc..087db8ca1a 100644 --- a/api/src/websocket/controllers/base.ts +++ b/api/src/websocket/controllers/base.ts @@ -137,14 +137,15 @@ export default abstract class SocketController { const context: UpgradeContext = { request, socket, head }; const sessionCookieName = env['SESSION_COOKIE_NAME'] as string; - if (cookies[sessionCookieName]) { - const token = cookies[sessionCookieName] as string; - await this.handleTokenUpgrade(context, token); - return; - } + if (this.authentication.mode === 'strict' || query['access_token'] || cookies[sessionCookieName]) { + let token: string | null = null; + + if (typeof query['access_token'] === 'string') { + token = query['access_token']; + } else if (typeof cookies[sessionCookieName] === 'string') { + token = cookies[sessionCookieName] ?? 
null; + } - if (this.authentication.mode === 'strict') { - const token = query['access_token'] as string; await this.handleTokenUpgrade(context, token); return; } @@ -161,18 +162,21 @@ export default abstract class SocketController { }); } - protected async handleTokenUpgrade({ request, socket, head }: UpgradeContext, token: string) { - let accountability: Accountability | null, expires_at: number | null; + protected async handleTokenUpgrade({ request, socket, head }: UpgradeContext, token: string | null) { + let accountability: Accountability | null = null; + let expires_at: number | null = null; - try { - accountability = await getAccountabilityForToken(token); - expires_at = getExpiresAtForToken(token); - } catch { - accountability = null; - expires_at = null; + if (token) { + try { + accountability = await getAccountabilityForToken(token); + expires_at = getExpiresAtForToken(token); + } catch { + accountability = null; + expires_at = null; + } } - if (!accountability || !accountability.user) { + if (!token || !accountability || !accountability.user) { logger.debug('WebSocket upgrade denied - ' + JSON.stringify(accountability || 'invalid')); socket.write('HTTP/1.1 401 Unauthorized\r\n\r\n'); socket.destroy(); diff --git a/app/package.json b/app/package.json index 04374f81d6..48fac8e38e 100644 --- a/app/package.json +++ b/app/package.json @@ -56,7 +56,7 @@ "@editorjs/image": "2.9.0", "@editorjs/inline-code": "1.5.0", "@editorjs/nested-list": "1.4.2", - "@editorjs/paragraph": "2.11.4", + "@editorjs/paragraph": "2.11.5", "@editorjs/quote": "2.6.0", "@editorjs/raw": "2.5.0", "@editorjs/table": "2.3.0", @@ -90,7 +90,7 @@ "@types/dompurify": "3.0.5", "@types/file-saver": "2.0.7", "@types/geojson": "7946.0.14", - "@types/lodash": "4.17.5", + "@types/lodash": "4.17.6", "@types/mapbox__mapbox-gl-draw": "1.4.6", "@types/mapbox__mapbox-gl-geocoder": "4.7.7", "@types/qrcode": "1.5.5", @@ -102,7 +102,7 @@ "@vue/test-utils": "2.4.6", "@vueuse/core": "10.9.0", 
"@vueuse/router": "10.9.0", - "apexcharts": "3.49.1", + "apexcharts": "3.49.2", "axios": "1.7.2", "base-64": "1.0.0", "caret-pos": "2.0.0", @@ -119,7 +119,7 @@ "file-saver": "2.0.5", "flatpickr": "4.6.13", "geojson": "0.5.0", - "happy-dom": "14.12.0", + "happy-dom": "14.12.3", "histoire": "0.17.17", "html-entities": "2.5.2", "json-to-graphql-query": "2.2.5", @@ -140,13 +140,14 @@ "sass": "1.77.5", "semver": "7.6.2", "tinymce": "7.1.1", + "tus-js-client": "4.1.0", "typescript": "5.4.5", "vite": "5.2.11", "vitest": "1.5.3", "vue": "3.4.27", "vue-i18n": "9.13.1", - "vue-router": "4.3.3", - "vue-tsc": "2.0.19", + "vue-router": "4.4.0", + "vue-tsc": "2.0.22", "vuedraggable": "4.1.0", "wellknown": "0.5.0" } diff --git a/app/src/components/v-progress-linear.vue b/app/src/components/v-progress-linear.vue index 263bf3f8e9..1b1baf6280 100644 --- a/app/src/components/v-progress-linear.vue +++ b/app/src/components/v-progress-linear.vue @@ -94,7 +94,7 @@ const color = computed(() => { left: 0; height: 100%; background-color: var(--v-progress-linear-color, var(--theme--foreground)); - transition: width 400ms ease-in-out; + transition: width 200ms ease-in-out; } &.absolute { diff --git a/app/src/components/v-upload.vue b/app/src/components/v-upload.vue index 3602ac4c28..f4a279eea5 100644 --- a/app/src/components/v-upload.vue +++ b/app/src/components/v-upload.vue @@ -5,8 +5,15 @@ import { unexpectedError } from '@/utils/unexpected-error'; import { uploadFile } from '@/utils/upload-file'; import { uploadFiles } from '@/utils/upload-files'; import DrawerFiles from '@/views/private/components/drawer-files.vue'; +import { sum } from 'lodash'; import { computed, ref } from 'vue'; import { useI18n } from 'vue-i18n'; +import type { Upload } from 'tus-js-client'; + +export type UploadController = { + start(): void; + abort(): void; +}; interface Props { multiple?: boolean; @@ -24,10 +31,15 @@ const props = withDefaults(defineProps(), { fromUser: true, }); -const emit = 
defineEmits(['input']); +const emit = defineEmits<{ + input: [files: null | File | File[]]; + start: [controller: UploadController]; +}>(); const { t } = useI18n(); +let uploadController: Upload | null = null; + const { uploading, progress, upload, onBrowseSelect, done, numberOfFiles } = useUpload(); const { onDragEnter, onDragLeave, onDrop, dragging } = useDragging(); const { url, isValidURL, loading: urlLoading, importFromURL } = useURLImport(); @@ -70,10 +82,45 @@ function useUpload() { numberOfFiles.value = files.length; if (props.multiple === true) { + const fileSizes = Array.from(files).map((file) => file.size); + const totalBytes = sum(fileSizes); + const fileControllers: (UploadController | null)[] = new Array(files.length).fill(null); + + const controller = { + start() { + fileControllers.forEach((controller) => controller?.start()); + }, + abort() { + fileControllers.forEach((controller) => controller?.abort()); + }, + }; + const uploadedFiles = await uploadFiles(Array.from(files), { - onProgressChange: (percentage) => { - progress.value = Math.round(percentage.reduce((acc, cur) => (acc += cur)) / files.length); - done.value = percentage.filter((p) => p === 100).length; + onProgressChange: (percentages) => { + progress.value = Math.round( + (sum(fileSizes.map((total, i) => total * (percentages[i]! 
/ 100))) / totalBytes) * 100, + ); + + const doneIndices = percentages + .map((p, i) => [p, i]) + .filter(([p]) => p === 100) + .map(([, i]) => i!); + + done.value = doneIndices.length; + + // Nullify controller for done uploads, to prevent resuming after pausing + for (const idx of doneIndices) { + if (fileControllers[idx]) fileControllers[idx] = null; + } + }, + onChunkedUpload: (controllers) => { + controllers.forEach((controller, i) => (fileControllers[i] = controller)); + uploadController = controller as Upload; + + if (controllers.every((c) => c !== null)) { + // Only emit start once every upload started + emit('start', controller); + } }, preset, }); @@ -85,11 +132,16 @@ function useUpload() { progress.value = percentage; done.value = percentage === 100 ? 1 : 0; }, + onChunkedUpload: (controller) => { + uploadController = controller; + emit('start', controller); + }, fileId: props.fileId, preset, }); uploadedFile && emit('input', uploadedFile); + uploadController = null; } } catch (error) { unexpectedError(error); @@ -226,6 +278,12 @@ function useURLImport() { function openFileBrowser() { input.value?.click(); } + +function abort() { + uploadController?.abort(); +} + +defineExpose({ abort });
+
+ + {{ t('expand') }} + + / + + +
+ {{ t('no_collections_copy_admin') }} @@ -205,9 +240,11 @@ async function onSort(updates: Collection[], removeGroup = false) { @@ -247,6 +284,7 @@ async function onSort(updates: Collection[], removeGroup = false) { :collection="collection" :collections="systemCollections" :visibility-tree="findVisibilityChild(collection.collection)!" + :is-collapsed="false" disable-drag /> @@ -319,4 +357,34 @@ async function onSort(updates: Collection[], removeGroup = false) { .db-only { margin-bottom: 16px; } + +.inline-header { + position: sticky; + top: var(--layout-offset-top); + z-index: 4; + display: flex; + align-items: center; + justify-content: end; + width: 100%; + height: 44px; + padding: 0 8px; + background-color: var(--theme--background); + border-top: var(--theme--border-width) solid var(--theme--border-color-subdued); + border-bottom: var(--theme--border-width) solid var(--theme--border-color-subdued); + box-shadow: 0 0 0 2px var(--theme--background); +} + +.expand-collapse-button { + color: var(--theme--foreground-subdued); + + button { + color: var(--theme--foreground-subdued); + transition: color var(--fast) var(--transition); + } + + button:hover { + color: var(--theme--foreground); + transition: none; + } +} diff --git a/app/src/modules/settings/routes/data-model/collections/components/collection-item.vue b/app/src/modules/settings/routes/data-model/collections/components/collection-item.vue index f63572985e..a1aad47475 100644 --- a/app/src/modules/settings/routes/data-model/collections/components/collection-item.vue +++ b/app/src/modules/settings/routes/data-model/collections/components/collection-item.vue @@ -1,26 +1,25 @@