@directus/api 13.1.0-beta.0 → 13.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/dist/__utils__/snapshots.js +0 -9
  2. package/dist/app.js +0 -2
  3. package/dist/controllers/files.js +1 -1
  4. package/dist/database/helpers/index.d.ts +2 -0
  5. package/dist/database/helpers/index.js +2 -0
  6. package/dist/database/helpers/sequence/dialects/default.d.ts +3 -0
  7. package/dist/database/helpers/sequence/dialects/default.js +3 -0
  8. package/dist/database/helpers/sequence/dialects/postgres.d.ts +9 -0
  9. package/dist/database/helpers/sequence/dialects/postgres.js +10 -0
  10. package/dist/database/helpers/sequence/index.d.ts +7 -0
  11. package/dist/database/helpers/sequence/index.js +7 -0
  12. package/dist/database/helpers/sequence/types.d.ts +5 -0
  13. package/dist/database/helpers/sequence/types.js +6 -0
  14. package/dist/database/index.js +8 -0
  15. package/dist/database/migrations/20230721A-require-shares-fields.js +45 -16
  16. package/dist/database/system-data/fields/collections.yaml +0 -19
  17. package/dist/env.d.ts +1 -1
  18. package/dist/env.js +18 -15
  19. package/dist/middleware/respond.js +0 -20
  20. package/dist/services/activity.js +4 -3
  21. package/dist/services/assets.js +17 -1
  22. package/dist/services/files.js +58 -1
  23. package/dist/services/import-export.js +16 -0
  24. package/dist/services/items.js +28 -3
  25. package/dist/services/users.d.ts +4 -0
  26. package/dist/services/users.js +19 -0
  27. package/dist/types/collection.d.ts +0 -1
  28. package/dist/types/items.d.ts +1 -0
  29. package/dist/utils/sanitize-query.js +0 -3
  30. package/dist/utils/validate-query.js +0 -1
  31. package/dist/websocket/controllers/base.d.ts +1 -0
  32. package/dist/websocket/controllers/base.js +16 -0
  33. package/package.json +15 -15
  34. package/dist/controllers/branches.d.ts +0 -2
  35. package/dist/controllers/branches.js +0 -190
  36. package/dist/database/migrations/20230823A-add-content-versioning.d.ts +0 -3
  37. package/dist/database/migrations/20230823A-add-content-versioning.js +0 -26
  38. package/dist/database/system-data/fields/branches.yaml +0 -19
  39. package/dist/services/branches.d.ts +0 -25
  40. package/dist/services/branches.js +0 -205
@@ -13,7 +13,6 @@ export const snapshotBeforeCreateCollection = {
13
13
  item_duplication_fields: null,
14
14
  note: null,
15
15
  singleton: false,
16
- branches_enabled: false,
17
16
  translations: {},
18
17
  },
19
18
  schema: {
@@ -87,7 +86,6 @@ export const snapshotCreateCollection = {
87
86
  item_duplication_fields: null,
88
87
  note: null,
89
88
  singleton: false,
90
- branches_enabled: false,
91
89
  translations: {},
92
90
  },
93
91
  schema: {
@@ -107,7 +105,6 @@ export const snapshotCreateCollection = {
107
105
  item_duplication_fields: null,
108
106
  note: null,
109
107
  singleton: false,
110
- branches_enabled: false,
111
108
  translations: {},
112
109
  },
113
110
  schema: {
@@ -127,7 +124,6 @@ export const snapshotCreateCollection = {
127
124
  item_duplication_fields: null,
128
125
  note: null,
129
126
  singleton: false,
130
- branches_enabled: false,
131
127
  translations: {},
132
128
  },
133
129
  schema: {
@@ -291,7 +287,6 @@ export const snapshotCreateCollectionNotNested = {
291
287
  item_duplication_fields: null,
292
288
  note: null,
293
289
  singleton: false,
294
- branches_enabled: false,
295
290
  translations: {},
296
291
  },
297
292
  schema: {
@@ -311,7 +306,6 @@ export const snapshotCreateCollectionNotNested = {
311
306
  item_duplication_fields: null,
312
307
  note: null,
313
308
  singleton: false,
314
- branches_enabled: false,
315
309
  translations: {},
316
310
  },
317
311
  schema: {
@@ -430,7 +424,6 @@ export const snapshotBeforeDeleteCollection = {
430
424
  item_duplication_fields: null,
431
425
  note: null,
432
426
  singleton: false,
433
- branches_enabled: false,
434
427
  translations: {},
435
428
  },
436
429
  schema: {
@@ -450,7 +443,6 @@ export const snapshotBeforeDeleteCollection = {
450
443
  item_duplication_fields: null,
451
444
  note: null,
452
445
  singleton: false,
453
- branches_enabled: false,
454
446
  translations: {},
455
447
  },
456
448
  schema: {
@@ -470,7 +462,6 @@ export const snapshotBeforeDeleteCollection = {
470
462
  item_duplication_fields: null,
471
463
  note: null,
472
464
  singleton: false,
473
- branches_enabled: false,
474
465
  translations: {},
475
466
  },
476
467
  schema: {
package/dist/app.js CHANGED
@@ -10,7 +10,6 @@ import { registerAuthProviders } from './auth.js';
10
10
  import activityRouter from './controllers/activity.js';
11
11
  import assetsRouter from './controllers/assets.js';
12
12
  import authRouter from './controllers/auth.js';
13
- import branchesRouter from './controllers/branches.js';
14
13
  import collectionsRouter from './controllers/collections.js';
15
14
  import dashboardsRouter from './controllers/dashboards.js';
16
15
  import extensionsRouter from './controllers/extensions.js';
@@ -200,7 +199,6 @@ export default async function createApp() {
200
199
  app.use('/graphql', graphqlRouter);
201
200
  app.use('/activity', activityRouter);
202
201
  app.use('/assets', assetsRouter);
203
- app.use('/branches', branchesRouter);
204
202
  app.use('/collections', collectionsRouter);
205
203
  app.use('/dashboards', dashboardsRouter);
206
204
  app.use('/extensions', extensionsRouter);
@@ -76,8 +76,8 @@ export const multipartHandler = (req, res, next) => {
76
76
  if (!payload.title) {
77
77
  payload.title = formatTitle(path.parse(filename).name);
78
78
  }
79
- payload.filename_download = filename;
80
79
  }
80
+ payload.filename_download = filename;
81
81
  const payloadWithRequiredFields = {
82
82
  ...payload,
83
83
  type: mimeType,
@@ -4,10 +4,12 @@ import * as dateHelpers from './date/index.js';
4
4
  import * as fnHelpers from './fn/index.js';
5
5
  import * as geometryHelpers from './geometry/index.js';
6
6
  import * as schemaHelpers from './schema/index.js';
7
+ import * as sequenceHelpers from './sequence/index.js';
7
8
  export declare function getHelpers(database: Knex): {
8
9
  date: dateHelpers.mysql | dateHelpers.postgres | dateHelpers.mssql | dateHelpers.sqlite | dateHelpers.oracle;
9
10
  st: geometryHelpers.mysql | geometryHelpers.postgres | geometryHelpers.mssql | geometryHelpers.sqlite | geometryHelpers.oracle | geometryHelpers.redshift;
10
11
  schema: schemaHelpers.mysql | schemaHelpers.cockroachdb | schemaHelpers.mssql | schemaHelpers.postgres | schemaHelpers.sqlite | schemaHelpers.oracle;
12
+ sequence: sequenceHelpers.mysql | sequenceHelpers.postgres;
11
13
  };
12
14
  export declare function getFunctions(database: Knex, schema: SchemaOverview): fnHelpers.mysql | fnHelpers.postgres | fnHelpers.mssql | fnHelpers.sqlite | fnHelpers.oracle;
13
15
  export type Helpers = ReturnType<typeof getHelpers>;
@@ -3,12 +3,14 @@ import * as dateHelpers from './date/index.js';
3
3
  import * as fnHelpers from './fn/index.js';
4
4
  import * as geometryHelpers from './geometry/index.js';
5
5
  import * as schemaHelpers from './schema/index.js';
6
+ import * as sequenceHelpers from './sequence/index.js';
6
7
  export function getHelpers(database) {
7
8
  const client = getDatabaseClient(database);
8
9
  return {
9
10
  date: new dateHelpers[client](database),
10
11
  st: new geometryHelpers[client](database),
11
12
  schema: new schemaHelpers[client](database),
13
+ sequence: new sequenceHelpers[client](database),
12
14
  };
13
15
  }
14
16
  export function getFunctions(database, schema) {
@@ -0,0 +1,3 @@
1
+ import { AutoSequenceHelper } from '../types.js';
2
+ export declare class AutoIncrementHelperDefault extends AutoSequenceHelper {
3
+ }
@@ -0,0 +1,3 @@
1
+ import { AutoSequenceHelper } from '../types.js';
2
+ export class AutoIncrementHelperDefault extends AutoSequenceHelper {
3
+ }
@@ -0,0 +1,9 @@
1
+ import type { Knex } from 'knex';
2
+ import { AutoSequenceHelper } from '../types.js';
3
+ export declare class AutoIncrementHelperPostgres extends AutoSequenceHelper {
4
+ /**
5
+ * Resets the auto increment sequence for a table based on the max value of the PK column.
6
+ * The sequence name is determined using a sub query.
7
+ */
8
+ resetAutoIncrementSequence(table: string, column: string): Promise<Knex.Raw | void>;
9
+ }
@@ -0,0 +1,10 @@
1
+ import { AutoSequenceHelper } from '../types.js';
2
+ export class AutoIncrementHelperPostgres extends AutoSequenceHelper {
3
+ /**
4
+ * Resets the auto increment sequence for a table based on the max value of the PK column.
5
+ * The sequence name of determined using a sub query.
6
+ */
7
+ async resetAutoIncrementSequence(table, column) {
8
+ return await this.knex.raw(`WITH sequence_infos AS (SELECT pg_get_serial_sequence('${table}', '${column}') AS seq_name, MAX(${column}) as max_val FROM ${table}) SELECT SETVAL(seq_name, max_val) FROM sequence_infos;`);
9
+ }
10
+ }
@@ -0,0 +1,7 @@
1
+ export { AutoIncrementHelperPostgres as postgres } from './dialects/postgres.js';
2
+ export { AutoIncrementHelperDefault as mysql } from './dialects/default.js';
3
+ export { AutoIncrementHelperDefault as cockroachdb } from './dialects/default.js';
4
+ export { AutoIncrementHelperDefault as redshift } from './dialects/default.js';
5
+ export { AutoIncrementHelperDefault as oracle } from './dialects/default.js';
6
+ export { AutoIncrementHelperDefault as sqlite } from './dialects/default.js';
7
+ export { AutoIncrementHelperDefault as mssql } from './dialects/default.js';
@@ -0,0 +1,7 @@
1
+ export { AutoIncrementHelperPostgres as postgres } from './dialects/postgres.js';
2
+ export { AutoIncrementHelperDefault as mysql } from './dialects/default.js';
3
+ export { AutoIncrementHelperDefault as cockroachdb } from './dialects/default.js';
4
+ export { AutoIncrementHelperDefault as redshift } from './dialects/default.js';
5
+ export { AutoIncrementHelperDefault as oracle } from './dialects/default.js';
6
+ export { AutoIncrementHelperDefault as sqlite } from './dialects/default.js';
7
+ export { AutoIncrementHelperDefault as mssql } from './dialects/default.js';
@@ -0,0 +1,5 @@
1
+ import type { Knex } from 'knex';
2
+ import { DatabaseHelper } from '../types.js';
3
+ export declare class AutoSequenceHelper extends DatabaseHelper {
4
+ resetAutoIncrementSequence(_table: string, _column: string): Promise<Knex.Raw | void>;
5
+ }
@@ -0,0 +1,6 @@
1
+ import { DatabaseHelper } from '../types.js';
2
+ export class AutoSequenceHelper extends DatabaseHelper {
3
+ async resetAutoIncrementSequence(_table, _column) {
4
+ return;
5
+ }
6
+ }
@@ -43,6 +43,14 @@ export default function getDatabase() {
43
43
  requiredEnvVars.push('DB_CONNECTION_STRING');
44
44
  }
45
45
  break;
46
+ case 'mysql':
47
+ if (!env['DB_SOCKET_PATH']) {
48
+ requiredEnvVars.push('DB_HOST', 'DB_PORT', 'DB_DATABASE', 'DB_USER', 'DB_PASSWORD');
49
+ }
50
+ else {
51
+ requiredEnvVars.push('DB_DATABASE', 'DB_USER', 'DB_PASSWORD', 'DB_SOCKET_PATH');
52
+ }
53
+ break;
46
54
  case 'mssql':
47
55
  if (!env['DB_TYPE'] || env['DB_TYPE'] === 'default') {
48
56
  requiredEnvVars.push('DB_HOST', 'DB_PORT', 'DB_DATABASE', 'DB_USER', 'DB_PASSWORD');
@@ -1,28 +1,57 @@
1
+ import { createInspector } from '@directus/schema';
2
+ import logger from '../../logger.js';
3
+ import { getHelpers } from '../helpers/index.js';
1
4
  export async function up(knex) {
5
+ const helper = getHelpers(knex).schema;
6
+ const isMysql = helper.isOneOfClients(['mysql']);
7
+ if (isMysql) {
8
+ await dropConstraint(knex);
9
+ }
2
10
  await knex.schema.alterTable('directus_shares', (table) => {
3
- if (knex.client.constructor.name === 'Client_MySQL') {
4
- // Temporary drop foreign key constraint, see https://github.com/directus/directus/issues/19399
5
- table.dropForeign('collection', 'directus_shares_collection_foreign');
6
- }
7
11
  table.dropNullable('collection');
8
- if (knex.client.constructor.name === 'Client_MySQL') {
9
- // Recreate foreign key constraint, from 20211211A-add-shares.ts
10
- table.foreign('collection').references('directus_collections.collection').onDelete('CASCADE');
11
- }
12
12
  table.dropNullable('item');
13
13
  });
14
+ if (isMysql) {
15
+ await recreateConstraint(knex);
16
+ }
14
17
  }
15
18
  export async function down(knex) {
19
+ const helper = getHelpers(knex).schema;
20
+ const isMysql = helper.isOneOfClients(['mysql']);
21
+ if (isMysql) {
22
+ await dropConstraint(knex);
23
+ }
16
24
  await knex.schema.alterTable('directus_shares', (table) => {
17
- if (knex.client.constructor.name === 'Client_MySQL') {
18
- // Temporary drop foreign key constraint, see https://github.com/directus/directus/issues/19399
19
- table.dropForeign('collection', 'directus_shares_collection_foreign');
20
- }
21
25
  table.setNullable('collection');
22
- if (knex.client.constructor.name === 'Client_MySQL') {
23
- // Recreate foreign key constraint, from 20211211A-add-shares.ts
24
- table.foreign('collection').references('directus_collections.collection').onDelete('CASCADE');
25
- }
26
26
  table.setNullable('item');
27
27
  });
28
+ if (isMysql) {
29
+ await recreateConstraint(knex);
30
+ }
31
+ }
32
+ /**
33
+ * Temporarily drop foreign key constraint for MySQL instances, see https://github.com/directus/directus/issues/19399
34
+ */
35
+ async function dropConstraint(knex) {
36
+ const inspector = createInspector(knex);
37
+ const foreignKeys = await inspector.foreignKeys('directus_shares');
38
+ const collectionForeignKeys = foreignKeys.filter((fk) => fk.column === 'collection');
39
+ const constraintName = collectionForeignKeys[0]?.constraint_name;
40
+ if (constraintName && collectionForeignKeys.length === 1) {
41
+ await knex.schema.alterTable('directus_shares', (table) => {
42
+ table.dropForeign('collection', constraintName);
43
+ });
44
+ }
45
+ else {
46
+ logger.warn(`Unexpected number of foreign key constraints on 'directus_shares.collection':`);
47
+ logger.warn(JSON.stringify(collectionForeignKeys, null, 4));
48
+ }
49
+ }
50
+ /**
51
+ * Recreate foreign key constraint for MySQL instances, from 20211211A-add-shares.ts
52
+ */
53
+ async function recreateConstraint(knex) {
54
+ return knex.schema.alterTable('directus_shares', async (table) => {
55
+ table.foreign('collection').references('directus_collections.collection').onDelete('CASCADE');
56
+ });
28
57
  }
@@ -114,27 +114,8 @@ fields:
114
114
  interface: system-display-template
115
115
  options:
116
116
  collectionField: collection
117
- injectBranchField: true
118
117
  width: full
119
118
 
120
- - field: content_versioning_divider
121
- special:
122
- - alias
123
- - no-data
124
- interface: presentation-divider
125
- options:
126
- icon: update
127
- title: $t:field_options.directus_collections.content_versioning_divider
128
- width: full
129
-
130
- - field: branches_enabled
131
- interface: boolean
132
- special:
133
- - cast-boolean
134
- options:
135
- label: $t:field_options.directus_collections.enable_branches
136
- width: half
137
-
138
119
  - field: archive_divider
139
120
  special:
140
121
  - alias
package/dist/env.d.ts CHANGED
@@ -12,4 +12,4 @@ export declare const getEnv: () => Record<string, any>;
12
12
  * When changes have been made during runtime, like in the CLI, we can refresh the env object with
13
13
  * the newly created variables
14
14
  */
15
- export declare function refreshEnv(): void;
15
+ export declare function refreshEnv(): Promise<void>;
package/dist/env.js CHANGED
@@ -3,11 +3,14 @@
3
3
  * For all possible keys, see: https://docs.directus.io/self-hosted/config-options/
4
4
  */
5
5
  import { parseJSON, toArray } from '@directus/utils';
6
+ import { JAVASCRIPT_FILE_EXTS } from '@directus/constants';
6
7
  import dotenv from 'dotenv';
7
8
  import fs from 'fs';
8
9
  import { clone, toNumber, toString } from 'lodash-es';
9
10
  import { createRequire } from 'node:module';
11
+ import { pathToFileURL } from 'node:url';
10
12
  import path from 'path';
13
+ import getModuleDefault from './utils/get-module-default.js';
11
14
  import { requireYAML } from './utils/require-yaml.js';
12
15
  import { toBoolean } from './utils/to-boolean.js';
13
16
  const require = createRequire(import.meta.url);
@@ -317,7 +320,7 @@ const typeMap = {
317
320
  let env = {
318
321
  ...defaults,
319
322
  ...process.env,
320
- ...processConfiguration(),
323
+ ...(await processConfiguration()),
321
324
  };
322
325
  process.env = env;
323
326
  env = processValues(env);
@@ -330,35 +333,35 @@ export const getEnv = () => env;
330
333
  * When changes have been made during runtime, like in the CLI, we can refresh the env object with
331
334
  * the newly created variables
332
335
  */
333
- export function refreshEnv() {
336
+ export async function refreshEnv() {
334
337
  env = {
335
338
  ...defaults,
336
339
  ...process.env,
337
- ...processConfiguration(),
340
+ ...(await processConfiguration()),
338
341
  };
339
342
  process.env = env;
340
343
  env = processValues(env);
341
344
  }
342
- function processConfiguration() {
345
+ async function processConfiguration() {
343
346
  const configPath = path.resolve(process.env['CONFIG_PATH'] || defaults['CONFIG_PATH']);
344
347
  if (fs.existsSync(configPath) === false)
345
348
  return {};
346
- const fileExt = path.extname(configPath).toLowerCase();
347
- if (fileExt === '.js') {
348
- const module = require(configPath);
349
- const exported = module.default || module;
350
- if (typeof exported === 'function') {
351
- return exported(process.env);
349
+ const fileExt = path.extname(configPath).toLowerCase().substring(1);
350
+ if (JAVASCRIPT_FILE_EXTS.includes(fileExt)) {
351
+ const data = await import(pathToFileURL(configPath).toString());
352
+ const config = getModuleDefault(data);
353
+ if (typeof config === 'function') {
354
+ return config(process.env);
352
355
  }
353
- else if (typeof exported === 'object') {
354
- return exported;
356
+ else if (typeof config === 'object') {
357
+ return config;
355
358
  }
356
- throw new Error(`Invalid JS configuration file export type. Requires one of "function", "object", received: "${typeof exported}"`);
359
+ throw new Error(`Invalid JS configuration file export type. Requires one of "function", "object", received: "${typeof config}"`);
357
360
  }
358
- if (fileExt === '.json') {
361
+ if (fileExt === 'json') {
359
362
  return require(configPath);
360
363
  }
361
- if (fileExt === '.yaml' || fileExt === '.yml') {
364
+ if (fileExt === 'yaml' || fileExt === 'yml') {
362
365
  const data = requireYAML(configPath);
363
366
  if (typeof data === 'object') {
364
367
  return data;
@@ -1,9 +1,7 @@
1
1
  import { parse as parseBytesConfiguration } from 'bytes';
2
- import { assign } from 'lodash-es';
3
2
  import { getCache, setCacheValue } from '../cache.js';
4
3
  import env from '../env.js';
5
4
  import logger from '../logger.js';
6
- import { BranchesService } from '../services/branches.js';
7
5
  import { ExportService } from '../services/import-export.js';
8
6
  import asyncHandler from '../utils/async-handler.js';
9
7
  import { getCacheControlHeader } from '../utils/get-cache-headers.js';
@@ -41,24 +39,6 @@ export const respond = asyncHandler(async (req, res) => {
41
39
  res.setHeader('Cache-Control', 'no-cache');
42
40
  res.setHeader('Vary', 'Origin, Cache-Control');
43
41
  }
44
- if (req.sanitizedQuery.branch &&
45
- req.collection &&
46
- (req.singleton || req.params['pk']) &&
47
- 'data' in res.locals['payload']) {
48
- const branchesService = new BranchesService({ accountability: req.accountability ?? null, schema: req.schema });
49
- const filter = {
50
- name: { _eq: req.sanitizedQuery.branch },
51
- collection: { _eq: req.collection },
52
- };
53
- if (req.params['pk']) {
54
- filter['item'] = { _eq: req.params['pk'] };
55
- }
56
- const branch = await branchesService.readByQuery({ filter });
57
- if (branch[0]) {
58
- const commits = await branchesService.getBranchCommits(branch[0]['id']);
59
- assign(res.locals['payload'].data, ...commits);
60
- }
61
- }
62
42
  if (req.sanitizedQuery.export) {
63
43
  const exportService = new ExportService({ accountability: req.accountability ?? null, schema: req.schema });
64
44
  let filename = '';
@@ -60,6 +60,9 @@ export class ActivityService extends ItemsService {
60
60
  comment = comment.replace(new RegExp(mention, 'gm'), userPreviews[uuid] ?? '@Unknown User');
61
61
  }
62
62
  comment = `> ${comment.replace(/\n+/gm, '\n> ')}`;
63
+ const href = new Url(env['PUBLIC_URL'])
64
+ .addPath('admin', 'content', data['collection'], data['item'])
65
+ .toString();
63
66
  const message = `
64
67
  Hello ${userName(user)},
65
68
 
@@ -67,9 +70,7 @@ ${userName(sender)} has mentioned you in a comment:
67
70
 
68
71
  ${comment}
69
72
 
70
- <a href="${new Url(env['PUBLIC_URL'])
71
- .addPath('admin', 'content', data['collection'], data['item'])
72
- .toString()}">Click here to view.</a>
73
+ <a href="${href}">Click here to view.</a>
73
74
  `;
74
75
  await this.notificationsService.createOne({
75
76
  recipient: userID,
@@ -129,7 +129,23 @@ export class AssetsService {
129
129
  logger.error(e, `Couldn't transform file ${file.id}`);
130
130
  readStream.unpipe(transformer);
131
131
  });
132
- await storage.location(file.storage).write(assetFilename, readStream.pipe(transformer), type);
132
+ try {
133
+ await storage.location(file.storage).write(assetFilename, readStream.pipe(transformer), type);
134
+ }
135
+ catch (error) {
136
+ try {
137
+ await storage.location(file.storage).delete(assetFilename);
138
+ }
139
+ catch {
140
+ // Ignored to prevent original error from being overwritten
141
+ }
142
+ if (error?.message?.includes('timeout')) {
143
+ throw new ServiceUnavailableError({ service: 'assets', reason: `Transformation timed out` });
144
+ }
145
+ else {
146
+ throw error;
147
+ }
148
+ }
133
149
  return {
134
150
  stream: await storage.location(file.storage).read(assetFilename, range),
135
151
  stat: await storage.location(file.storage).stat(assetFilename),
@@ -5,7 +5,9 @@ import exif from 'exif-reader';
5
5
  import { parse as parseIcc } from 'icc';
6
6
  import { clone, pick } from 'lodash-es';
7
7
  import { extension } from 'mime-types';
8
+ import { PassThrough as PassThroughStream, Transform as TransformStream } from 'node:stream';
8
9
  import { pipeline } from 'node:stream/promises';
10
+ import zlib from 'node:zlib';
9
11
  import path from 'path';
10
12
  import sharp from 'sharp';
11
13
  import url from 'url';
@@ -209,6 +211,7 @@ export class FilesService extends ItemsService {
209
211
  const axios = await getAxios();
210
212
  fileResponse = await axios.get(encodeURL(importURL), {
211
213
  responseType: 'stream',
214
+ decompress: false,
212
215
  });
213
216
  }
214
217
  catch (err) {
@@ -227,7 +230,7 @@ export class FilesService extends ItemsService {
227
230
  title: formatTitle(filename),
228
231
  ...(body || {}),
229
232
  };
230
- return await this.uploadOne(fileResponse.data, payload);
233
+ return await this.uploadOne(decompressResponse(fileResponse.data, fileResponse.headers), payload, payload.id);
231
234
  }
232
235
  /**
233
236
  * Create a file (only applicable when it is not a multipart/data POST request)
@@ -267,3 +270,57 @@ export class FilesService extends ItemsService {
267
270
  return keys;
268
271
  }
269
272
  }
273
+ function decompressResponse(stream, headers) {
274
+ const contentEncoding = (headers['content-encoding'] || '').toLowerCase();
275
+ if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) {
276
+ return stream;
277
+ }
278
+ let isEmpty = true;
279
+ const checker = new TransformStream({
280
+ transform(data, _encoding, callback) {
281
+ if (isEmpty === false) {
282
+ callback(null, data);
283
+ return;
284
+ }
285
+ isEmpty = false;
286
+ handleContentEncoding(data);
287
+ callback(null, data);
288
+ },
289
+ flush(callback) {
290
+ callback();
291
+ },
292
+ });
293
+ const finalStream = new PassThroughStream({
294
+ autoDestroy: false,
295
+ destroy(error, callback) {
296
+ stream.destroy();
297
+ callback(error);
298
+ },
299
+ });
300
+ stream.pipe(checker);
301
+ return finalStream;
302
+ function handleContentEncoding(data) {
303
+ let decompressStream;
304
+ if (contentEncoding === 'br') {
305
+ decompressStream = zlib.createBrotliDecompress();
306
+ }
307
+ else if (contentEncoding === 'deflate' && isDeflateAlgorithm(data)) {
308
+ decompressStream = zlib.createInflateRaw();
309
+ }
310
+ else {
311
+ decompressStream = zlib.createUnzip();
312
+ }
313
+ decompressStream.once('error', (error) => {
314
+ if (isEmpty && !stream.readable) {
315
+ finalStream.end();
316
+ return;
317
+ }
318
+ finalStream.destroy(error);
319
+ });
320
+ checker.pipe(decompressStream).pipe(finalStream);
321
+ }
322
+ function isDeflateAlgorithm(data) {
323
+ const DEFLATE_ALGORITHM_HEADER = 0x08;
324
+ return data.length > 0 && (data[0] & DEFLATE_ALGORITHM_HEADER) === 0;
325
+ }
326
+ }
@@ -17,9 +17,12 @@ import env from '../env.js';
17
17
  import { ForbiddenError, InvalidPayloadError, ServiceUnavailableError, UnsupportedMediaTypeError, } from '../errors/index.js';
18
18
  import logger from '../logger.js';
19
19
  import { getDateFormatted } from '../utils/get-date-formatted.js';
20
+ import { userName } from '../utils/user-name.js';
20
21
  import { FilesService } from './files.js';
21
22
  import { ItemsService } from './items.js';
22
23
  import { NotificationsService } from './notifications.js';
24
+ import { UsersService } from './users.js';
25
+ import { Url } from '../utils/url.js';
23
26
  export class ImportService {
24
27
  knex;
25
28
  accountability;
@@ -226,10 +229,23 @@ export class ExportService {
226
229
  accountability: this.accountability,
227
230
  schema: this.schema,
228
231
  });
232
+ const usersService = new UsersService({
233
+ schema: this.schema,
234
+ });
235
+ const user = await usersService.readOne(this.accountability.user, {
236
+ fields: ['first_name', 'last_name', 'email'],
237
+ });
238
+ const href = new Url(env['PUBLIC_URL']).addPath('admin', 'files', savedFile).toString();
239
+ const message = `
240
+ Hello ${userName(user)},
241
+
242
+ Your export of ${collection} is ready. <a href="${href}">Click here to view.</a>
243
+ `;
229
244
  await notificationsService.createOne({
230
245
  recipient: this.accountability.user,
231
246
  sender: this.accountability.user,
232
247
  subject: `Your export of ${collection} is ready`,
248
+ message,
233
249
  collection: `directus_files`,
234
250
  item: savedFile,
235
251
  });