@directus/api 21.0.0-rc.0 → 22.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/dist/app.js +1 -1
  2. package/dist/cache.d.ts +0 -1
  3. package/dist/cache.js +7 -22
  4. package/dist/controllers/tus.js +7 -5
  5. package/dist/database/get-ast-from-query/lib/parse-fields.d.ts +1 -1
  6. package/dist/database/get-ast-from-query/lib/parse-fields.js +10 -0
  7. package/dist/database/helpers/schema/dialects/cockroachdb.d.ts +2 -1
  8. package/dist/database/helpers/schema/dialects/cockroachdb.js +4 -0
  9. package/dist/database/helpers/schema/dialects/mssql.d.ts +2 -1
  10. package/dist/database/helpers/schema/dialects/mssql.js +4 -0
  11. package/dist/database/helpers/schema/dialects/oracle.d.ts +2 -1
  12. package/dist/database/helpers/schema/dialects/oracle.js +4 -0
  13. package/dist/database/helpers/schema/dialects/postgres.d.ts +2 -1
  14. package/dist/database/helpers/schema/dialects/postgres.js +4 -0
  15. package/dist/database/helpers/schema/types.d.ts +5 -0
  16. package/dist/database/helpers/schema/types.js +3 -0
  17. package/dist/database/helpers/schema/utils/preprocess-bindings.d.ts +8 -0
  18. package/dist/database/helpers/schema/utils/preprocess-bindings.js +30 -0
  19. package/dist/database/index.js +14 -6
  20. package/dist/database/migrations/20240305A-change-useragent-type.js +1 -1
  21. package/dist/database/migrations/20240716A-update-files-date-fields.js +33 -0
  22. package/dist/database/migrations/20240806A-permissions-policies.d.ts +6 -0
  23. package/dist/database/migrations/20240806A-permissions-policies.js +338 -0
  24. package/dist/database/run-ast/lib/get-db-query.js +12 -2
  25. package/dist/database/run-ast/utils/apply-case-when.js +5 -4
  26. package/dist/database/run-ast/utils/with-preprocess-bindings.d.ts +2 -0
  27. package/dist/database/run-ast/utils/with-preprocess-bindings.js +14 -0
  28. package/dist/logger/index.js +1 -1
  29. package/dist/middleware/error-handler.d.ts +2 -2
  30. package/dist/middleware/error-handler.js +54 -51
  31. package/dist/permissions/lib/fetch-permissions.d.ts +1 -0
  32. package/dist/permissions/lib/fetch-permissions.js +3 -2
  33. package/dist/permissions/lib/fetch-policies.d.ts +7 -0
  34. package/dist/permissions/lib/fetch-policies.js +16 -1
  35. package/dist/permissions/modules/process-ast/lib/inject-cases.js +6 -6
  36. package/dist/permissions/modules/process-ast/types.d.ts +0 -6
  37. package/dist/permissions/modules/process-ast/utils/extract-paths-from-query.js +11 -1
  38. package/dist/permissions/utils/filter-policies-by-ip.d.ts +1 -1
  39. package/dist/services/assets.js +2 -5
  40. package/dist/services/fields.d.ts +3 -0
  41. package/dist/services/fields.js +29 -5
  42. package/dist/services/files/lib/get-sharp-instance.d.ts +2 -0
  43. package/dist/services/files/lib/get-sharp-instance.js +10 -0
  44. package/dist/services/files/utils/get-metadata.js +7 -6
  45. package/dist/services/files.js +5 -0
  46. package/dist/services/import-export.d.ts +3 -1
  47. package/dist/services/import-export.js +49 -5
  48. package/dist/services/mail/index.d.ts +1 -1
  49. package/dist/services/mail/index.js +9 -1
  50. package/dist/services/relations.d.ts +3 -1
  51. package/dist/services/relations.js +27 -5
  52. package/dist/services/tus/data-store.js +4 -5
  53. package/dist/services/tus/server.d.ts +1 -1
  54. package/dist/services/tus/server.js +9 -2
  55. package/dist/utils/apply-query.d.ts +8 -5
  56. package/dist/utils/apply-query.js +40 -5
  57. package/dist/utils/fetch-user-count/fetch-access-lookup.d.ts +2 -0
  58. package/dist/utils/fetch-user-count/fetch-access-lookup.js +3 -2
  59. package/dist/utils/fetch-user-count/fetch-user-count.js +10 -3
  60. package/dist/utils/fetch-user-count/get-user-count-query.js +1 -1
  61. package/dist/utils/get-schema.js +3 -3
  62. package/dist/utils/sanitize-schema.d.ts +1 -1
  63. package/package.json +38 -38
  64. package/dist/database/migrations/20240710A-permissions-policies.js +0 -169
  65. /package/dist/database/migrations/{20240710A-permissions-policies.d.ts → 20240716A-update-files-date-fields.d.ts} +0 -0
@@ -20,11 +20,7 @@ function processChildren(collection, children, permissions) {
20
20
  // TODO this can be optimized if all cases are the same for all requested keys, as those should be
21
21
  //
22
22
  for (const child of children) {
23
- // If there's one or more permissions that allow full access to this field, we can safe some
24
- // query perf overhead by ignoring the whole case/where system
25
23
  const fieldKey = getUnaliasedFieldKey(child);
26
- if (allowedFields.has('*') || allowedFields.has(fieldKey))
27
- continue;
28
24
  const globalWhenCase = caseMap['*'];
29
25
  const fieldWhenCase = caseMap[fieldKey];
30
26
  // Validation should catch any fields that are attempted to be read that don't have any access control configured.
@@ -33,8 +29,12 @@ function processChildren(collection, children, permissions) {
33
29
  if (!globalWhenCase && !fieldWhenCase) {
34
30
  throw new Error(`Cannot extract access permissions for field "${fieldKey}" in collection "${collection}"`);
35
31
  }
36
- // Global and field can't both be undefined as per the error check prior
37
- child.whenCase = [...(globalWhenCase ?? []), ...(fieldWhenCase ?? [])];
32
+ // The case/when system only needs to take place if no full access is given on this field,
33
+ // otherwise we can skip and thus save some query perf overhead
34
+ if (!allowedFields.has('*') && !allowedFields.has(fieldKey)) {
35
+ // Global and field can't both be undefined as per the error check prior
36
+ child.whenCase = [...(globalWhenCase ?? []), ...(fieldWhenCase ?? [])];
37
+ }
38
38
  if (child.type === 'm2o') {
39
39
  child.cases = processChildren(child.relation.related_collection, child.children, permissions);
40
40
  }
@@ -16,9 +16,3 @@ export type FieldMap = {
16
16
  read: FieldMapEntries;
17
17
  other: FieldMapEntries;
18
18
  };
19
- export interface AccessRow {
20
- policy: {
21
- id: string;
22
- ip_access: string[] | null;
23
- };
24
- }
@@ -24,12 +24,22 @@ export function extractPathsFromQuery(query) {
24
24
  for (const field of query.sort) {
25
25
  // Sort can have dot notation fields for sorting on m2o values Sort fields can start with
26
26
  // `-` to indicate descending order, which should be dropped for permissions checks
27
- readOnlyPaths.push(field.split('.').map((field) => (field.startsWith('-') ? field.substring(1) : field)));
27
+ const parts = field.split('.').map((field) => (field.startsWith('-') ? field.substring(1) : field));
28
+ if (query.aggregate && parts.length > 0 && parts[0] in query.aggregate) {
29
+ // If query is an aggregate query and the first part is a requested aggregate operation, ignore the whole field.
30
+ // The correct field is extracted into the field map when processing the `query.aggregate` fields.
31
+ continue;
32
+ }
33
+ readOnlyPaths.push(parts);
28
34
  }
29
35
  }
30
36
  if (query.aggregate) {
31
37
  for (const fields of Object.values(query.aggregate)) {
32
38
  for (const field of fields) {
39
+ if (field === '*') {
40
+ // Don't add wildcard field to the paths
41
+ continue;
42
+ }
33
43
  // Aggregate doesn't currently support aggregating on nested fields, but it doesn't hurt
34
44
  // to standardize it in the validation layer
35
45
  paths.push(field.split('.'));
@@ -1,2 +1,2 @@
1
- import type { AccessRow } from '../modules/process-ast/types.js';
1
+ import type { AccessRow } from '../lib/fetch-policies.js';
2
2
  export declare function filterPoliciesByIp(policies: AccessRow[], ip: string | null | undefined): AccessRow[];
@@ -14,6 +14,7 @@ import { getMilliseconds } from '../utils/get-milliseconds.js';
14
14
  import { isValidUuid } from '../utils/is-valid-uuid.js';
15
15
  import * as TransformationUtils from '../utils/transformations.js';
16
16
  import { FilesService } from './files.js';
17
+ import { getSharpInstance } from './files/lib/get-sharp-instance.js';
17
18
  const env = useEnv();
18
19
  const logger = useLogger();
19
20
  export class AssetsService {
@@ -121,11 +122,7 @@ export class AssetsService {
121
122
  });
122
123
  }
123
124
  const readStream = await storage.location(file.storage).read(file.filename_disk, range);
124
- const transformer = sharp({
125
- limitInputPixels: Math.pow(env['ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION'], 2),
126
- sequentialRead: true,
127
- failOn: env['ASSETS_INVALID_IMAGE_SENSITIVITY_LEVEL'],
128
- });
125
+ const transformer = getSharpInstance();
129
126
  transformer.timeout({
130
127
  seconds: clamp(Math.round(getMilliseconds(env['ASSETS_TRANSFORM_TIMEOUT'], 0) / 1000), 1, 3600),
131
128
  });
@@ -16,7 +16,10 @@ export declare class FieldsService {
16
16
  schema: SchemaOverview;
17
17
  cache: Keyv<any> | null;
18
18
  systemCache: Keyv<any>;
19
+ schemaCache: Keyv<any>;
19
20
  constructor(options: AbstractServiceOptions);
21
+ columnInfo(collection?: string): Promise<Column[]>;
22
+ columnInfo(collection: string, field: string): Promise<Column>;
20
23
  readAll(collection?: string): Promise<Field[]>;
21
24
  readOne(collection: string, field: string): Promise<Record<string, any>>;
22
25
  createField(collection: string, field: Partial<Field> & {
@@ -1,9 +1,10 @@
1
1
  import { DEFAULT_NUMERIC_PRECISION, DEFAULT_NUMERIC_SCALE, KNEX_TYPES, REGEX_BETWEEN_PARENS, } from '@directus/constants';
2
+ import { useEnv } from '@directus/env';
2
3
  import { ForbiddenError, InvalidPayloadError } from '@directus/errors';
3
4
  import { createInspector } from '@directus/schema';
4
5
  import { addFieldFlag, toArray } from '@directus/utils';
5
6
  import { isEqual, isNil, merge } from 'lodash-es';
6
- import { clearSystemCache, getCache } from '../cache.js';
7
+ import { clearSystemCache, getCache, getCacheValue, setCacheValue } from '../cache.js';
7
8
  import { ALIAS_TYPES } from '../constants.js';
8
9
  import { translateDatabaseError } from '../database/errors/translate.js';
9
10
  import { getHelpers } from '../database/helpers/index.js';
@@ -23,6 +24,7 @@ import { ItemsService } from './items.js';
23
24
  import { PayloadService } from './payload.js';
24
25
  import { RelationsService } from './relations.js';
25
26
  const systemFieldRows = getSystemFieldRowsWithAuthProviders();
27
+ const env = useEnv();
26
28
  export class FieldsService {
27
29
  knex;
28
30
  helpers;
@@ -33,6 +35,7 @@ export class FieldsService {
33
35
  schema;
34
36
  cache;
35
37
  systemCache;
38
+ schemaCache;
36
39
  constructor(options) {
37
40
  this.knex = options.knex || getDatabase();
38
41
  this.helpers = getHelpers(this.knex);
@@ -41,9 +44,30 @@ export class FieldsService {
41
44
  this.itemsService = new ItemsService('directus_fields', options);
42
45
  this.payloadService = new PayloadService('directus_fields', options);
43
46
  this.schema = options.schema;
44
- const { cache, systemCache } = getCache();
47
+ const { cache, systemCache, localSchemaCache } = getCache();
45
48
  this.cache = cache;
46
49
  this.systemCache = systemCache;
50
+ this.schemaCache = localSchemaCache;
51
+ }
52
+ async columnInfo(collection, field) {
53
+ const schemaCacheIsEnabled = Boolean(env['CACHE_SCHEMA']);
54
+ let columnInfo = null;
55
+ if (schemaCacheIsEnabled) {
56
+ columnInfo = await getCacheValue(this.schemaCache, 'columnInfo');
57
+ }
58
+ if (!columnInfo) {
59
+ columnInfo = await this.schemaInspector.columnInfo();
60
+ if (schemaCacheIsEnabled) {
61
+ setCacheValue(this.schemaCache, 'columnInfo', columnInfo);
62
+ }
63
+ }
64
+ if (collection) {
65
+ columnInfo = columnInfo.filter((column) => column.table === collection);
66
+ }
67
+ if (field) {
68
+ return columnInfo.find((column) => column.name === field);
69
+ }
70
+ return columnInfo;
47
71
  }
48
72
  async readAll(collection) {
49
73
  let fields;
@@ -72,7 +96,7 @@ export class FieldsService {
72
96
  fields = (await nonAuthorizedItemsService.readByQuery({ limit: -1 }));
73
97
  fields.push(...systemFieldRows);
74
98
  }
75
- const columns = (await this.schemaInspector.columnInfo(collection)).map((column) => ({
99
+ const columns = (await this.columnInfo(collection)).map((column) => ({
76
100
  ...column,
77
101
  default_value: getDefaultValue(column, fields.find((field) => field.collection === column.table && field.field === column.name)),
78
102
  }));
@@ -204,7 +228,7 @@ export class FieldsService {
204
228
  fieldInfo ||
205
229
  systemFieldRows.find((fieldMeta) => fieldMeta.collection === collection && fieldMeta.field === field);
206
230
  try {
207
- column = await this.schemaInspector.columnInfo(collection, field);
231
+ column = await this.columnInfo(collection, field);
208
232
  }
209
233
  catch {
210
234
  // Do nothing
@@ -359,7 +383,7 @@ export class FieldsService {
359
383
  throw new InvalidPayloadError({ reason: 'Alias type cannot be changed' });
360
384
  }
361
385
  if (hookAdjustedField.schema) {
362
- const existingColumn = await this.schemaInspector.columnInfo(collection, hookAdjustedField.field);
386
+ const existingColumn = await this.columnInfo(collection, hookAdjustedField.field);
363
387
  if (hookAdjustedField.schema?.is_nullable === true && existingColumn.is_primary_key) {
364
388
  throw new InvalidPayloadError({ reason: 'Primary key cannot be null' });
365
389
  }
@@ -0,0 +1,2 @@
1
+ import { type Sharp } from 'sharp';
2
+ export declare function getSharpInstance(): Sharp;
@@ -0,0 +1,10 @@
1
+ import { useEnv } from '@directus/env';
2
+ import sharp, {} from 'sharp';
3
+ export function getSharpInstance() {
4
+ const env = useEnv();
5
+ return sharp({
6
+ limitInputPixels: Math.trunc(Math.pow(env['ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION'], 2)),
7
+ sequentialRead: true,
8
+ failOn: env['ASSETS_INVALID_IMAGE_SENSITIVITY_LEVEL'],
9
+ });
10
+ }
@@ -1,19 +1,20 @@
1
+ import { useEnv } from '@directus/env';
1
2
  import exif, {} from 'exif-reader';
2
3
  import { parse as parseIcc } from 'icc';
3
4
  import { pick } from 'lodash-es';
4
5
  import { pipeline } from 'node:stream/promises';
5
- import sharp from 'sharp';
6
- import { useEnv } from '@directus/env';
7
6
  import { useLogger } from '../../../logger/index.js';
7
+ import { getSharpInstance } from '../lib/get-sharp-instance.js';
8
8
  import { parseIptc, parseXmp } from './parse-image-metadata.js';
9
9
  const env = useEnv();
10
10
  const logger = useLogger();
11
11
  export async function getMetadata(stream, allowList = env['FILE_METADATA_ALLOW_LIST']) {
12
- return new Promise((resolve, reject) => {
13
- pipeline(stream, sharp().metadata(async (err, sharpMetadata) => {
12
+ const transformer = getSharpInstance();
13
+ return new Promise((resolve) => {
14
+ pipeline(stream, transformer.metadata(async (err, sharpMetadata) => {
14
15
  if (err) {
15
- reject(err);
16
- return;
16
+ logger.error(err);
17
+ return resolve({});
17
18
  }
18
19
  const metadata = {};
19
20
  if (sharpMetadata.orientation && sharpMetadata.orientation >= 5) {
@@ -58,6 +58,10 @@ export class FilesService extends ItemsService {
58
58
  const fileExtension = path.extname(payload.filename_download) || (payload.type && '.' + extension(payload.type)) || '';
59
59
  // The filename_disk is the FINAL filename on disk
60
60
  payload.filename_disk ||= primaryKey + (fileExtension || '');
61
+ // If the filename_disk extension doesn't match the new mimetype, update it
62
+ if (isReplacement === true && path.extname(payload.filename_disk) !== fileExtension) {
63
+ payload.filename_disk = primaryKey + (fileExtension || '');
64
+ }
61
65
  // Temp filename is used for replacements
62
66
  const tempFilenameDisk = 'temp_' + payload.filename_disk;
63
67
  if (!payload.type) {
@@ -126,6 +130,7 @@ export class FilesService extends ItemsService {
126
130
  const { size } = await storage.location(data.storage).stat(payload.filename_disk);
127
131
  payload.filesize = size;
128
132
  const metadata = await extractMetadata(data.storage, payload);
133
+ payload.uploaded_on = new Date().toISOString();
129
134
  // We do this in a service without accountability. Even if you don't have update permissions to the file,
130
135
  // we still want to be able to set the extracted values from the file on create
131
136
  const sudoService = new ItemsService('directus_files', {
@@ -2,7 +2,7 @@
2
2
  import type { Accountability, File, Query, SchemaOverview } from '@directus/types';
3
3
  import type { Knex } from 'knex';
4
4
  import type { Readable } from 'node:stream';
5
- import type { AbstractServiceOptions } from '../types/index.js';
5
+ import type { AbstractServiceOptions, FunctionFieldNode, FieldNode, NestedCollectionNode } from '../types/index.js';
6
6
  type ExportFormat = 'csv' | 'json' | 'xml' | 'yaml';
7
7
  export declare class ImportService {
8
8
  knex: Knex;
@@ -32,6 +32,8 @@ export declare class ExportService {
32
32
  transform(input: Record<string, any>[], format: ExportFormat, options?: {
33
33
  includeHeader?: boolean;
34
34
  includeFooter?: boolean;
35
+ fields?: string[] | null;
35
36
  }): string;
36
37
  }
38
+ export declare function getHeadingsForCsvExport(nodes: (NestedCollectionNode | FieldNode | FunctionFieldNode)[] | undefined, prefix?: string): string[];
37
39
  export {};
@@ -24,6 +24,7 @@ import { userName } from '../utils/user-name.js';
24
24
  import { FilesService } from './files.js';
25
25
  import { NotificationsService } from './notifications.js';
26
26
  import { UsersService } from './users.js';
27
+ import { parseFields } from '../database/get-ast-from-query/lib/parse-fields.js';
27
28
  const env = useEnv();
28
29
  const logger = useLogger();
29
30
  export class ImportService {
@@ -262,9 +263,26 @@ export class ExportService {
262
263
  });
263
264
  readCount += result.length;
264
265
  if (result.length) {
266
+ let csvHeadings = null;
267
+ if (format === 'csv') {
268
+ if (!query.fields)
269
+ query.fields = ['*'];
270
+ // to ensure that all headings are included in the CSV file, all possible fields need to be determined.
271
+ const parsedFields = await parseFields({
272
+ parentCollection: collection,
273
+ fields: query.fields,
274
+ query: query,
275
+ accountability: this.accountability,
276
+ }, {
277
+ schema: this.schema,
278
+ knex: database,
279
+ });
280
+ csvHeadings = getHeadingsForCsvExport(parsedFields);
281
+ }
265
282
  await appendFile(tmpFile.path, this.transform(result, format, {
266
283
  includeHeader: batch === 0,
267
284
  includeFooter: batch + 1 === batchesRequired,
285
+ fields: csvHeadings,
268
286
  }));
269
287
  }
270
288
  }
@@ -359,11 +377,12 @@ Your export of ${collection} is ready. <a href="${href}">Click here to view.</a>
359
377
  if (format === 'csv') {
360
378
  if (input.length === 0)
361
379
  return '';
362
- const parser = new CSVParser({
363
- transforms: [CSVTransforms.flatten({ separator: '.' })],
364
- header: options?.includeHeader !== false,
365
- });
366
- let string = parser.parse(input);
380
+ const transforms = [CSVTransforms.flatten({ separator: '.' })];
381
+ const header = options?.includeHeader !== false;
382
+ const transformOptions = options?.fields
383
+ ? { transforms, header, fields: options?.fields }
384
+ : { transforms, header };
385
+ let string = new CSVParser(transformOptions).parse(input);
367
386
  if (options?.includeHeader === false) {
368
387
  string = '\n' + string;
369
388
  }
@@ -375,3 +394,28 @@ Your export of ${collection} is ready. <a href="${href}">Click here to view.</a>
375
394
  throw new ServiceUnavailableError({ service: 'export', reason: `Illegal export type used: "${format}"` });
376
395
  }
377
396
  }
397
+ /*
398
+ * Recursive function to traverse the field nodes, to determine the headings for the CSV export file.
399
+ *
400
+ * Relational nodes which target a single item get expanded, which means that their nested fields get their own column in the csv file.
401
+ * For relational nodes which target multiple items, the nested field names are not going to be expanded.
402
+ * Instead they will be stored as a single value/cell of the CSV file.
403
+ */
404
+ export function getHeadingsForCsvExport(nodes, prefix = '') {
405
+ let fieldNames = [];
406
+ if (!nodes)
407
+ return fieldNames;
408
+ nodes.forEach((node) => {
409
+ switch (node.type) {
410
+ case 'field':
411
+ case 'functionField':
412
+ case 'o2m':
413
+ case 'a2o':
414
+ fieldNames.push(prefix ? `${prefix}.${node.fieldKey}` : node.fieldKey);
415
+ break;
416
+ case 'm2o':
417
+ fieldNames = fieldNames.concat(getHeadingsForCsvExport(node.children, prefix ? `${prefix}.${node.fieldKey}` : node.fieldKey));
418
+ }
419
+ });
420
+ return fieldNames;
421
+ }
@@ -14,7 +14,7 @@ export declare class MailService {
14
14
  knex: Knex;
15
15
  mailer: Transporter;
16
16
  constructor(opts: AbstractServiceOptions);
17
- send<T>(options: EmailOptions): Promise<T>;
17
+ send<T>(options: EmailOptions): Promise<T | null>;
18
18
  private renderTemplate;
19
19
  private getDefaultTemplateData;
20
20
  }
@@ -8,6 +8,7 @@ import getDatabase from '../../database/index.js';
8
8
  import { useLogger } from '../../logger/index.js';
9
9
  import getMailer from '../../mailer.js';
10
10
  import { Url } from '../../utils/url.js';
11
+ import emitter from '../../emitter.js';
11
12
  const env = useEnv();
12
13
  const logger = useLogger();
13
14
  const __dirname = path.dirname(fileURLToPath(import.meta.url));
@@ -35,7 +36,14 @@ export class MailService {
35
36
  }
36
37
  }
37
38
  async send(options) {
38
- const { template, ...emailOptions } = options;
39
+ const payload = await emitter.emitFilter(`email.send`, options, {
40
+ database: getDatabase(),
41
+ schema: null,
42
+ accountability: null,
43
+ });
44
+ if (!payload)
45
+ return null;
46
+ const { template, ...emailOptions } = payload;
39
47
  let { html } = options;
40
48
  const defaultTemplateData = await this.getDefaultTemplateData();
41
49
  const from = `${defaultTemplateData.projectName} <${options.from || env['EMAIL_FROM']}>`;
@@ -1,4 +1,4 @@
1
- import type { SchemaInspector } from '@directus/schema';
1
+ import type { ForeignKey, SchemaInspector } from '@directus/schema';
2
2
  import type { Accountability, Relation, RelationMeta, SchemaOverview } from '@directus/types';
3
3
  import type Keyv from 'keyv';
4
4
  import type { Knex } from 'knex';
@@ -12,8 +12,10 @@ export declare class RelationsService {
12
12
  schema: SchemaOverview;
13
13
  relationsItemService: ItemsService<RelationMeta>;
14
14
  systemCache: Keyv<any>;
15
+ schemaCache: Keyv<any>;
15
16
  helpers: Helpers;
16
17
  constructor(options: AbstractServiceOptions);
18
+ foreignKeys(collection?: string): Promise<ForeignKey[]>;
17
19
  readAll(collection?: string, opts?: QueryOptions): Promise<Relation[]>;
18
20
  readOne(collection: string, field: string): Promise<Relation>;
19
21
  /**
@@ -1,8 +1,9 @@
1
+ import { useEnv } from '@directus/env';
1
2
  import { ForbiddenError, InvalidPayloadError } from '@directus/errors';
2
3
  import { createInspector } from '@directus/schema';
3
4
  import { systemRelationRows } from '@directus/system-data';
4
5
  import { toArray } from '@directus/utils';
5
- import { clearSystemCache, getCache } from '../cache.js';
6
+ import { clearSystemCache, getCache, getCacheValue, setCacheValue } from '../cache.js';
6
7
  import { getHelpers } from '../database/helpers/index.js';
7
8
  import getDatabase, { getSchemaInspector } from '../database/index.js';
8
9
  import emitter from '../emitter.js';
@@ -13,6 +14,7 @@ import { getDefaultIndexName } from '../utils/get-default-index-name.js';
13
14
  import { getSchema } from '../utils/get-schema.js';
14
15
  import { transaction } from '../utils/transaction.js';
15
16
  import { ItemsService } from './items.js';
17
+ const env = useEnv();
16
18
  export class RelationsService {
17
19
  knex;
18
20
  schemaInspector;
@@ -20,6 +22,7 @@ export class RelationsService {
20
22
  schema;
21
23
  relationsItemService;
22
24
  systemCache;
25
+ schemaCache;
23
26
  helpers;
24
27
  constructor(options) {
25
28
  this.knex = options.knex || getDatabase();
@@ -33,9 +36,28 @@ export class RelationsService {
33
36
  // allowed to extract the relations regardless of permissions to directus_relations. This
34
37
  // happens in `filterForbidden` down below
35
38
  });
36
- this.systemCache = getCache().systemCache;
39
+ const cache = getCache();
40
+ this.systemCache = cache.systemCache;
41
+ this.schemaCache = cache.localSchemaCache;
37
42
  this.helpers = getHelpers(this.knex);
38
43
  }
44
+ async foreignKeys(collection) {
45
+ const schemaCacheIsEnabled = Boolean(env['CACHE_SCHEMA']);
46
+ let foreignKeys = null;
47
+ if (schemaCacheIsEnabled) {
48
+ foreignKeys = await getCacheValue(this.schemaCache, 'foreignKeys');
49
+ }
50
+ if (!foreignKeys) {
51
+ foreignKeys = await this.schemaInspector.foreignKeys();
52
+ if (schemaCacheIsEnabled) {
53
+ setCacheValue(this.schemaCache, 'foreignKeys', foreignKeys);
54
+ }
55
+ }
56
+ if (collection) {
57
+ return foreignKeys.filter((row) => row.table === collection);
58
+ }
59
+ return foreignKeys;
60
+ }
39
61
  async readAll(collection, opts) {
40
62
  if (this.accountability) {
41
63
  await validateAccess({
@@ -65,7 +87,7 @@ export class RelationsService {
65
87
  return true;
66
88
  return metaRow.many_collection === collection;
67
89
  });
68
- const schemaRows = await this.schemaInspector.foreignKeys(collection);
90
+ const schemaRows = await this.foreignKeys(collection);
69
91
  const results = this.stitchRelations(metaRows, schemaRows);
70
92
  return await this.filterForbidden(results);
71
93
  }
@@ -101,7 +123,7 @@ export class RelationsService {
101
123
  ],
102
124
  },
103
125
  });
104
- const schemaRow = (await this.schemaInspector.foreignKeys(collection)).find((foreignKey) => foreignKey.column === field);
126
+ const schemaRow = (await this.foreignKeys(collection)).find((foreignKey) => foreignKey.column === field);
105
127
  const stitched = this.stitchRelations(metaRow, schemaRow ? [schemaRow] : []);
106
128
  const results = await this.filterForbidden(stitched);
107
129
  if (results.length === 0) {
@@ -316,7 +338,7 @@ export class RelationsService {
316
338
  const nestedActionEvents = [];
317
339
  try {
318
340
  await transaction(this.knex, async (trx) => {
319
- const existingConstraints = await this.schemaInspector.foreignKeys();
341
+ const existingConstraints = await this.foreignKeys();
320
342
  const constraintNames = existingConstraints.map((key) => key.constraint_name);
321
343
  if (existingRelation.schema?.constraint_name &&
322
344
  constraintNames.includes(existingRelation.schema.constraint_name)) {
@@ -79,16 +79,15 @@ export class TusDataStore extends DataStore {
79
79
  }
80
80
  // If this is a new file upload, we need to generate a new primary key and DB record
81
81
  const primaryKey = await itemsService.createOne(fileData, { emitEvents: false });
82
+ // Set the file id, so it is available to be sent as a header on upload creation / resume
83
+ if (!upload.metadata['id']) {
84
+ upload.metadata['id'] = primaryKey;
85
+ }
82
86
  const fileExtension = extname(upload.metadata['filename_download']) ||
83
87
  (upload.metadata['type'] && '.' + extension(upload.metadata['type'])) ||
84
88
  '';
85
89
  // The filename_disk is the FINAL filename on disk
86
90
  fileData.filename_disk ||= primaryKey + (fileExtension || '');
87
- // Temp filename is used for replacements
88
- // const tempFilenameDisk = fileData.tus_id! + (fileExtension || '');
89
- // if (isReplacement) {
90
- // upload.metadata['temp_file'] = tempFilenameDisk;
91
- // }
92
91
  try {
93
92
  // If this is a replacement, we'll write the file to a temp location first to ensure we don't overwrite the existing file if something goes wrong
94
93
  upload = (await this.storageDriver.createChunkedUpload(fileData.filename_disk, upload));
@@ -4,5 +4,5 @@ type Context = {
4
4
  schema: SchemaOverview;
5
5
  accountability?: Accountability | undefined;
6
6
  };
7
- export declare function createTusServer(context: Context): Promise<Server>;
7
+ export declare function createTusServer(context: Context): Promise<[Server, () => void]>;
8
8
  export {};
@@ -6,7 +6,7 @@
6
6
  import { useEnv } from '@directus/env';
7
7
  import { supportsTus } from '@directus/storage';
8
8
  import { toArray } from '@directus/utils';
9
- import { Server } from '@tus/server';
9
+ import { Server, EVENTS } from '@tus/server';
10
10
  import { RESUMABLE_UPLOADS } from '../../constants.js';
11
11
  import { getStorage } from '../../storage/index.js';
12
12
  import { extractMetadata } from '../files/lib/extract-metadata.js';
@@ -33,7 +33,7 @@ async function createTusStore(context) {
33
33
  export async function createTusServer(context) {
34
34
  const env = useEnv();
35
35
  const store = await createTusStore(context);
36
- return new Server({
36
+ const server = new Server({
37
37
  path: '/files/tus',
38
38
  datastore: store,
39
39
  locker: getTusLocker(),
@@ -77,4 +77,11 @@ export async function createTusServer(context) {
77
77
  },
78
78
  relativeLocation: String(env['PUBLIC_URL']).startsWith('http'),
79
79
  });
80
+ server.on(EVENTS.POST_CREATE, async (_req, res, upload) => {
81
+ res.setHeader('Directus-File-Id', upload.metadata['id']);
82
+ });
83
+ return [server, cleanup];
84
+ function cleanup() {
85
+ server.removeAllListeners();
86
+ }
80
87
  }
@@ -2,14 +2,16 @@ import type { Aggregate, Filter, Query, SchemaOverview } from '@directus/types';
2
2
  import type { Knex } from 'knex';
3
3
  import type { AliasMap } from './get-column-path.js';
4
4
  export declare const generateAlias: (size?: number | undefined) => string;
5
- /**
6
- * Apply the Query to a given Knex query builder instance
7
- */
8
- export default function applyQuery(knex: Knex, collection: string, dbQuery: Knex.QueryBuilder, query: Query, schema: SchemaOverview, cases: Filter[], options?: {
5
+ type ApplyQueryOptions = {
9
6
  aliasMap?: AliasMap;
10
7
  isInnerQuery?: boolean;
11
8
  hasMultiRelationalSort?: boolean | undefined;
12
- }): {
9
+ groupWhenCases?: number[][] | undefined;
10
+ };
11
+ /**
12
+ * Apply the Query to a given Knex query builder instance
13
+ */
14
+ export default function applyQuery(knex: Knex, collection: string, dbQuery: Knex.QueryBuilder, query: Query, schema: SchemaOverview, cases: Filter[], options?: ApplyQueryOptions): {
13
15
  query: Knex.QueryBuilder<any, any>;
14
16
  hasJoins: boolean;
15
17
  hasMultiRelationalFilter: boolean;
@@ -40,3 +42,4 @@ export declare function applyFilter(knex: Knex, schema: SchemaOverview, rootQuer
40
42
  export declare function applySearch(knex: Knex, schema: SchemaOverview, dbQuery: Knex.QueryBuilder, searchQuery: string, collection: string): void;
41
43
  export declare function applyAggregate(schema: SchemaOverview, dbQuery: Knex.QueryBuilder, aggregate: Aggregate, collection: string, hasJoins: boolean): void;
42
44
  export declare function joinFilterWithCases(filter: Filter | null | undefined, cases: Filter[]): Filter | null;
45
+ export {};