@directus/api 27.1.0 → 28.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/dist/auth/drivers/oauth2.js +4 -4
  2. package/dist/auth/drivers/openid.d.ts +2 -1
  3. package/dist/auth/drivers/openid.js +75 -42
  4. package/dist/database/errors/dialects/mssql.d.ts +2 -1
  5. package/dist/database/errors/dialects/mssql.js +124 -120
  6. package/dist/database/errors/dialects/mysql.d.ts +2 -1
  7. package/dist/database/errors/dialects/mysql.js +112 -108
  8. package/dist/database/errors/dialects/postgres.d.ts +2 -1
  9. package/dist/database/errors/dialects/postgres.js +75 -71
  10. package/dist/database/errors/dialects/sqlite.d.ts +2 -1
  11. package/dist/database/errors/dialects/sqlite.js +6 -5
  12. package/dist/database/errors/translate.d.ts +2 -1
  13. package/dist/database/errors/translate.js +5 -5
  14. package/dist/database/get-ast-from-query/lib/convert-wildcards.d.ts +3 -1
  15. package/dist/database/get-ast-from-query/lib/convert-wildcards.js +18 -8
  16. package/dist/database/get-ast-from-query/lib/parse-fields.d.ts +2 -1
  17. package/dist/database/get-ast-from-query/lib/parse-fields.js +3 -2
  18. package/dist/database/migrations/20250609A-license-banner.d.ts +3 -0
  19. package/dist/database/migrations/20250609A-license-banner.js +14 -0
  20. package/dist/database/migrations/20250613A-add-project-id.d.ts +3 -0
  21. package/dist/database/migrations/20250613A-add-project-id.js +26 -0
  22. package/dist/extensions/lib/get-extensions-settings.js +14 -8
  23. package/dist/flows.d.ts +5 -1
  24. package/dist/flows.js +61 -4
  25. package/dist/permissions/utils/get-permissions-for-share.js +2 -0
  26. package/dist/permissions/utils/merge-fields.d.ts +1 -0
  27. package/dist/permissions/utils/merge-fields.js +29 -0
  28. package/dist/permissions/utils/merge-permissions.js +3 -14
  29. package/dist/services/fields.js +3 -3
  30. package/dist/services/graphql/resolvers/mutation.js +1 -1
  31. package/dist/services/graphql/resolvers/query.js +1 -1
  32. package/dist/services/graphql/resolvers/system.js +4 -4
  33. package/dist/services/graphql/schema/parse-query.d.ts +1 -1
  34. package/dist/services/graphql/schema/parse-query.js +8 -1
  35. package/dist/services/graphql/schema/read.js +4 -4
  36. package/dist/services/graphql/subscription.js +1 -1
  37. package/dist/services/graphql/utils/filter-replace-m2a.d.ts +3 -0
  38. package/dist/services/graphql/utils/filter-replace-m2a.js +59 -0
  39. package/dist/services/items.js +2 -2
  40. package/dist/services/specifications.js +6 -2
  41. package/dist/services/users.js +3 -0
  42. package/dist/telemetry/lib/get-report.js +4 -1
  43. package/dist/telemetry/types/report.d.ts +4 -0
  44. package/dist/telemetry/utils/get-project-id.d.ts +2 -0
  45. package/dist/telemetry/utils/get-project-id.js +4 -0
  46. package/dist/utils/is-url-allowed.js +1 -1
  47. package/dist/utils/sanitize-query.js +6 -0
  48. package/dist/utils/validate-query.js +1 -0
  49. package/dist/websocket/utils/items.d.ts +1 -1
  50. package/package.json +22 -21
  51. package/dist/utils/map-values-deep.d.ts +0 -1
  52. package/dist/utils/map-values-deep.js +0 -25
@@ -2,6 +2,7 @@ import { randomUUID } from 'node:crypto';
2
2
  import getDatabase from '../../database/index.js';
3
3
  import { ExtensionsService } from '../../services/extensions.js';
4
4
  import { getSchema } from '../../utils/get-schema.js';
5
+ import { list } from '@directus/extensions-registry';
5
6
  /**
6
7
  * Loads stored settings for all extensions. Creates empty new rows in extensions tables for
7
8
  * extensions that don't have settings yet, and remove any settings for extensions that are no
@@ -42,11 +43,16 @@ export const getExtensionsSettings = async ({ local, module, registry, }) => {
42
43
  });
43
44
  }
44
45
  };
45
- const generateSettingsEntry = (folder, extension, source) => {
46
+ const generateSettingsEntry = async (folder, extension, source) => {
47
+ let marketplaceId;
48
+ if (source === 'registry') {
49
+ const marketplace = await list({ search: extension.name });
50
+ marketplaceId = marketplace.data.find((ext) => ext.name === extension.name)?.id;
51
+ }
52
+ const id = marketplaceId ?? randomUUID();
46
53
  if (extension.type === 'bundle') {
47
- const bundleId = randomUUID();
48
54
  newSettings.push({
49
- id: bundleId,
55
+ id,
50
56
  enabled: true,
51
57
  source: source,
52
58
  bundle: null,
@@ -57,14 +63,14 @@ export const getExtensionsSettings = async ({ local, module, registry, }) => {
57
63
  id: randomUUID(),
58
64
  enabled: true,
59
65
  source: source,
60
- bundle: bundleId,
66
+ bundle: id,
61
67
  folder: entry.name,
62
68
  });
63
69
  }
64
70
  }
65
71
  else {
66
72
  newSettings.push({
67
- id: randomUUID(),
73
+ id,
68
74
  enabled: true,
69
75
  source: source,
70
76
  bundle: null,
@@ -94,12 +100,12 @@ export const getExtensionsSettings = async ({ local, module, registry, }) => {
94
100
  await service.extensionsItemService.updateOne(settingsForName.id, { folder });
95
101
  continue;
96
102
  }
97
- generateSettingsEntry(folder, extension, 'local');
103
+ await generateSettingsEntry(folder, extension, 'local');
98
104
  }
99
105
  for (const [folder, extension] of module.entries()) {
100
106
  const existingSettings = moduleSettings.find((settings) => settings.folder === folder);
101
107
  if (!existingSettings) {
102
- generateSettingsEntry(folder, extension, 'module');
108
+ await generateSettingsEntry(folder, extension, 'module');
103
109
  }
104
110
  else if (extension.type === 'bundle') {
105
111
  updateBundleEntriesSettings(extension, existingSettings, moduleSettings);
@@ -108,7 +114,7 @@ export const getExtensionsSettings = async ({ local, module, registry, }) => {
108
114
  for (const [folder, extension] of registry.entries()) {
109
115
  const existingSettings = registrySettings.find((settings) => settings.folder === folder);
110
116
  if (!existingSettings) {
111
- generateSettingsEntry(folder, extension, 'registry');
117
+ await generateSettingsEntry(folder, extension, 'registry');
112
118
  }
113
119
  else if (extension.type === 'bundle') {
114
120
  updateBundleEntriesSettings(extension, existingSettings, registrySettings);
package/dist/flows.d.ts CHANGED
@@ -1,4 +1,5 @@
1
1
  import type { OperationHandler } from '@directus/extensions';
2
+ import type { Accountability, SchemaOverview } from '@directus/types';
2
3
  export declare function getFlowManager(): FlowManager;
3
4
  declare class FlowManager {
4
5
  private isLoaded;
@@ -14,7 +15,10 @@ declare class FlowManager {
14
15
  addOperation(id: string, operation: OperationHandler): void;
15
16
  removeOperation(id: string): void;
16
17
  runOperationFlow(id: string, data: unknown, context: Record<string, unknown>): Promise<unknown>;
17
- runWebhookFlow(id: string, data: unknown, context: Record<string, unknown>): Promise<{
18
+ runWebhookFlow(id: string, data: unknown, context: {
19
+ schema: SchemaOverview;
20
+ accountability: Accountability | undefined;
21
+ } & Record<string, unknown>): Promise<{
18
22
  result: unknown;
19
23
  cacheEnabled?: boolean;
20
24
  }>;
package/dist/flows.js CHANGED
@@ -2,21 +2,23 @@ import { Action } from '@directus/constants';
2
2
  import { useEnv } from '@directus/env';
3
3
  import { ForbiddenError } from '@directus/errors';
4
4
  import { isSystemCollection } from '@directus/system-data';
5
- import { applyOptionsData, getRedactedString, isValidJSON, parseJSON, toArray } from '@directus/utils';
5
+ import { applyOptionsData, deepMap, getRedactedString, isValidJSON, parseJSON, toArray } from '@directus/utils';
6
6
  import { pick } from 'lodash-es';
7
7
  import { get } from 'micromustache';
8
8
  import { useBus } from './bus/index.js';
9
9
  import getDatabase from './database/index.js';
10
10
  import emitter from './emitter.js';
11
11
  import { useLogger } from './logger/index.js';
12
+ import { fetchPermissions } from './permissions/lib/fetch-permissions.js';
13
+ import { fetchPolicies } from './permissions/lib/fetch-policies.js';
12
14
  import { ActivityService } from './services/activity.js';
13
15
  import { FlowsService } from './services/flows.js';
14
16
  import * as services from './services/index.js';
15
17
  import { RevisionsService } from './services/revisions.js';
16
18
  import { constructFlowTree } from './utils/construct-flow-tree.js';
17
19
  import { getSchema } from './utils/get-schema.js';
20
+ import { getService } from './utils/get-service.js';
18
21
  import { JobQueue } from './utils/job-queue.js';
19
- import { mapValuesDeep } from './utils/map-values-deep.js';
20
22
  import { redactObject } from './utils/redact-object.js';
21
23
  import { scheduleSynchronizedJob, validateCron } from './utils/schedule.js';
22
24
  let flowManager;
@@ -189,7 +191,9 @@ class FlowManager {
189
191
  else if (flow.trigger === 'manual') {
190
192
  const handler = async (data, context) => {
191
193
  const enabledCollections = flow.options?.['collections'] ?? [];
194
+ const requireSelection = flow.options?.['requireSelection'] ?? true;
192
195
  const targetCollection = data?.['body'].collection;
196
+ const targetKeys = data?.['body'].keys;
193
197
  if (!targetCollection) {
194
198
  logger.warn(`Manual trigger requires "collection" to be specified in the payload`);
195
199
  throw new ForbiddenError();
@@ -202,6 +206,44 @@ class FlowManager {
202
206
  logger.warn(`Specified collection must be one of: ${enabledCollections.join(', ')}.`);
203
207
  throw new ForbiddenError();
204
208
  }
209
+ if (!targetKeys || !Array.isArray(targetKeys)) {
210
+ logger.warn(`Manual trigger requires "keys" to be specified in the payload`);
211
+ throw new ForbiddenError();
212
+ }
213
+ if (requireSelection && targetKeys.length === 0) {
214
+ logger.warn(`Manual trigger requires at least one key to be specified in the payload`);
215
+ throw new ForbiddenError();
216
+ }
217
+ const accountability = context?.['accountability'];
218
+ if (!accountability) {
219
+ logger.warn(`Manual flows are only triggerable when authenticated`);
220
+ throw new ForbiddenError();
221
+ }
222
+ if (accountability.admin === false) {
223
+ const database = context['database'] ?? getDatabase();
224
+ const schema = context['schema'] ?? (await getSchema({ database }));
225
+ const policies = await fetchPolicies(accountability, { schema, knex: database });
226
+ const permissions = await fetchPermissions({
227
+ policies,
228
+ accountability,
229
+ action: 'read',
230
+ collections: [targetCollection],
231
+ }, { schema, knex: database });
232
+ if (permissions.length === 0) {
233
+ logger.warn(`Triggering ${targetCollection} is not allowed`);
234
+ throw new ForbiddenError();
235
+ }
236
+ const service = getService(targetCollection, { schema, accountability, knex: database });
237
+ const primaryField = schema.collections[targetCollection].primary;
238
+ let keys = await service.readMany(targetKeys, { fields: [primaryField] }, {
239
+ emitEvents: false,
240
+ });
241
+ keys = keys.map((key) => key[primaryField]);
242
+ if (targetKeys.some((key) => !keys.includes(key))) {
243
+ logger.warn(`Triggering keys ${targetKeys} is not allowed`);
244
+ throw new ForbiddenError();
245
+ }
246
+ }
205
247
  if (flow.options['async']) {
206
248
  this.executeFlow(flow, data, context);
207
249
  return { result: undefined };
@@ -321,9 +363,24 @@ class FlowManager {
321
363
  return { successor: null, status: 'unknown', data: null, options: null };
322
364
  }
323
365
  const handler = this.operations.get(operation.type);
366
+ let optionData = keyedData;
367
+ if (operation.type === 'log') {
368
+ optionData = redactObject(keyedData, {
369
+ keys: [
370
+ ['**', 'headers', 'authorization'],
371
+ ['**', 'headers', 'cookie'],
372
+ ['**', 'query', 'access_token'],
373
+ ['**', 'payload', 'password'],
374
+ ['**', 'payload', 'token'],
375
+ ['**', 'payload', 'tfa_secret'],
376
+ ['**', 'payload', 'external_identifier'],
377
+ ['**', 'payload', 'auth_data'],
378
+ ],
379
+ }, getRedactedString);
380
+ }
324
381
  let options = operation.options;
325
382
  try {
326
- options = applyOptionsData(options, keyedData);
383
+ options = applyOptionsData(options, optionData);
327
384
  let result = await handler(options, {
328
385
  services,
329
386
  env: useEnv(),
@@ -339,7 +396,7 @@ class FlowManager {
339
396
  // JSON structures don't allow for undefined values, so we need to replace them with null
340
397
  // Otherwise the applyOptionsData function will not work correctly on the next operation
341
398
  if (typeof result === 'object' && result !== null) {
342
- result = mapValuesDeep(result, (_, value) => (value === undefined ? null : value));
399
+ result = deepMap(result, (value) => (value === undefined ? null : value));
343
400
  }
344
401
  return { successor: operation.resolve, status: 'resolve', data: result ?? null, options };
345
402
  }
@@ -70,6 +70,8 @@ export async function getPermissionsForShare(accountability, collections, contex
70
70
  reducedSchema = reduceSchema(context.schema, shareFieldMap);
71
71
  reducedSchema = reduceSchema(reducedSchema, userFieldMap);
72
72
  }
73
+ if (!isAdmin)
74
+ defaults.fields = permissions.find((perm) => perm.collection === collection)?.fields ?? [];
73
75
  const parentPrimaryKeyField = context.schema.collections[collection].primary;
74
76
  const relationalPermissions = traverse(reducedSchema, parentPrimaryKeyField, item, collection);
75
77
  const parentCollectionPermission = {
@@ -0,0 +1 @@
1
+ export declare function mergeFields(fieldsA: string[] | null, fieldsB: string[] | null, strategy: 'and' | 'or'): string[];
@@ -0,0 +1,29 @@
1
+ import { intersection, union } from 'lodash-es';
2
+ export function mergeFields(fieldsA, fieldsB, strategy) {
3
+ if (fieldsA === null)
4
+ fieldsA = [];
5
+ if (fieldsB === null)
6
+ fieldsB = [];
7
+ let fields = [];
8
+ if (strategy === 'and') {
9
+ if (fieldsA.length === 0 || fieldsB.length === 0)
10
+ return [];
11
+ if (fieldsA.includes('*'))
12
+ return fieldsB;
13
+ if (fieldsB.includes('*'))
14
+ return fieldsA;
15
+ fields = intersection(fieldsA, fieldsB);
16
+ }
17
+ else {
18
+ if (fieldsA.length === 0)
19
+ return fieldsB;
20
+ if (fieldsB.length === 0)
21
+ return fieldsA;
22
+ if (fieldsA.includes('*') || fieldsB.includes('*'))
23
+ return ['*'];
24
+ fields = union(fieldsA, fieldsB);
25
+ }
26
+ if (fields.includes('*'))
27
+ return ['*'];
28
+ return fields;
29
+ }
@@ -1,4 +1,5 @@
1
- import { flatten, intersection, isEqual, merge, omit, uniq } from 'lodash-es';
1
+ import { flatten, isEqual, merge, omit } from 'lodash-es';
2
+ import { mergeFields } from './merge-fields.js';
2
3
  // Adapted from https://github.com/directus/directus/blob/141b8adbf4dd8e06530a7929f34e3fc68a522053/api/src/utils/merge-permissions.ts#L4
3
4
  /**
4
5
  * Merges multiple permission lists into a flat list of unique permissions.
@@ -92,19 +93,7 @@ export function mergePermission(strategy, currentPerm, newPerm) {
92
93
  };
93
94
  }
94
95
  }
95
- if (newPerm.fields) {
96
- if (Array.isArray(currentPerm.fields) && strategy === 'or') {
97
- fields = uniq([...currentPerm.fields, ...newPerm.fields]);
98
- }
99
- else if (Array.isArray(currentPerm.fields) && strategy === 'and') {
100
- fields = intersection(currentPerm.fields, newPerm.fields);
101
- }
102
- else {
103
- fields = newPerm.fields;
104
- }
105
- if (fields.includes('*'))
106
- fields = ['*'];
107
- }
96
+ fields = mergeFields(currentPerm.fields, newPerm.fields, strategy);
108
97
  if (newPerm.presets) {
109
98
  presets = merge({}, presets, newPerm.presets);
110
99
  }
@@ -405,7 +405,7 @@ export class FieldsService {
405
405
  });
406
406
  }
407
407
  catch (err) {
408
- throw await translateDatabaseError(err);
408
+ throw await translateDatabaseError(err, field);
409
409
  }
410
410
  }
411
411
  }
@@ -649,11 +649,11 @@ export class FieldsService {
649
649
  }
650
650
  }
651
651
  else if (field.type === 'string') {
652
- column = table.string(field.field, field.schema?.max_length ?? undefined);
652
+ column = table.string(field.field, field.schema?.max_length ?? existing?.max_length ?? undefined);
653
653
  }
654
654
  else if (['float', 'decimal'].includes(field.type)) {
655
655
  const type = field.type;
656
- column = table[type](field.field, field.schema?.numeric_precision ?? DEFAULT_NUMERIC_PRECISION, field.schema?.numeric_scale ?? DEFAULT_NUMERIC_SCALE);
656
+ column = table[type](field.field, field.schema?.numeric_precision ?? existing?.numeric_precision ?? DEFAULT_NUMERIC_PRECISION, field.schema?.numeric_scale ?? existing?.numeric_scale ?? DEFAULT_NUMERIC_SCALE);
657
657
  }
658
658
  else if (field.type === 'csv') {
659
659
  column = table.text(field.field);
@@ -8,7 +8,7 @@ export async function resolveMutation(gql, args, info) {
8
8
  if (gql.scope === 'system')
9
9
  collection = `directus_${collection}`;
10
10
  const selections = replaceFragmentsInSelections(info.fieldNodes[0]?.selectionSet?.selections, info.fragments);
11
- const query = await getQuery(args, selections || [], info.variableValues, gql.schema, gql.accountability);
11
+ const query = await getQuery(args, gql.schema, selections || [], info.variableValues, gql.accountability, collection);
12
12
  const singleton = collection.endsWith('_batch') === false &&
13
13
  collection.endsWith('_items') === false &&
14
14
  collection.endsWith('_item') === false &&
@@ -26,7 +26,7 @@ export async function resolveQuery(gql, info) {
26
26
  collection = collection.slice(0, -11);
27
27
  }
28
28
  else {
29
- query = await getQuery(args, selections, info.variableValues, gql.schema, gql.accountability);
29
+ query = await getQuery(args, gql.schema, selections, info.variableValues, gql.accountability, collection);
30
30
  if (collection.endsWith('_by_id') && collection in gql.schema.collections === false) {
31
31
  collection = collection.slice(0, -6);
32
32
  }
@@ -311,7 +311,7 @@ export function injectSystemResolvers(gql, schemaComposer, { CreateCollectionTyp
311
311
  return null;
312
312
  const service = new UsersService({ schema: gql.schema, accountability: gql.accountability });
313
313
  const selections = replaceFragmentsInSelections(info.fieldNodes[0]?.selectionSet?.selections, info.fragments);
314
- const query = await getQuery(args, selections || [], info.variableValues, gql.schema, gql.accountability);
314
+ const query = await getQuery(args, gql.schema, selections || [], info.variableValues, gql.accountability, 'directus_users');
315
315
  return await service.readOne(gql.accountability.user, query);
316
316
  },
317
317
  },
@@ -349,7 +349,7 @@ export function injectSystemResolvers(gql, schemaComposer, { CreateCollectionTyp
349
349
  schema: gql.schema,
350
350
  });
351
351
  const selections = replaceFragmentsInSelections(info.fieldNodes[0]?.selectionSet?.selections, info.fragments);
352
- const query = await getQuery(args, selections || [], info.variableValues, gql.schema, gql.accountability);
352
+ const query = await getQuery(args, gql.schema, selections || [], info.variableValues, gql.accountability, 'directus_roles');
353
353
  query.limit = -1;
354
354
  const roles = await service.readMany(gql.accountability.roles, query);
355
355
  return roles;
@@ -397,7 +397,7 @@ export function injectSystemResolvers(gql, schemaComposer, { CreateCollectionTyp
397
397
  await service.updateOne(gql.accountability.user, args['data']);
398
398
  if ('directus_users' in ReadCollectionTypes) {
399
399
  const selections = replaceFragmentsInSelections(info.fieldNodes[0]?.selectionSet?.selections, info.fragments);
400
- const query = await getQuery(args, selections || [], info.variableValues, gql.schema, gql.accountability);
400
+ const query = await getQuery(args, gql.schema, selections || [], info.variableValues, gql.accountability, 'directus_users');
401
401
  return await service.readOne(gql.accountability.user, query);
402
402
  }
403
403
  return true;
@@ -421,7 +421,7 @@ export function injectSystemResolvers(gql, schemaComposer, { CreateCollectionTyp
421
421
  const primaryKey = await service.importOne(args['url'], args['data']);
422
422
  if ('directus_files' in ReadCollectionTypes) {
423
423
  const selections = replaceFragmentsInSelections(info.fieldNodes[0]?.selectionSet?.selections, info.fragments);
424
- const query = await getQuery(args, selections || [], info.variableValues, gql.schema, gql.accountability);
424
+ const query = await getQuery(args, gql.schema, selections || [], info.variableValues, gql.accountability, 'directus_files');
425
425
  return await service.readOne(primaryKey, query);
426
426
  }
427
427
  return true;
@@ -4,4 +4,4 @@ import type { GraphQLResolveInfo, SelectionNode } from 'graphql';
4
4
  * Get a Directus Query object from the parsed arguments (rawQuery) and GraphQL AST selectionSet. Converts SelectionSet into
5
5
  * Directus' `fields` query for use in the resolver. Also applies variables where appropriate.
6
6
  */
7
- export declare function getQuery(rawQuery: Query, selections: readonly SelectionNode[], variableValues: GraphQLResolveInfo['variableValues'], schema: SchemaOverview, accountability?: Accountability | null): Promise<Query>;
7
+ export declare function getQuery(rawQuery: Query, schema: SchemaOverview, selections: readonly SelectionNode[], variableValues: GraphQLResolveInfo['variableValues'], accountability?: Accountability | null, collection?: string): Promise<Query>;
@@ -3,11 +3,12 @@ import { sanitizeQuery } from '../../../utils/sanitize-query.js';
3
3
  import { validateQuery } from '../../../utils/validate-query.js';
4
4
  import { replaceFuncs } from '../utils/replace-funcs.js';
5
5
  import { parseArgs } from './parse-args.js';
6
+ import { filterReplaceM2A, filterReplaceM2ADeep } from '../utils/filter-replace-m2a.js';
6
7
  /**
7
8
  * Get a Directus Query object from the parsed arguments (rawQuery) and GraphQL AST selectionSet. Converts SelectionSet into
8
9
  * Directus' `fields` query for use in the resolver. Also applies variables where appropriate.
9
10
  */
10
- export async function getQuery(rawQuery, selections, variableValues, schema, accountability) {
11
+ export async function getQuery(rawQuery, schema, selections, variableValues, accountability, collection) {
11
12
  const query = await sanitizeQuery(rawQuery, schema, accountability);
12
13
  const parseAliases = (selections) => {
13
14
  const aliases = {};
@@ -93,6 +94,12 @@ export async function getQuery(rawQuery, selections, variableValues, schema, acc
93
94
  if (query.filter)
94
95
  query.filter = replaceFuncs(query.filter);
95
96
  query.deep = replaceFuncs(query.deep);
97
+ if (collection) {
98
+ if (query.filter) {
99
+ query.filter = filterReplaceM2A(query.filter, collection, schema);
100
+ }
101
+ query.deep = filterReplaceM2ADeep(query.deep, collection, schema);
102
+ }
96
103
  validateQuery(query);
97
104
  return query;
98
105
  }
@@ -721,14 +721,14 @@ export async function getReadableTypes(gql, schemaComposer, schema, inconsistent
721
721
  }
722
722
  }
723
723
  else if (relation.meta?.one_allowed_collections) {
724
- ReadableCollectionQuantifierFilterTypes[relation.collection]?.removeField('item');
725
- ReadableCollectionFilterTypes[relation.collection]?.removeField('item');
724
+ ReadableCollectionQuantifierFilterTypes[relation.collection]?.removeField(relation.field);
725
+ ReadableCollectionFilterTypes[relation.collection]?.removeField(relation.field);
726
726
  for (const collection of relation.meta.one_allowed_collections) {
727
727
  ReadableCollectionQuantifierFilterTypes[relation.collection]?.addFields({
728
- [`item__${collection}`]: ReadableCollectionFilterTypes[collection],
728
+ [`${relation.field}__${collection}`]: ReadableCollectionFilterTypes[collection],
729
729
  });
730
730
  ReadableCollectionFilterTypes[relation.collection]?.addFields({
731
- [`item__${collection}`]: ReadableCollectionFilterTypes[collection],
731
+ [`${relation.field}__${collection}`]: ReadableCollectionFilterTypes[collection],
732
732
  });
733
733
  }
734
734
  }
@@ -89,7 +89,7 @@ async function parseFields(gql, request) {
89
89
  }
90
90
  return result;
91
91
  }, []);
92
- const { fields } = await getQuery({}, dataSelections, request.variableValues, gql.schema, gql.accountability);
92
+ const { fields } = await getQuery({}, gql.schema, dataSelections, request.variableValues, gql.accountability);
93
93
  return fields ?? [];
94
94
  }
95
95
  function parseArguments(request) {
@@ -0,0 +1,3 @@
1
+ import type { Filter, NestedDeepQuery, SchemaOverview } from '@directus/types';
2
+ export declare function filterReplaceM2A(filter_arg: Filter, collection: string, schema: SchemaOverview): any;
3
+ export declare function filterReplaceM2ADeep(deep_arg: NestedDeepQuery | null | undefined, collection: string, schema: SchemaOverview): any;
@@ -0,0 +1,59 @@
1
+ import { getRelation } from '@directus/utils';
2
+ import { getRelationType } from '../../../utils/get-relation-type.js';
3
+ export function filterReplaceM2A(filter_arg, collection, schema) {
4
+ const filter = filter_arg;
5
+ for (const key in filter) {
6
+ const [field, any_collection] = key.split('__');
7
+ if (!field)
8
+ continue;
9
+ const relation = getRelation(schema.relations, collection, field);
10
+ const type = relation ? getRelationType({ relation, collection, field }) : null;
11
+ if (type === 'o2m' && relation) {
12
+ filter[key] = filterReplaceM2A(filter[key], relation.collection, schema);
13
+ }
14
+ else if (type === 'm2o' && relation) {
15
+ filter[key] = filterReplaceM2A(filter[key], relation.related_collection, schema);
16
+ }
17
+ else if (type === 'a2o' &&
18
+ relation &&
19
+ any_collection &&
20
+ relation.meta?.one_allowed_collections?.includes(any_collection)) {
21
+ filter[`${field}:${any_collection}`] = filterReplaceM2A(filter[key], any_collection, schema);
22
+ delete filter[key];
23
+ }
24
+ else if (Array.isArray(filter[key])) {
25
+ filter[key] = filter[key].map((item) => filterReplaceM2A(item, collection, schema));
26
+ }
27
+ else if (typeof filter[key] === 'object') {
28
+ filter[key] = filterReplaceM2A(filter[key], collection, schema);
29
+ }
30
+ }
31
+ return filter;
32
+ }
33
+ export function filterReplaceM2ADeep(deep_arg, collection, schema) {
34
+ const deep = deep_arg;
35
+ for (const key in deep) {
36
+ if (key.startsWith('_') === false) {
37
+ const [field, any_collection] = key.split('__');
38
+ if (!field)
39
+ continue;
40
+ const relation = getRelation(schema.relations, collection, field);
41
+ if (!relation)
42
+ continue;
43
+ const type = getRelationType({ relation, collection, field });
44
+ if (type === 'o2m') {
45
+ deep[key] = filterReplaceM2ADeep(deep[key], relation.collection, schema);
46
+ }
47
+ else if (type === 'm2o') {
48
+ deep[key] = filterReplaceM2ADeep(deep[key], relation.related_collection, schema);
49
+ }
50
+ else if (type === 'a2o' && any_collection && relation.meta?.one_allowed_collections?.includes(any_collection)) {
51
+ deep[key] = filterReplaceM2ADeep(deep[key], any_collection, schema);
52
+ }
53
+ }
54
+ if (key === '_filter') {
55
+ deep[key] = filterReplaceM2A(deep[key], collection, schema);
56
+ }
57
+ }
58
+ return deep;
59
+ }
@@ -186,7 +186,7 @@ export class ItemsService {
186
186
  }
187
187
  }
188
188
  catch (err) {
189
- const dbError = await translateDatabaseError(err);
189
+ const dbError = await translateDatabaseError(err, data);
190
190
  if (isDirectusError(dbError, ErrorCode.RecordNotUnique) && dbError.extensions.primaryKey) {
191
191
  // This is a MySQL specific thing we need to handle here, since MySQL does not return the field name
192
192
  // if the unique constraint is the primary key
@@ -590,7 +590,7 @@ export class ItemsService {
590
590
  await trx(this.collection).update(payloadWithTypeCasting).whereIn(primaryKeyField, keys);
591
591
  }
592
592
  catch (err) {
593
- throw await translateDatabaseError(err);
593
+ throw await translateDatabaseError(err, data);
594
594
  }
595
595
  }
596
596
  const childrenRevisions = [...revisionsM2O, ...revisionsA2O];
@@ -3,8 +3,8 @@ import formatTitle from '@directus/format-title';
3
3
  import { spec } from '@directus/specs';
4
4
  import { isSystemCollection } from '@directus/system-data';
5
5
  import { getRelation } from '@directus/utils';
6
- import { version } from 'directus/version';
7
6
  import { cloneDeep, mergeWith } from 'lodash-es';
7
+ import hash from 'object-hash';
8
8
  import { OAS_REQUIRED_SCHEMAS } from '../constants.js';
9
9
  import getDatabase from '../database/index.js';
10
10
  import { fetchPermissions } from '../permissions/lib/fetch-permissions.js';
@@ -54,12 +54,16 @@ class OASSpecsService {
54
54
  const components = await this.generateComponents(schemaForSpec, tags);
55
55
  const isDefaultPublicUrl = env['PUBLIC_URL'] === '/';
56
56
  const url = isDefaultPublicUrl && host ? host : env['PUBLIC_URL'];
57
+ const hashedVersion = hash({
58
+ now: new Date().toISOString(),
59
+ user: this.accountability?.user,
60
+ });
57
61
  const spec = {
58
62
  openapi: '3.0.1',
59
63
  info: {
60
64
  title: 'Dynamic API Specification',
61
65
  description: 'This is a dynamically generated API specification for all endpoints existing on the current project.',
62
- version: version,
66
+ version: hashedVersion,
63
67
  },
64
68
  servers: [
65
69
  {
@@ -40,6 +40,7 @@ export class UsersService extends ItemsService {
40
40
  throw new RecordNotUniqueError({
41
41
  collection: 'directus_users',
42
42
  field: 'email',
43
+ value: '[' + String(duplicates) + ']',
43
44
  });
44
45
  }
45
46
  const query = this.knex
@@ -54,6 +55,7 @@ export class UsersService extends ItemsService {
54
55
  throw new RecordNotUniqueError({
55
56
  collection: 'directus_users',
56
57
  field: 'email',
58
+ value: '[' + String(emails) + ']',
57
59
  });
58
60
  }
59
61
  }
@@ -210,6 +212,7 @@ export class UsersService extends ItemsService {
210
212
  throw new RecordNotUniqueError({
211
213
  collection: 'directus_users',
212
214
  field: 'email',
215
+ value: data['email'],
213
216
  });
214
217
  }
215
218
  this.validateEmail(data['email']);
@@ -8,6 +8,7 @@ import { getFieldCount } from '../utils/get-field-count.js';
8
8
  import { getFilesizeSum } from '../utils/get-filesize-sum.js';
9
9
  import { getItemCount } from '../utils/get-item-count.js';
10
10
  import { getUserItemCount } from '../utils/get-user-item-count.js';
11
+ import { getProjectId } from '../utils/get-project-id.js';
11
12
  const basicCountTasks = [
12
13
  { collection: 'directus_dashboards' },
13
14
  { collection: 'directus_files' },
@@ -25,7 +26,7 @@ export const getReport = async () => {
25
26
  const db = getDatabase();
26
27
  const env = useEnv();
27
28
  const helpers = getHelpers(db);
28
- const [basicCounts, userCounts, userItemCount, fieldsCounts, extensionsCounts, databaseSize, filesizes] = await Promise.all([
29
+ const [basicCounts, userCounts, userItemCount, fieldsCounts, extensionsCounts, databaseSize, filesizes, projectId] = await Promise.all([
29
30
  getItemCount(db, basicCountTasks),
30
31
  fetchUserCount({ knex: db }),
31
32
  getUserItemCount(db),
@@ -33,6 +34,7 @@ export const getReport = async () => {
33
34
  getExtensionCount(db),
34
35
  helpers.schema.getDatabaseSize(),
35
36
  getFilesizeSum(db),
37
+ getProjectId(db),
36
38
  ]);
37
39
  return {
38
40
  url: env['PUBLIC_URL'],
@@ -53,5 +55,6 @@ export const getReport = async () => {
53
55
  extensions: extensionsCounts.totalEnabled,
54
56
  database_size: databaseSize ?? 0,
55
57
  files_size_total: filesizes.total,
58
+ project_id: projectId,
56
59
  };
57
60
  };
@@ -71,4 +71,8 @@ export interface TelemetryReport {
71
71
  * Total size of the files in bytes
72
72
  */
73
73
  files_size_total: number;
74
+ /**
75
+ * Unique project identifier
76
+ */
77
+ project_id?: string | null;
74
78
  }
@@ -0,0 +1,2 @@
1
+ import type { Knex } from 'knex';
2
+ export declare const getProjectId: (db: Knex) => Promise<string>;
@@ -0,0 +1,4 @@
1
+ export const getProjectId = async (db) => {
2
+ const projectId = await db.select('project_id').from('directus_settings').first();
3
+ return projectId?.project_id || null;
4
+ };
@@ -16,7 +16,7 @@ export default function isUrlAllowed(url, allowList) {
16
16
  return origin + pathname;
17
17
  }
18
18
  catch {
19
- logger.warn(`Invalid URL used "${url}"`);
19
+ logger.warn(`Invalid URL used "${allowedURL}"`);
20
20
  }
21
21
  return null;
22
22
  })
@@ -71,6 +71,9 @@ export async function sanitizeQuery(rawQuery, schema, accountability) {
71
71
  if (rawQuery['alias']) {
72
72
  query.alias = sanitizeAlias(rawQuery['alias']);
73
73
  }
74
+ if ('backlink' in rawQuery) {
75
+ query.backlink = sanitizeBacklink(rawQuery['backlink']);
76
+ }
74
77
  return query;
75
78
  }
76
79
  function sanitizeFields(rawFields) {
@@ -171,6 +174,9 @@ function sanitizeMeta(rawMeta) {
171
174
  }
172
175
  return [rawMeta];
173
176
  }
177
+ function sanitizeBacklink(rawBacklink) {
178
+ return rawBacklink !== false && rawBacklink !== 'false';
179
+ }
174
180
  async function sanitizeDeep(deep, schema, accountability) {
175
181
  const logger = useLogger();
176
182
  const result = {};