@directus/api 27.0.2 → 28.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/auth/drivers/oauth2.js +4 -4
- package/dist/auth/drivers/openid.d.ts +2 -1
- package/dist/auth/drivers/openid.js +78 -43
- package/dist/database/errors/dialects/mssql.d.ts +2 -1
- package/dist/database/errors/dialects/mssql.js +124 -120
- package/dist/database/errors/dialects/mysql.d.ts +2 -1
- package/dist/database/errors/dialects/mysql.js +112 -108
- package/dist/database/errors/dialects/postgres.d.ts +2 -1
- package/dist/database/errors/dialects/postgres.js +75 -71
- package/dist/database/errors/dialects/sqlite.d.ts +2 -1
- package/dist/database/errors/dialects/sqlite.js +6 -5
- package/dist/database/errors/translate.d.ts +2 -1
- package/dist/database/errors/translate.js +5 -5
- package/dist/database/get-ast-from-query/lib/convert-wildcards.d.ts +5 -3
- package/dist/database/get-ast-from-query/lib/convert-wildcards.js +26 -16
- package/dist/database/get-ast-from-query/lib/parse-fields.d.ts +2 -1
- package/dist/database/get-ast-from-query/lib/parse-fields.js +5 -4
- package/dist/database/migrations/20250609A-license-banner.d.ts +3 -0
- package/dist/database/migrations/20250609A-license-banner.js +14 -0
- package/dist/database/migrations/20250613A-add-project-id.d.ts +3 -0
- package/dist/database/migrations/20250613A-add-project-id.js +26 -0
- package/dist/extensions/lib/get-extensions-settings.js +14 -8
- package/dist/extensions/manager.js +26 -0
- package/dist/flows.d.ts +5 -1
- package/dist/flows.js +61 -4
- package/dist/operations/condition/index.js +1 -1
- package/dist/permissions/utils/get-permissions-for-share.js +2 -0
- package/dist/permissions/utils/merge-fields.d.ts +1 -0
- package/dist/permissions/utils/merge-fields.js +29 -0
- package/dist/permissions/utils/merge-permissions.js +3 -14
- package/dist/services/fields.js +3 -3
- package/dist/services/graphql/resolvers/mutation.js +1 -1
- package/dist/services/graphql/resolvers/query.js +1 -1
- package/dist/services/graphql/resolvers/system.js +4 -4
- package/dist/services/graphql/schema/parse-query.d.ts +1 -1
- package/dist/services/graphql/schema/parse-query.js +8 -1
- package/dist/services/graphql/schema/read.js +4 -4
- package/dist/services/graphql/subscription.js +1 -1
- package/dist/services/graphql/utils/filter-replace-m2a.d.ts +3 -0
- package/dist/services/graphql/utils/filter-replace-m2a.js +59 -0
- package/dist/services/items.js +2 -2
- package/dist/services/payload.js +2 -2
- package/dist/services/relations.d.ts +1 -1
- package/dist/services/relations.js +5 -2
- package/dist/services/specifications.js +6 -2
- package/dist/services/users.js +3 -0
- package/dist/telemetry/lib/get-report.js +4 -1
- package/dist/telemetry/types/report.d.ts +4 -0
- package/dist/telemetry/utils/get-project-id.d.ts +2 -0
- package/dist/telemetry/utils/get-project-id.js +4 -0
- package/dist/utils/get-ip-from-req.d.ts +2 -1
- package/dist/utils/get-ip-from-req.js +29 -2
- package/dist/utils/get-schema.js +1 -1
- package/dist/utils/is-url-allowed.js +1 -1
- package/dist/utils/sanitize-query.js +6 -0
- package/dist/utils/validate-query.js +1 -0
- package/dist/websocket/controllers/base.d.ts +2 -2
- package/dist/websocket/controllers/base.js +33 -5
- package/dist/websocket/types.d.ts +1 -0
- package/dist/websocket/utils/items.d.ts +1 -1
- package/package.json +27 -24
- package/dist/utils/map-values-deep.d.ts +0 -1
- package/dist/utils/map-values-deep.js +0 -25
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { v7 as uuid } from 'uuid';
|
|
2
|
+
export async function up(knex) {
|
|
3
|
+
await knex.schema.alterTable('directus_settings', (table) => {
|
|
4
|
+
table.uuid('project_id');
|
|
5
|
+
});
|
|
6
|
+
const existing = await knex('directus_settings').select('id').first();
|
|
7
|
+
const timestamp = await knex('directus_migrations').select('timestamp').first();
|
|
8
|
+
const msecs = timestamp ? new Date(timestamp.timestamp).getTime() : Date.now();
|
|
9
|
+
if (existing) {
|
|
10
|
+
await knex('directus_settings').update({
|
|
11
|
+
project_id: uuid({
|
|
12
|
+
msecs,
|
|
13
|
+
}),
|
|
14
|
+
});
|
|
15
|
+
}
|
|
16
|
+
else {
|
|
17
|
+
await knex('directus_settings').insert({
|
|
18
|
+
project_id: uuid(),
|
|
19
|
+
});
|
|
20
|
+
}
|
|
21
|
+
}
|
|
22
|
+
export async function down(knex) {
|
|
23
|
+
await knex.schema.alterTable('directus_settings', (table) => {
|
|
24
|
+
table.dropColumn('project_id');
|
|
25
|
+
});
|
|
26
|
+
}
|
|
@@ -2,6 +2,7 @@ import { randomUUID } from 'node:crypto';
|
|
|
2
2
|
import getDatabase from '../../database/index.js';
|
|
3
3
|
import { ExtensionsService } from '../../services/extensions.js';
|
|
4
4
|
import { getSchema } from '../../utils/get-schema.js';
|
|
5
|
+
import { list } from '@directus/extensions-registry';
|
|
5
6
|
/**
|
|
6
7
|
* Loads stored settings for all extensions. Creates empty new rows in extensions tables for
|
|
7
8
|
* extensions that don't have settings yet, and remove any settings for extensions that are no
|
|
@@ -42,11 +43,16 @@ export const getExtensionsSettings = async ({ local, module, registry, }) => {
|
|
|
42
43
|
});
|
|
43
44
|
}
|
|
44
45
|
};
|
|
45
|
-
const generateSettingsEntry = (folder, extension, source) => {
|
|
46
|
+
const generateSettingsEntry = async (folder, extension, source) => {
|
|
47
|
+
let marketplaceId;
|
|
48
|
+
if (source === 'registry') {
|
|
49
|
+
const marketplace = await list({ search: extension.name });
|
|
50
|
+
marketplaceId = marketplace.data.find((ext) => ext.name === extension.name)?.id;
|
|
51
|
+
}
|
|
52
|
+
const id = marketplaceId ?? randomUUID();
|
|
46
53
|
if (extension.type === 'bundle') {
|
|
47
|
-
const bundleId = randomUUID();
|
|
48
54
|
newSettings.push({
|
|
49
|
-
id
|
|
55
|
+
id,
|
|
50
56
|
enabled: true,
|
|
51
57
|
source: source,
|
|
52
58
|
bundle: null,
|
|
@@ -57,14 +63,14 @@ export const getExtensionsSettings = async ({ local, module, registry, }) => {
|
|
|
57
63
|
id: randomUUID(),
|
|
58
64
|
enabled: true,
|
|
59
65
|
source: source,
|
|
60
|
-
bundle:
|
|
66
|
+
bundle: id,
|
|
61
67
|
folder: entry.name,
|
|
62
68
|
});
|
|
63
69
|
}
|
|
64
70
|
}
|
|
65
71
|
else {
|
|
66
72
|
newSettings.push({
|
|
67
|
-
id
|
|
73
|
+
id,
|
|
68
74
|
enabled: true,
|
|
69
75
|
source: source,
|
|
70
76
|
bundle: null,
|
|
@@ -94,12 +100,12 @@ export const getExtensionsSettings = async ({ local, module, registry, }) => {
|
|
|
94
100
|
await service.extensionsItemService.updateOne(settingsForName.id, { folder });
|
|
95
101
|
continue;
|
|
96
102
|
}
|
|
97
|
-
generateSettingsEntry(folder, extension, 'local');
|
|
103
|
+
await generateSettingsEntry(folder, extension, 'local');
|
|
98
104
|
}
|
|
99
105
|
for (const [folder, extension] of module.entries()) {
|
|
100
106
|
const existingSettings = moduleSettings.find((settings) => settings.folder === folder);
|
|
101
107
|
if (!existingSettings) {
|
|
102
|
-
generateSettingsEntry(folder, extension, 'module');
|
|
108
|
+
await generateSettingsEntry(folder, extension, 'module');
|
|
103
109
|
}
|
|
104
110
|
else if (extension.type === 'bundle') {
|
|
105
111
|
updateBundleEntriesSettings(extension, existingSettings, moduleSettings);
|
|
@@ -108,7 +114,7 @@ export const getExtensionsSettings = async ({ local, module, registry, }) => {
|
|
|
108
114
|
for (const [folder, extension] of registry.entries()) {
|
|
109
115
|
const existingSettings = registrySettings.find((settings) => settings.folder === folder);
|
|
110
116
|
if (!existingSettings) {
|
|
111
|
-
generateSettingsEntry(folder, extension, 'registry');
|
|
117
|
+
await generateSettingsEntry(folder, extension, 'registry');
|
|
112
118
|
}
|
|
113
119
|
else if (extension.type === 'bundle') {
|
|
114
120
|
updateBundleEntriesSettings(extension, existingSettings, registrySettings);
|
|
@@ -169,13 +169,25 @@ export class ExtensionManager {
|
|
|
169
169
|
* Installs an external extension from registry
|
|
170
170
|
*/
|
|
171
171
|
async install(versionId) {
|
|
172
|
+
const logger = useLogger();
|
|
172
173
|
await this.installationManager.install(versionId);
|
|
173
174
|
await this.reload({ forceSync: true });
|
|
175
|
+
emitter.emitAction('extensions.installed', {
|
|
176
|
+
extensions: this.extensions,
|
|
177
|
+
versionId,
|
|
178
|
+
});
|
|
179
|
+
logger.info(`Installed extension: ${versionId}`);
|
|
174
180
|
await this.broadcastReloadNotification();
|
|
175
181
|
}
|
|
176
182
|
async uninstall(folder) {
|
|
183
|
+
const logger = useLogger();
|
|
177
184
|
await this.installationManager.uninstall(folder);
|
|
178
185
|
await this.reload({ forceSync: true });
|
|
186
|
+
emitter.emitAction('extensions.uninstalled', {
|
|
187
|
+
extensions: this.extensions,
|
|
188
|
+
folder,
|
|
189
|
+
});
|
|
190
|
+
logger.info(`Uninstalled extension: ${folder}`);
|
|
179
191
|
await this.broadcastReloadNotification();
|
|
180
192
|
}
|
|
181
193
|
async broadcastReloadNotification() {
|
|
@@ -211,6 +223,10 @@ export class ExtensionManager {
|
|
|
211
223
|
this.appExtensionsBundle = await this.generateExtensionBundle();
|
|
212
224
|
}
|
|
213
225
|
this.isLoaded = true;
|
|
226
|
+
emitter.emitAction('extensions.load', {
|
|
227
|
+
extensions: this.extensions,
|
|
228
|
+
});
|
|
229
|
+
logger.info('Extensions loaded');
|
|
214
230
|
}
|
|
215
231
|
/**
|
|
216
232
|
* Unregister all extensions from the current process
|
|
@@ -220,6 +236,11 @@ export class ExtensionManager {
|
|
|
220
236
|
this.localEmitter.offAll();
|
|
221
237
|
this.appExtensionsBundle = null;
|
|
222
238
|
this.isLoaded = false;
|
|
239
|
+
emitter.emitAction('extensions.unload', {
|
|
240
|
+
extensions: this.extensions,
|
|
241
|
+
});
|
|
242
|
+
const logger = useLogger();
|
|
243
|
+
logger.info('Extensions unloaded');
|
|
223
244
|
}
|
|
224
245
|
/**
|
|
225
246
|
* Reload all the extensions. Will unload if extensions have already been loaded
|
|
@@ -247,6 +268,11 @@ export class ExtensionManager {
|
|
|
247
268
|
this.updateWatchedExtensions(added, removed);
|
|
248
269
|
const addedExtensions = added.map((extension) => extension.name);
|
|
249
270
|
const removedExtensions = removed.map((extension) => extension.name);
|
|
271
|
+
emitter.emitAction('extensions.reload', {
|
|
272
|
+
extensions: this.extensions,
|
|
273
|
+
added: addedExtensions,
|
|
274
|
+
removed: removedExtensions,
|
|
275
|
+
});
|
|
250
276
|
if (addedExtensions.length > 0) {
|
|
251
277
|
logger.info(`Added extensions: ${addedExtensions.join(', ')}`);
|
|
252
278
|
}
|
package/dist/flows.d.ts
CHANGED
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import type { OperationHandler } from '@directus/extensions';
|
|
2
|
+
import type { Accountability, SchemaOverview } from '@directus/types';
|
|
2
3
|
export declare function getFlowManager(): FlowManager;
|
|
3
4
|
declare class FlowManager {
|
|
4
5
|
private isLoaded;
|
|
@@ -14,7 +15,10 @@ declare class FlowManager {
|
|
|
14
15
|
addOperation(id: string, operation: OperationHandler): void;
|
|
15
16
|
removeOperation(id: string): void;
|
|
16
17
|
runOperationFlow(id: string, data: unknown, context: Record<string, unknown>): Promise<unknown>;
|
|
17
|
-
runWebhookFlow(id: string, data: unknown, context:
|
|
18
|
+
runWebhookFlow(id: string, data: unknown, context: {
|
|
19
|
+
schema: SchemaOverview;
|
|
20
|
+
accountability: Accountability | undefined;
|
|
21
|
+
} & Record<string, unknown>): Promise<{
|
|
18
22
|
result: unknown;
|
|
19
23
|
cacheEnabled?: boolean;
|
|
20
24
|
}>;
|
package/dist/flows.js
CHANGED
|
@@ -2,21 +2,23 @@ import { Action } from '@directus/constants';
|
|
|
2
2
|
import { useEnv } from '@directus/env';
|
|
3
3
|
import { ForbiddenError } from '@directus/errors';
|
|
4
4
|
import { isSystemCollection } from '@directus/system-data';
|
|
5
|
-
import { applyOptionsData, getRedactedString, isValidJSON, parseJSON, toArray } from '@directus/utils';
|
|
5
|
+
import { applyOptionsData, deepMap, getRedactedString, isValidJSON, parseJSON, toArray } from '@directus/utils';
|
|
6
6
|
import { pick } from 'lodash-es';
|
|
7
7
|
import { get } from 'micromustache';
|
|
8
8
|
import { useBus } from './bus/index.js';
|
|
9
9
|
import getDatabase from './database/index.js';
|
|
10
10
|
import emitter from './emitter.js';
|
|
11
11
|
import { useLogger } from './logger/index.js';
|
|
12
|
+
import { fetchPermissions } from './permissions/lib/fetch-permissions.js';
|
|
13
|
+
import { fetchPolicies } from './permissions/lib/fetch-policies.js';
|
|
12
14
|
import { ActivityService } from './services/activity.js';
|
|
13
15
|
import { FlowsService } from './services/flows.js';
|
|
14
16
|
import * as services from './services/index.js';
|
|
15
17
|
import { RevisionsService } from './services/revisions.js';
|
|
16
18
|
import { constructFlowTree } from './utils/construct-flow-tree.js';
|
|
17
19
|
import { getSchema } from './utils/get-schema.js';
|
|
20
|
+
import { getService } from './utils/get-service.js';
|
|
18
21
|
import { JobQueue } from './utils/job-queue.js';
|
|
19
|
-
import { mapValuesDeep } from './utils/map-values-deep.js';
|
|
20
22
|
import { redactObject } from './utils/redact-object.js';
|
|
21
23
|
import { scheduleSynchronizedJob, validateCron } from './utils/schedule.js';
|
|
22
24
|
let flowManager;
|
|
@@ -189,7 +191,9 @@ class FlowManager {
|
|
|
189
191
|
else if (flow.trigger === 'manual') {
|
|
190
192
|
const handler = async (data, context) => {
|
|
191
193
|
const enabledCollections = flow.options?.['collections'] ?? [];
|
|
194
|
+
const requireSelection = flow.options?.['requireSelection'] ?? true;
|
|
192
195
|
const targetCollection = data?.['body'].collection;
|
|
196
|
+
const targetKeys = data?.['body'].keys;
|
|
193
197
|
if (!targetCollection) {
|
|
194
198
|
logger.warn(`Manual trigger requires "collection" to be specified in the payload`);
|
|
195
199
|
throw new ForbiddenError();
|
|
@@ -202,6 +206,44 @@ class FlowManager {
|
|
|
202
206
|
logger.warn(`Specified collection must be one of: ${enabledCollections.join(', ')}.`);
|
|
203
207
|
throw new ForbiddenError();
|
|
204
208
|
}
|
|
209
|
+
if (!targetKeys || !Array.isArray(targetKeys)) {
|
|
210
|
+
logger.warn(`Manual trigger requires "keys" to be specified in the payload`);
|
|
211
|
+
throw new ForbiddenError();
|
|
212
|
+
}
|
|
213
|
+
if (requireSelection && targetKeys.length === 0) {
|
|
214
|
+
logger.warn(`Manual trigger requires at least one key to be specified in the payload`);
|
|
215
|
+
throw new ForbiddenError();
|
|
216
|
+
}
|
|
217
|
+
const accountability = context?.['accountability'];
|
|
218
|
+
if (!accountability) {
|
|
219
|
+
logger.warn(`Manual flows are only triggerable when authenticated`);
|
|
220
|
+
throw new ForbiddenError();
|
|
221
|
+
}
|
|
222
|
+
if (accountability.admin === false) {
|
|
223
|
+
const database = context['database'] ?? getDatabase();
|
|
224
|
+
const schema = context['schema'] ?? (await getSchema({ database }));
|
|
225
|
+
const policies = await fetchPolicies(accountability, { schema, knex: database });
|
|
226
|
+
const permissions = await fetchPermissions({
|
|
227
|
+
policies,
|
|
228
|
+
accountability,
|
|
229
|
+
action: 'read',
|
|
230
|
+
collections: [targetCollection],
|
|
231
|
+
}, { schema, knex: database });
|
|
232
|
+
if (permissions.length === 0) {
|
|
233
|
+
logger.warn(`Triggering ${targetCollection} is not allowed`);
|
|
234
|
+
throw new ForbiddenError();
|
|
235
|
+
}
|
|
236
|
+
const service = getService(targetCollection, { schema, accountability, knex: database });
|
|
237
|
+
const primaryField = schema.collections[targetCollection].primary;
|
|
238
|
+
let keys = await service.readMany(targetKeys, { fields: [primaryField] }, {
|
|
239
|
+
emitEvents: false,
|
|
240
|
+
});
|
|
241
|
+
keys = keys.map((key) => key[primaryField]);
|
|
242
|
+
if (targetKeys.some((key) => !keys.includes(key))) {
|
|
243
|
+
logger.warn(`Triggering keys ${targetKeys} is not allowed`);
|
|
244
|
+
throw new ForbiddenError();
|
|
245
|
+
}
|
|
246
|
+
}
|
|
205
247
|
if (flow.options['async']) {
|
|
206
248
|
this.executeFlow(flow, data, context);
|
|
207
249
|
return { result: undefined };
|
|
@@ -321,9 +363,24 @@ class FlowManager {
|
|
|
321
363
|
return { successor: null, status: 'unknown', data: null, options: null };
|
|
322
364
|
}
|
|
323
365
|
const handler = this.operations.get(operation.type);
|
|
366
|
+
let optionData = keyedData;
|
|
367
|
+
if (operation.type === 'log') {
|
|
368
|
+
optionData = redactObject(keyedData, {
|
|
369
|
+
keys: [
|
|
370
|
+
['**', 'headers', 'authorization'],
|
|
371
|
+
['**', 'headers', 'cookie'],
|
|
372
|
+
['**', 'query', 'access_token'],
|
|
373
|
+
['**', 'payload', 'password'],
|
|
374
|
+
['**', 'payload', 'token'],
|
|
375
|
+
['**', 'payload', 'tfa_secret'],
|
|
376
|
+
['**', 'payload', 'external_identifier'],
|
|
377
|
+
['**', 'payload', 'auth_data'],
|
|
378
|
+
],
|
|
379
|
+
}, getRedactedString);
|
|
380
|
+
}
|
|
324
381
|
let options = operation.options;
|
|
325
382
|
try {
|
|
326
|
-
options = applyOptionsData(options,
|
|
383
|
+
options = applyOptionsData(options, optionData);
|
|
327
384
|
let result = await handler(options, {
|
|
328
385
|
services,
|
|
329
386
|
env: useEnv(),
|
|
@@ -339,7 +396,7 @@ class FlowManager {
|
|
|
339
396
|
// JSON structures don't allow for undefined values, so we need to replace them with null
|
|
340
397
|
// Otherwise the applyOptionsData function will not work correctly on the next operation
|
|
341
398
|
if (typeof result === 'object' && result !== null) {
|
|
342
|
-
result =
|
|
399
|
+
result = deepMap(result, (value) => (value === undefined ? null : value));
|
|
343
400
|
}
|
|
344
401
|
return { successor: operation.resolve, status: 'resolve', data: result ?? null, options };
|
|
345
402
|
}
|
|
@@ -4,7 +4,7 @@ import { FailedValidationError, joiValidationErrorItemToErrorExtensions } from '
|
|
|
4
4
|
export default defineOperationApi({
|
|
5
5
|
id: 'condition',
|
|
6
6
|
handler: ({ filter }, { data, accountability }) => {
|
|
7
|
-
const parsedFilter = parseFilter(filter, accountability);
|
|
7
|
+
const parsedFilter = parseFilter(filter, accountability, undefined, true);
|
|
8
8
|
if (!parsedFilter) {
|
|
9
9
|
return null;
|
|
10
10
|
}
|
|
@@ -70,6 +70,8 @@ export async function getPermissionsForShare(accountability, collections, contex
|
|
|
70
70
|
reducedSchema = reduceSchema(context.schema, shareFieldMap);
|
|
71
71
|
reducedSchema = reduceSchema(reducedSchema, userFieldMap);
|
|
72
72
|
}
|
|
73
|
+
if (!isAdmin)
|
|
74
|
+
defaults.fields = permissions.find((perm) => perm.collection === collection)?.fields ?? [];
|
|
73
75
|
const parentPrimaryKeyField = context.schema.collections[collection].primary;
|
|
74
76
|
const relationalPermissions = traverse(reducedSchema, parentPrimaryKeyField, item, collection);
|
|
75
77
|
const parentCollectionPermission = {
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function mergeFields(fieldsA: string[] | null, fieldsB: string[] | null, strategy: 'and' | 'or'): string[];
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { intersection, union } from 'lodash-es';
|
|
2
|
+
export function mergeFields(fieldsA, fieldsB, strategy) {
|
|
3
|
+
if (fieldsA === null)
|
|
4
|
+
fieldsA = [];
|
|
5
|
+
if (fieldsB === null)
|
|
6
|
+
fieldsB = [];
|
|
7
|
+
let fields = [];
|
|
8
|
+
if (strategy === 'and') {
|
|
9
|
+
if (fieldsA.length === 0 || fieldsB.length === 0)
|
|
10
|
+
return [];
|
|
11
|
+
if (fieldsA.includes('*'))
|
|
12
|
+
return fieldsB;
|
|
13
|
+
if (fieldsB.includes('*'))
|
|
14
|
+
return fieldsA;
|
|
15
|
+
fields = intersection(fieldsA, fieldsB);
|
|
16
|
+
}
|
|
17
|
+
else {
|
|
18
|
+
if (fieldsA.length === 0)
|
|
19
|
+
return fieldsB;
|
|
20
|
+
if (fieldsB.length === 0)
|
|
21
|
+
return fieldsA;
|
|
22
|
+
if (fieldsA.includes('*') || fieldsB.includes('*'))
|
|
23
|
+
return ['*'];
|
|
24
|
+
fields = union(fieldsA, fieldsB);
|
|
25
|
+
}
|
|
26
|
+
if (fields.includes('*'))
|
|
27
|
+
return ['*'];
|
|
28
|
+
return fields;
|
|
29
|
+
}
|
|
@@ -1,4 +1,5 @@
|
|
|
1
|
-
import { flatten,
|
|
1
|
+
import { flatten, isEqual, merge, omit } from 'lodash-es';
|
|
2
|
+
import { mergeFields } from './merge-fields.js';
|
|
2
3
|
// Adapted from https://github.com/directus/directus/blob/141b8adbf4dd8e06530a7929f34e3fc68a522053/api/src/utils/merge-permissions.ts#L4
|
|
3
4
|
/**
|
|
4
5
|
* Merges multiple permission lists into a flat list of unique permissions.
|
|
@@ -92,19 +93,7 @@ export function mergePermission(strategy, currentPerm, newPerm) {
|
|
|
92
93
|
};
|
|
93
94
|
}
|
|
94
95
|
}
|
|
95
|
-
|
|
96
|
-
if (Array.isArray(currentPerm.fields) && strategy === 'or') {
|
|
97
|
-
fields = uniq([...currentPerm.fields, ...newPerm.fields]);
|
|
98
|
-
}
|
|
99
|
-
else if (Array.isArray(currentPerm.fields) && strategy === 'and') {
|
|
100
|
-
fields = intersection(currentPerm.fields, newPerm.fields);
|
|
101
|
-
}
|
|
102
|
-
else {
|
|
103
|
-
fields = newPerm.fields;
|
|
104
|
-
}
|
|
105
|
-
if (fields.includes('*'))
|
|
106
|
-
fields = ['*'];
|
|
107
|
-
}
|
|
96
|
+
fields = mergeFields(currentPerm.fields, newPerm.fields, strategy);
|
|
108
97
|
if (newPerm.presets) {
|
|
109
98
|
presets = merge({}, presets, newPerm.presets);
|
|
110
99
|
}
|
package/dist/services/fields.js
CHANGED
|
@@ -405,7 +405,7 @@ export class FieldsService {
|
|
|
405
405
|
});
|
|
406
406
|
}
|
|
407
407
|
catch (err) {
|
|
408
|
-
throw await translateDatabaseError(err);
|
|
408
|
+
throw await translateDatabaseError(err, field);
|
|
409
409
|
}
|
|
410
410
|
}
|
|
411
411
|
}
|
|
@@ -649,11 +649,11 @@ export class FieldsService {
|
|
|
649
649
|
}
|
|
650
650
|
}
|
|
651
651
|
else if (field.type === 'string') {
|
|
652
|
-
column = table.string(field.field, field.schema?.max_length ?? undefined);
|
|
652
|
+
column = table.string(field.field, field.schema?.max_length ?? existing?.max_length ?? undefined);
|
|
653
653
|
}
|
|
654
654
|
else if (['float', 'decimal'].includes(field.type)) {
|
|
655
655
|
const type = field.type;
|
|
656
|
-
column = table[type](field.field, field.schema?.numeric_precision ?? DEFAULT_NUMERIC_PRECISION, field.schema?.numeric_scale ?? DEFAULT_NUMERIC_SCALE);
|
|
656
|
+
column = table[type](field.field, field.schema?.numeric_precision ?? existing?.numeric_precision ?? DEFAULT_NUMERIC_PRECISION, field.schema?.numeric_scale ?? existing?.numeric_scale ?? DEFAULT_NUMERIC_SCALE);
|
|
657
657
|
}
|
|
658
658
|
else if (field.type === 'csv') {
|
|
659
659
|
column = table.text(field.field);
|
|
@@ -8,7 +8,7 @@ export async function resolveMutation(gql, args, info) {
|
|
|
8
8
|
if (gql.scope === 'system')
|
|
9
9
|
collection = `directus_${collection}`;
|
|
10
10
|
const selections = replaceFragmentsInSelections(info.fieldNodes[0]?.selectionSet?.selections, info.fragments);
|
|
11
|
-
const query = await getQuery(args, selections || [], info.variableValues, gql.
|
|
11
|
+
const query = await getQuery(args, gql.schema, selections || [], info.variableValues, gql.accountability, collection);
|
|
12
12
|
const singleton = collection.endsWith('_batch') === false &&
|
|
13
13
|
collection.endsWith('_items') === false &&
|
|
14
14
|
collection.endsWith('_item') === false &&
|
|
@@ -26,7 +26,7 @@ export async function resolveQuery(gql, info) {
|
|
|
26
26
|
collection = collection.slice(0, -11);
|
|
27
27
|
}
|
|
28
28
|
else {
|
|
29
|
-
query = await getQuery(args, selections, info.variableValues, gql.
|
|
29
|
+
query = await getQuery(args, gql.schema, selections, info.variableValues, gql.accountability, collection);
|
|
30
30
|
if (collection.endsWith('_by_id') && collection in gql.schema.collections === false) {
|
|
31
31
|
collection = collection.slice(0, -6);
|
|
32
32
|
}
|
|
@@ -311,7 +311,7 @@ export function injectSystemResolvers(gql, schemaComposer, { CreateCollectionTyp
|
|
|
311
311
|
return null;
|
|
312
312
|
const service = new UsersService({ schema: gql.schema, accountability: gql.accountability });
|
|
313
313
|
const selections = replaceFragmentsInSelections(info.fieldNodes[0]?.selectionSet?.selections, info.fragments);
|
|
314
|
-
const query = await getQuery(args, selections || [], info.variableValues, gql.
|
|
314
|
+
const query = await getQuery(args, gql.schema, selections || [], info.variableValues, gql.accountability, 'directus_users');
|
|
315
315
|
return await service.readOne(gql.accountability.user, query);
|
|
316
316
|
},
|
|
317
317
|
},
|
|
@@ -349,7 +349,7 @@ export function injectSystemResolvers(gql, schemaComposer, { CreateCollectionTyp
|
|
|
349
349
|
schema: gql.schema,
|
|
350
350
|
});
|
|
351
351
|
const selections = replaceFragmentsInSelections(info.fieldNodes[0]?.selectionSet?.selections, info.fragments);
|
|
352
|
-
const query = await getQuery(args, selections || [], info.variableValues, gql.
|
|
352
|
+
const query = await getQuery(args, gql.schema, selections || [], info.variableValues, gql.accountability, 'directus_roles');
|
|
353
353
|
query.limit = -1;
|
|
354
354
|
const roles = await service.readMany(gql.accountability.roles, query);
|
|
355
355
|
return roles;
|
|
@@ -397,7 +397,7 @@ export function injectSystemResolvers(gql, schemaComposer, { CreateCollectionTyp
|
|
|
397
397
|
await service.updateOne(gql.accountability.user, args['data']);
|
|
398
398
|
if ('directus_users' in ReadCollectionTypes) {
|
|
399
399
|
const selections = replaceFragmentsInSelections(info.fieldNodes[0]?.selectionSet?.selections, info.fragments);
|
|
400
|
-
const query = await getQuery(args, selections || [], info.variableValues, gql.
|
|
400
|
+
const query = await getQuery(args, gql.schema, selections || [], info.variableValues, gql.accountability, 'directus_users');
|
|
401
401
|
return await service.readOne(gql.accountability.user, query);
|
|
402
402
|
}
|
|
403
403
|
return true;
|
|
@@ -421,7 +421,7 @@ export function injectSystemResolvers(gql, schemaComposer, { CreateCollectionTyp
|
|
|
421
421
|
const primaryKey = await service.importOne(args['url'], args['data']);
|
|
422
422
|
if ('directus_files' in ReadCollectionTypes) {
|
|
423
423
|
const selections = replaceFragmentsInSelections(info.fieldNodes[0]?.selectionSet?.selections, info.fragments);
|
|
424
|
-
const query = await getQuery(args, selections || [], info.variableValues, gql.
|
|
424
|
+
const query = await getQuery(args, gql.schema, selections || [], info.variableValues, gql.accountability, 'directus_files');
|
|
425
425
|
return await service.readOne(primaryKey, query);
|
|
426
426
|
}
|
|
427
427
|
return true;
|
|
@@ -4,4 +4,4 @@ import type { GraphQLResolveInfo, SelectionNode } from 'graphql';
|
|
|
4
4
|
* Get a Directus Query object from the parsed arguments (rawQuery) and GraphQL AST selectionSet. Converts SelectionSet into
|
|
5
5
|
* Directus' `fields` query for use in the resolver. Also applies variables where appropriate.
|
|
6
6
|
*/
|
|
7
|
-
export declare function getQuery(rawQuery: Query, selections: readonly SelectionNode[], variableValues: GraphQLResolveInfo['variableValues'],
|
|
7
|
+
export declare function getQuery(rawQuery: Query, schema: SchemaOverview, selections: readonly SelectionNode[], variableValues: GraphQLResolveInfo['variableValues'], accountability?: Accountability | null, collection?: string): Promise<Query>;
|
|
@@ -3,11 +3,12 @@ import { sanitizeQuery } from '../../../utils/sanitize-query.js';
|
|
|
3
3
|
import { validateQuery } from '../../../utils/validate-query.js';
|
|
4
4
|
import { replaceFuncs } from '../utils/replace-funcs.js';
|
|
5
5
|
import { parseArgs } from './parse-args.js';
|
|
6
|
+
import { filterReplaceM2A, filterReplaceM2ADeep } from '../utils/filter-replace-m2a.js';
|
|
6
7
|
/**
|
|
7
8
|
* Get a Directus Query object from the parsed arguments (rawQuery) and GraphQL AST selectionSet. Converts SelectionSet into
|
|
8
9
|
* Directus' `fields` query for use in the resolver. Also applies variables where appropriate.
|
|
9
10
|
*/
|
|
10
|
-
export async function getQuery(rawQuery, selections, variableValues,
|
|
11
|
+
export async function getQuery(rawQuery, schema, selections, variableValues, accountability, collection) {
|
|
11
12
|
const query = await sanitizeQuery(rawQuery, schema, accountability);
|
|
12
13
|
const parseAliases = (selections) => {
|
|
13
14
|
const aliases = {};
|
|
@@ -93,6 +94,12 @@ export async function getQuery(rawQuery, selections, variableValues, schema, acc
|
|
|
93
94
|
if (query.filter)
|
|
94
95
|
query.filter = replaceFuncs(query.filter);
|
|
95
96
|
query.deep = replaceFuncs(query.deep);
|
|
97
|
+
if (collection) {
|
|
98
|
+
if (query.filter) {
|
|
99
|
+
query.filter = filterReplaceM2A(query.filter, collection, schema);
|
|
100
|
+
}
|
|
101
|
+
query.deep = filterReplaceM2ADeep(query.deep, collection, schema);
|
|
102
|
+
}
|
|
96
103
|
validateQuery(query);
|
|
97
104
|
return query;
|
|
98
105
|
}
|
|
@@ -721,14 +721,14 @@ export async function getReadableTypes(gql, schemaComposer, schema, inconsistent
|
|
|
721
721
|
}
|
|
722
722
|
}
|
|
723
723
|
else if (relation.meta?.one_allowed_collections) {
|
|
724
|
-
ReadableCollectionQuantifierFilterTypes[relation.collection]?.removeField(
|
|
725
|
-
ReadableCollectionFilterTypes[relation.collection]?.removeField(
|
|
724
|
+
ReadableCollectionQuantifierFilterTypes[relation.collection]?.removeField(relation.field);
|
|
725
|
+
ReadableCollectionFilterTypes[relation.collection]?.removeField(relation.field);
|
|
726
726
|
for (const collection of relation.meta.one_allowed_collections) {
|
|
727
727
|
ReadableCollectionQuantifierFilterTypes[relation.collection]?.addFields({
|
|
728
|
-
[
|
|
728
|
+
[`${relation.field}__${collection}`]: ReadableCollectionFilterTypes[collection],
|
|
729
729
|
});
|
|
730
730
|
ReadableCollectionFilterTypes[relation.collection]?.addFields({
|
|
731
|
-
[
|
|
731
|
+
[`${relation.field}__${collection}`]: ReadableCollectionFilterTypes[collection],
|
|
732
732
|
});
|
|
733
733
|
}
|
|
734
734
|
}
|
|
@@ -89,7 +89,7 @@ async function parseFields(gql, request) {
|
|
|
89
89
|
}
|
|
90
90
|
return result;
|
|
91
91
|
}, []);
|
|
92
|
-
const { fields } = await getQuery({}, dataSelections, request.variableValues, gql.
|
|
92
|
+
const { fields } = await getQuery({}, gql.schema, dataSelections, request.variableValues, gql.accountability);
|
|
93
93
|
return fields ?? [];
|
|
94
94
|
}
|
|
95
95
|
function parseArguments(request) {
|
|
@@ -0,0 +1,3 @@
|
|
|
1
|
+
import type { Filter, NestedDeepQuery, SchemaOverview } from '@directus/types';
|
|
2
|
+
export declare function filterReplaceM2A(filter_arg: Filter, collection: string, schema: SchemaOverview): any;
|
|
3
|
+
export declare function filterReplaceM2ADeep(deep_arg: NestedDeepQuery | null | undefined, collection: string, schema: SchemaOverview): any;
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import { getRelation } from '@directus/utils';
|
|
2
|
+
import { getRelationType } from '../../../utils/get-relation-type.js';
|
|
3
|
+
/**
 * Recursively rewrites double-underscore M2A filter keys (`field__collection`,
 * as generated for the GraphQL filter input types) into the API-native
 * `field:collection` syntax, descending through o2m/m2o relations and through
 * logical-operator arrays/objects.
 *
 * Mutates `filter_arg` in place and returns it.
 *
 * @param filter_arg Filter object (or nested fragment) to rewrite.
 * @param collection Collection the filter's keys are resolved against.
 * @param schema     Schema overview used to look up relations.
 * @returns The same (mutated) filter value; non-objects are returned untouched.
 */
export function filterReplaceM2A(filter_arg, collection, schema) {
    const filter = filter_arg;
    // Nulls and other primitives can arrive here via recursion on operator
    // values (typeof null === 'object'); there is nothing to rewrite in them.
    if (filter === null || typeof filter !== 'object') {
        return filter;
    }
    // Snapshot the keys up front: the a2o branch below deletes the current key
    // and adds a replacement, and per ECMA-262 a property added during for..in
    // enumeration may or may not be visited. Iterating a snapshot makes the
    // traversal deterministic and prevents re-processing freshly added keys.
    for (const key of Object.keys(filter)) {
        const [field, any_collection] = key.split('__');
        if (!field)
            continue;
        const relation = getRelation(schema.relations, collection, field);
        const type = relation ? getRelationType({ relation, collection, field }) : null;
        if (type === 'o2m' && relation) {
            // One-to-many: keep the key, continue resolving against the related collection
            filter[key] = filterReplaceM2A(filter[key], relation.collection, schema);
        }
        else if (type === 'm2o' && relation) {
            filter[key] = filterReplaceM2A(filter[key], relation.related_collection, schema);
        }
        else if (type === 'a2o' &&
            relation &&
            any_collection &&
            relation.meta?.one_allowed_collections?.includes(any_collection)) {
            // Any-to-one: replace `field__collection` with `field:collection`,
            // recursing with the targeted collection.
            filter[`${field}:${any_collection}`] = filterReplaceM2A(filter[key], any_collection, schema);
            delete filter[key];
        }
        else if (Array.isArray(filter[key])) {
            // Logical operators (e.g. _and/_or) hold arrays of sub-filters
            filter[key] = filter[key].map((item) => filterReplaceM2A(item, collection, schema));
        }
        else if (typeof filter[key] === 'object') {
            // Non-relational nested object (e.g. operator object): recurse in place
            filter[key] = filterReplaceM2A(filter[key], collection, schema);
        }
    }
    return filter;
}
|
|
33
|
+
/**
 * Walks a nested `deep` query and rewrites M2A keys inside it, delegating any
 * `_filter` payloads to filterReplaceM2A. Relational keys recurse with the
 * collection they resolve to; a2o keys recurse only when the targeted
 * collection is among the relation's allowed collections.
 *
 * Mutates `deep_arg` in place and returns it.
 *
 * @param deep_arg   Nested deep-query object (may be null/undefined).
 * @param collection Collection the keys are resolved against.
 * @param schema     Schema overview used to look up relations.
 * @returns The same (mutated) deep-query value.
 */
export function filterReplaceM2ADeep(deep_arg, collection, schema) {
    const deep = deep_arg;
    for (const key in deep) {
        // `_filter` carries a regular filter object; hand it to the filter rewriter.
        if (key === '_filter') {
            deep[key] = filterReplaceM2A(deep[key], collection, schema);
            continue;
        }
        // Remaining underscore-prefixed entries are query operators, not fields.
        if (key.startsWith('_'))
            continue;
        const [fieldName, targetCollection] = key.split('__');
        if (!fieldName)
            continue;
        const relation = getRelation(schema.relations, collection, fieldName);
        if (!relation)
            continue;
        switch (getRelationType({ relation, collection, field: fieldName })) {
            case 'o2m':
                deep[key] = filterReplaceM2ADeep(deep[key], relation.collection, schema);
                break;
            case 'm2o':
                deep[key] = filterReplaceM2ADeep(deep[key], relation.related_collection, schema);
                break;
            case 'a2o':
                if (targetCollection && relation.meta?.one_allowed_collections?.includes(targetCollection)) {
                    deep[key] = filterReplaceM2ADeep(deep[key], targetCollection, schema);
                }
                break;
        }
    }
    return deep;
}
|
package/dist/services/items.js
CHANGED
|
@@ -186,7 +186,7 @@ export class ItemsService {
|
|
|
186
186
|
}
|
|
187
187
|
}
|
|
188
188
|
catch (err) {
|
|
189
|
-
const dbError = await translateDatabaseError(err);
|
|
189
|
+
const dbError = await translateDatabaseError(err, data);
|
|
190
190
|
if (isDirectusError(dbError, ErrorCode.RecordNotUnique) && dbError.extensions.primaryKey) {
|
|
191
191
|
// This is a MySQL specific thing we need to handle here, since MySQL does not return the field name
|
|
192
192
|
// if the unique constraint is the primary key
|
|
@@ -590,7 +590,7 @@ export class ItemsService {
|
|
|
590
590
|
await trx(this.collection).update(payloadWithTypeCasting).whereIn(primaryKeyField, keys);
|
|
591
591
|
}
|
|
592
592
|
catch (err) {
|
|
593
|
-
throw await translateDatabaseError(err);
|
|
593
|
+
throw await translateDatabaseError(err, data);
|
|
594
594
|
}
|
|
595
595
|
}
|
|
596
596
|
const childrenRevisions = [...revisionsM2O, ...revisionsA2O];
|
package/dist/services/payload.js
CHANGED
|
@@ -285,7 +285,7 @@ export class PayloadService {
|
|
|
285
285
|
payload[name] = newValue;
|
|
286
286
|
}
|
|
287
287
|
if (dateColumn.type === 'dateTime') {
|
|
288
|
-
const year = String(value.getFullYear());
|
|
288
|
+
const year = String(value.getFullYear()).padStart(4, '0');
|
|
289
289
|
const month = String(value.getMonth() + 1).padStart(2, '0');
|
|
290
290
|
const day = String(value.getDate()).padStart(2, '0');
|
|
291
291
|
const hours = String(value.getHours()).padStart(2, '0');
|
|
@@ -295,7 +295,7 @@ export class PayloadService {
|
|
|
295
295
|
payload[name] = newValue;
|
|
296
296
|
}
|
|
297
297
|
if (dateColumn.type === 'date') {
|
|
298
|
-
const year = String(value.getFullYear());
|
|
298
|
+
const year = String(value.getFullYear()).padStart(4, '0');
|
|
299
299
|
const month = String(value.getMonth() + 1).padStart(2, '0');
|
|
300
300
|
const day = String(value.getDate()).padStart(2, '0');
|
|
301
301
|
// Strip off the time / timezone information from a date-only value
|
|
@@ -16,7 +16,7 @@ export declare class RelationsService {
|
|
|
16
16
|
helpers: Helpers;
|
|
17
17
|
constructor(options: AbstractServiceOptions);
|
|
18
18
|
foreignKeys(collection?: string): Promise<ForeignKey[]>;
|
|
19
|
-
readAll(collection?: string, opts?: QueryOptions): Promise<Relation[]>;
|
|
19
|
+
readAll(collection?: string, opts?: QueryOptions, bypassCache?: boolean): Promise<Relation[]>;
|
|
20
20
|
readOne(collection: string, field: string): Promise<Relation>;
|
|
21
21
|
/**
|
|
22
22
|
* Create a new relationship / foreign key constraint
|