@directus/api 9.26.0 → 10.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/__mocks__/cache.d.mts +5 -0
- package/dist/__mocks__/cache.mjs +7 -0
- package/dist/__utils__/items-utils.d.ts +2 -0
- package/dist/__utils__/items-utils.js +31 -0
- package/dist/__utils__/schemas.d.ts +13 -0
- package/dist/__utils__/schemas.js +301 -0
- package/dist/__utils__/snapshots.d.ts +5 -0
- package/dist/__utils__/snapshots.js +894 -0
- package/dist/app.js +17 -0
- package/dist/auth/drivers/oauth2.js +1 -1
- package/dist/auth/drivers/openid.js +1 -1
- package/dist/cli/commands/bootstrap/index.js +1 -0
- package/dist/controllers/flows.js +4 -1
- package/dist/controllers/schema.js +20 -16
- package/dist/controllers/utils.js +4 -2
- package/dist/database/migrations/run.js +3 -2
- package/dist/env.js +7 -0
- package/dist/extensions.js +4 -5
- package/dist/flows.d.ts +4 -1
- package/dist/flows.js +12 -10
- package/dist/services/collections.js +2 -1
- package/dist/services/graphql/index.js +4 -2
- package/dist/services/items.js +1 -1
- package/dist/services/permissions.js +18 -0
- package/dist/utils/apply-query.js +12 -2
- package/dist/utils/sanitize-error.d.ts +1 -0
- package/dist/utils/sanitize-error.js +7 -0
- package/dist/utils/sanitize-query.js +1 -1
- package/dist/utils/transformations.js +5 -3
- package/license +107 -0
- package/package.json +22 -23
- package/LICENSE +0 -674
- package/dist/app.test.d.ts +0 -1
- package/dist/controllers/files.test.d.ts +0 -1
- package/dist/database/migrations/run.test.d.ts +0 -1
- package/dist/env.test.d.ts +0 -1
- package/dist/logger.test.d.ts +0 -1
- package/dist/middleware/authenticate.test.d.ts +0 -1
- package/dist/middleware/extract-token.test.d.ts +0 -1
- package/dist/middleware/validate-batch.test.d.ts +0 -1
- package/dist/operations/condition/index.test.d.ts +0 -1
- package/dist/operations/exec/index.test.d.ts +0 -1
- package/dist/operations/item-create/index.test.d.ts +0 -1
- package/dist/operations/item-delete/index.test.d.ts +0 -1
- package/dist/operations/item-read/index.test.d.ts +0 -1
- package/dist/operations/item-update/index.test.d.ts +0 -1
- package/dist/operations/log/index.test.d.ts +0 -1
- package/dist/operations/mail/index.test.d.ts +0 -1
- package/dist/operations/notification/index.test.d.ts +0 -1
- package/dist/operations/request/index.test.d.ts +0 -1
- package/dist/operations/sleep/index.test.d.ts +0 -1
- package/dist/operations/transform/index.test.d.ts +0 -1
- package/dist/operations/trigger/index.test.d.ts +0 -1
- package/dist/request/index.test.d.ts +0 -1
- package/dist/request/request-interceptor.test.d.ts +0 -1
- package/dist/request/response-interceptor.test.d.ts +0 -1
- package/dist/request/validate-ip.test.d.ts +0 -1
- package/dist/services/files.test.d.ts +0 -1
- package/dist/services/graphql/utils/process-error.test.d.ts +0 -1
- package/dist/services/items.test.d.ts +0 -1
- package/dist/services/payload.test.d.ts +0 -1
- package/dist/services/roles.test.d.ts +0 -1
- package/dist/services/schema.test.d.ts +0 -1
- package/dist/services/specifications.test.d.ts +0 -1
- package/dist/services/users.test.d.ts +0 -1
- package/dist/services/webhooks.test.d.ts +0 -1
- package/dist/storage/get-storage-driver.test.d.ts +0 -1
- package/dist/storage/index.test.d.ts +0 -1
- package/dist/storage/register-drivers.test.d.ts +0 -1
- package/dist/storage/register-locations.test.d.ts +0 -1
- package/dist/utils/apply-diff.test.d.ts +0 -1
- package/dist/utils/apply-function-to-column-name.test.d.ts +0 -1
- package/dist/utils/apply-snapshot.test.d.ts +0 -1
- package/dist/utils/async-handler.test.d.ts +0 -1
- package/dist/utils/calculate-field-depth.test.d.ts +0 -1
- package/dist/utils/filter-items.test.d.ts +0 -1
- package/dist/utils/get-auth-providers.test.d.ts +0 -1
- package/dist/utils/get-cache-headers.test.d.ts +0 -1
- package/dist/utils/get-cache-key.test.d.ts +0 -1
- package/dist/utils/get-collection-from-alias.test.d.ts +0 -1
- package/dist/utils/get-column-path.test.d.ts +0 -1
- package/dist/utils/get-config-from-env.test.d.ts +0 -1
- package/dist/utils/get-date-formatted.test.d.ts +0 -1
- package/dist/utils/get-graphql-query-and-variables.test.d.ts +0 -1
- package/dist/utils/get-milliseconds.test.d.ts +0 -1
- package/dist/utils/get-relation-info.test.d.ts +0 -1
- package/dist/utils/get-relation-type.test.d.ts +0 -1
- package/dist/utils/get-string-byte-size.test.d.ts +0 -1
- package/dist/utils/get-versioned-hash.test.d.ts +0 -1
- package/dist/utils/is-directus-jwt.test.d.ts +0 -1
- package/dist/utils/jwt.test.d.ts +0 -1
- package/dist/utils/map-values-deep.test.d.ts +0 -1
- package/dist/utils/md.test.d.ts +0 -1
- package/dist/utils/merge-permissions.test.d.ts +0 -1
- package/dist/utils/sanitize-query.test.d.ts +0 -1
- package/dist/utils/sanitize-schema.test.d.ts +0 -1
- package/dist/utils/should-skip-cache.test.d.ts +0 -1
- package/dist/utils/stall.test.d.ts +0 -1
- package/dist/utils/strip-function.test.d.ts +0 -1
- package/dist/utils/url.test.d.ts +0 -1
- package/dist/utils/user-name.test.d.ts +0 -1
- package/dist/utils/validate-diff.test.d.ts +0 -1
- package/dist/utils/validate-env.test.d.ts +0 -1
- package/dist/utils/validate-keys.test.d.ts +0 -1
- package/dist/utils/validate-query.test.d.ts +0 -1
- package/dist/utils/validate-snapshot.test.d.ts +0 -1
package/dist/app.js
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { handlePressure } from '@directus/pressure';
|
|
1
2
|
import cookieParser from 'cookie-parser';
|
|
2
3
|
import express from 'express';
|
|
3
4
|
import { merge } from 'lodash-es';
|
|
@@ -38,6 +39,7 @@ import { isInstalled, validateDatabaseConnection, validateDatabaseExtensions, va
|
|
|
38
39
|
import emitter from './emitter.js';
|
|
39
40
|
import env from './env.js';
|
|
40
41
|
import { InvalidPayloadException } from './exceptions/invalid-payload.js';
|
|
42
|
+
import { ServiceUnavailableException } from './exceptions/service-unavailable.js';
|
|
41
43
|
import { getExtensionManager } from './extensions.js';
|
|
42
44
|
import { getFlowManager } from './flows.js';
|
|
43
45
|
import logger, { expressLogger } from './logger.js';
|
|
@@ -84,6 +86,21 @@ export default async function createApp() {
|
|
|
84
86
|
app.disable('x-powered-by');
|
|
85
87
|
app.set('trust proxy', env['IP_TRUST_PROXY']);
|
|
86
88
|
app.set('query parser', (str) => qs.parse(str, { depth: 10 }));
|
|
89
|
+
if (env['PRESSURE_LIMITER_ENABLED']) {
|
|
90
|
+
const sampleInterval = Number(env['PRESSURE_LIMITER_SAMPLE_INTERVAL']);
|
|
91
|
+
if (Number.isNaN(sampleInterval) === true || Number.isFinite(sampleInterval) === false) {
|
|
92
|
+
throw new Error(`Invalid value for PRESSURE_LIMITER_SAMPLE_INTERVAL environment variable`);
|
|
93
|
+
}
|
|
94
|
+
app.use(handlePressure({
|
|
95
|
+
sampleInterval,
|
|
96
|
+
maxEventLoopUtilization: env['PRESSURE_LIMITER_MAX_EVENT_LOOP_UTILIZATION'],
|
|
97
|
+
maxEventLoopDelay: env['PRESSURE_LIMITER_MAX_EVENT_LOOP_DELAY'],
|
|
98
|
+
maxMemoryRss: env['PRESSURE_LIMITER_MAX_MEMORY_RSS'],
|
|
99
|
+
maxMemoryHeapUsed: env['PRESSURE_LIMITER_MAX_MEMORY_HEAP_USED'],
|
|
100
|
+
error: new ServiceUnavailableException('Under pressure', { service: 'api' }),
|
|
101
|
+
retryAfter: env['PRESSURE_LIMITER_RETRY_AFTER'],
|
|
102
|
+
}));
|
|
103
|
+
}
|
|
87
104
|
app.use(helmet.contentSecurityPolicy(merge({
|
|
88
105
|
useDefaults: true,
|
|
89
106
|
directives: {
|
|
@@ -117,7 +117,7 @@ export class OAuth2AuthDriver extends LocalAuthDriver {
|
|
|
117
117
|
if (userId) {
|
|
118
118
|
// Run hook so the end user has the chance to augment the
|
|
119
119
|
// user that is about to be updated
|
|
120
|
-
const updatedUserPayload = await emitter.emitFilter(`auth.update`, {}, {
|
|
120
|
+
const updatedUserPayload = await emitter.emitFilter(`auth.update`, { auth_data: userPayload.auth_data }, {
|
|
121
121
|
identifier,
|
|
122
122
|
provider: this.config['provider'],
|
|
123
123
|
providerPayload: { accessToken: tokenSet.access_token, userInfo },
|
|
@@ -136,7 +136,7 @@ export class OpenIDAuthDriver extends LocalAuthDriver {
|
|
|
136
136
|
if (userId) {
|
|
137
137
|
// Run hook so the end user has the chance to augment the
|
|
138
138
|
// user that is about to be updated
|
|
139
|
-
const updatedUserPayload = await emitter.emitFilter(`auth.update`, {}, {
|
|
139
|
+
const updatedUserPayload = await emitter.emitFilter(`auth.update`, { auth_data: userPayload.auth_data }, {
|
|
140
140
|
identifier,
|
|
141
141
|
provider: this.config['provider'],
|
|
142
142
|
providerPayload: { accessToken: tokenSet.access_token, userInfo },
|
|
@@ -13,7 +13,7 @@ const router = express.Router();
|
|
|
13
13
|
router.use(useCollection('directus_flows'));
|
|
14
14
|
const webhookFlowHandler = asyncHandler(async (req, res, next) => {
|
|
15
15
|
const flowManager = getFlowManager();
|
|
16
|
-
const result = await flowManager.runWebhookFlow(`${req.method}-${req.params['pk']}`, {
|
|
16
|
+
const { result, cacheEnabled } = await flowManager.runWebhookFlow(`${req.method}-${req.params['pk']}`, {
|
|
17
17
|
path: req.path,
|
|
18
18
|
query: req.query,
|
|
19
19
|
body: req.body,
|
|
@@ -23,6 +23,9 @@ const webhookFlowHandler = asyncHandler(async (req, res, next) => {
|
|
|
23
23
|
accountability: req.accountability,
|
|
24
24
|
schema: req.schema,
|
|
25
25
|
});
|
|
26
|
+
if (!cacheEnabled) {
|
|
27
|
+
res.locals['cache'] = false;
|
|
28
|
+
}
|
|
26
29
|
res.locals['payload'] = result;
|
|
27
30
|
return next();
|
|
28
31
|
});
|
|
@@ -15,20 +15,17 @@ router.get('/snapshot', asyncHandler(async (req, res, next) => {
|
|
|
15
15
|
res.locals['payload'] = { data: currentSnapshot };
|
|
16
16
|
return next();
|
|
17
17
|
}), respond);
|
|
18
|
-
router.post('/apply', asyncHandler(async (req, _res, next) => {
|
|
19
|
-
const service = new SchemaService({ accountability: req.accountability });
|
|
20
|
-
await service.apply(req.body);
|
|
21
|
-
return next();
|
|
22
|
-
}), respond);
|
|
23
18
|
const schemaMultipartHandler = (req, res, next) => {
|
|
24
19
|
if (req.is('application/json')) {
|
|
25
|
-
if (Object.keys(req.body).length === 0)
|
|
20
|
+
if (Object.keys(req.body).length === 0) {
|
|
26
21
|
throw new InvalidPayloadException(`No data was included in the body`);
|
|
27
|
-
|
|
22
|
+
}
|
|
23
|
+
res.locals['upload'] = req.body;
|
|
28
24
|
return next();
|
|
29
25
|
}
|
|
30
|
-
if (!req.is('multipart/form-data'))
|
|
26
|
+
if (!req.is('multipart/form-data')) {
|
|
31
27
|
throw new UnsupportedMediaTypeException(`Unsupported Content-Type header`);
|
|
28
|
+
}
|
|
32
29
|
const headers = req.headers['content-type']
|
|
33
30
|
? req.headers
|
|
34
31
|
: {
|
|
@@ -37,7 +34,7 @@ const schemaMultipartHandler = (req, res, next) => {
|
|
|
37
34
|
};
|
|
38
35
|
const busboy = Busboy({ headers });
|
|
39
36
|
let isFileIncluded = false;
|
|
40
|
-
let
|
|
37
|
+
let upload = null;
|
|
41
38
|
busboy.on('file', async (_, fileStream, { mimeType }) => {
|
|
42
39
|
if (isFileIncluded)
|
|
43
40
|
return next(new InvalidPayloadException(`More than one file was included in the body`));
|
|
@@ -47,25 +44,26 @@ const schemaMultipartHandler = (req, res, next) => {
|
|
|
47
44
|
const uploadedString = await readableStreamToString(fileStream);
|
|
48
45
|
if (mimeType === 'application/json') {
|
|
49
46
|
try {
|
|
50
|
-
|
|
47
|
+
upload = parseJSON(uploadedString);
|
|
51
48
|
}
|
|
52
49
|
catch (err) {
|
|
53
50
|
logger.warn(err);
|
|
54
|
-
throw new InvalidPayloadException('
|
|
51
|
+
throw new InvalidPayloadException('The provided JSON is invalid.');
|
|
55
52
|
}
|
|
56
53
|
}
|
|
57
54
|
else {
|
|
58
55
|
try {
|
|
59
|
-
|
|
56
|
+
upload = await loadYaml(uploadedString);
|
|
60
57
|
}
|
|
61
58
|
catch (err) {
|
|
62
59
|
logger.warn(err);
|
|
63
|
-
throw new InvalidPayloadException('
|
|
60
|
+
throw new InvalidPayloadException('The provided YAML is invalid.');
|
|
64
61
|
}
|
|
65
62
|
}
|
|
66
|
-
if (!
|
|
63
|
+
if (!upload) {
|
|
67
64
|
throw new InvalidPayloadException(`No file was included in the body`);
|
|
68
|
-
|
|
65
|
+
}
|
|
66
|
+
res.locals['upload'] = upload;
|
|
69
67
|
return next();
|
|
70
68
|
}
|
|
71
69
|
catch (error) {
|
|
@@ -81,7 +79,7 @@ const schemaMultipartHandler = (req, res, next) => {
|
|
|
81
79
|
};
|
|
82
80
|
router.post('/diff', asyncHandler(schemaMultipartHandler), asyncHandler(async (req, res, next) => {
|
|
83
81
|
const service = new SchemaService({ accountability: req.accountability });
|
|
84
|
-
const snapshot = res.locals['
|
|
82
|
+
const snapshot = res.locals['upload'];
|
|
85
83
|
const currentSnapshot = await service.snapshot();
|
|
86
84
|
const snapshotDiff = await service.diff(snapshot, { currentSnapshot, force: 'force' in req.query });
|
|
87
85
|
if (!snapshotDiff)
|
|
@@ -90,4 +88,10 @@ router.post('/diff', asyncHandler(schemaMultipartHandler), asyncHandler(async (r
|
|
|
90
88
|
res.locals['payload'] = { data: { hash: currentSnapshotHash, diff: snapshotDiff } };
|
|
91
89
|
return next();
|
|
92
90
|
}), respond);
|
|
91
|
+
router.post('/apply', asyncHandler(schemaMultipartHandler), asyncHandler(async (req, res, next) => {
|
|
92
|
+
const service = new SchemaService({ accountability: req.accountability });
|
|
93
|
+
const diff = res.locals['upload'];
|
|
94
|
+
await service.apply(diff);
|
|
95
|
+
return next();
|
|
96
|
+
}), respond);
|
|
93
97
|
export default router;
|
|
@@ -15,8 +15,9 @@ import { sanitizeQuery } from '../utils/sanitize-query.js';
|
|
|
15
15
|
const router = Router();
|
|
16
16
|
router.get('/random/string', asyncHandler(async (req, res) => {
|
|
17
17
|
const { nanoid } = await import('nanoid');
|
|
18
|
-
if (req.query && req.query['length'] && Number(req.query['length']) > 500)
|
|
18
|
+
if (req.query && req.query['length'] && Number(req.query['length']) > 500) {
|
|
19
19
|
throw new InvalidQueryException(`"length" can't be more than 500 characters`);
|
|
20
|
+
}
|
|
20
21
|
const string = nanoid(req.query?.['length'] ? Number(req.query['length']) : 32);
|
|
21
22
|
return res.json({ data: string });
|
|
22
23
|
}));
|
|
@@ -61,8 +62,9 @@ router.post('/revert/:revision', asyncHandler(async (req, _res, next) => {
|
|
|
61
62
|
next();
|
|
62
63
|
}), respond);
|
|
63
64
|
router.post('/import/:collection', collectionExists, asyncHandler(async (req, res, next) => {
|
|
64
|
-
if (req.is('multipart/form-data') === false)
|
|
65
|
+
if (req.is('multipart/form-data') === false) {
|
|
65
66
|
throw new UnsupportedMediaTypeException(`Unsupported Content-Type header`);
|
|
67
|
+
}
|
|
66
68
|
const service = new ImportService({
|
|
67
69
|
accountability: req.accountability,
|
|
68
70
|
schema: req.schema,
|
|
@@ -7,13 +7,14 @@ import path from 'path';
|
|
|
7
7
|
import { flushCaches } from '../../cache.js';
|
|
8
8
|
import env from '../../env.js';
|
|
9
9
|
import logger from '../../logger.js';
|
|
10
|
+
import getModuleDefault from '../../utils/get-module-default.js';
|
|
10
11
|
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
11
12
|
export default async function run(database, direction, log = true) {
|
|
12
13
|
let migrationFiles = await fse.readdir(__dirname);
|
|
13
14
|
const customMigrationsPath = path.resolve(env['EXTENSIONS_PATH'], 'migrations');
|
|
14
15
|
let customMigrationFiles = ((await fse.pathExists(customMigrationsPath)) && (await fse.readdir(customMigrationsPath))) || [];
|
|
15
16
|
migrationFiles = migrationFiles.filter((file) => /^[0-9]+[A-Z]-[^.]+\.(?:js|ts)$/.test(file));
|
|
16
|
-
customMigrationFiles = customMigrationFiles.filter((file) =>
|
|
17
|
+
customMigrationFiles = customMigrationFiles.filter((file) => /\.(c|m)?js$/.test(file));
|
|
17
18
|
const completedMigrations = await database.select('*').from('directus_migrations').orderBy('version');
|
|
18
19
|
const migrations = [
|
|
19
20
|
...migrationFiles.map((path) => parseFilePath(path)),
|
|
@@ -84,7 +85,7 @@ export default async function run(database, direction, log = true) {
|
|
|
84
85
|
for (const migration of migrations) {
|
|
85
86
|
if (migration.completed === false) {
|
|
86
87
|
needsCacheFlush = true;
|
|
87
|
-
const { up } = await import(`file://${migration.file}`);
|
|
88
|
+
const { up } = getModuleDefault(await import(`file://${migration.file}`));
|
|
88
89
|
if (log) {
|
|
89
90
|
logger.info(`Applying ${migration.name}...`);
|
|
90
91
|
}
|
package/dist/env.js
CHANGED
|
@@ -262,6 +262,13 @@ const defaults = {
|
|
|
262
262
|
GRAPHQL_INTROSPECTION: true,
|
|
263
263
|
FLOWS_EXEC_ALLOWED_MODULES: false,
|
|
264
264
|
FLOWS_ENV_ALLOW_LIST: false,
|
|
265
|
+
PRESSURE_LIMITER_ENABLED: true,
|
|
266
|
+
PRESSURE_LIMITER_SAMPLE_INTERVAL: 250,
|
|
267
|
+
PRESSURE_LIMITER_MAX_EVENT_LOOP_UTILIZATION: 0.99,
|
|
268
|
+
PRESSURE_LIMITER_MAX_EVENT_LOOP_DELAY: 500,
|
|
269
|
+
PRESSURE_LIMITER_MAX_MEMORY_RSS: false,
|
|
270
|
+
PRESSURE_LIMITER_MAX_MEMORY_HEAP_USED: false,
|
|
271
|
+
PRESSURE_LIMITER_RETRY_AFTER: false,
|
|
265
272
|
};
|
|
266
273
|
// Allows us to force certain environment variable into a type, instead of relying
|
|
267
274
|
// on the auto-parsed type in processValues. ref #3705
|
package/dist/extensions.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { APP_EXTENSION_TYPES, APP_SHARED_DEPS, HYBRID_EXTENSION_TYPES, NESTED_EXTENSION_TYPES, } from '@directus/constants';
|
|
1
|
+
import { APP_EXTENSION_TYPES, APP_SHARED_DEPS, HYBRID_EXTENSION_TYPES, JAVASCRIPT_FILE_EXTS, NESTED_EXTENSION_TYPES, } from '@directus/constants';
|
|
2
2
|
import * as sharedExceptions from '@directus/exceptions';
|
|
3
3
|
import { isIn, isTypeIn, pluralize } from '@directus/utils';
|
|
4
4
|
import { ensureExtensionDirs, generateExtensionsEntrypoint, getLocalExtensions, getPackageExtensions, pathToRelativeUrl, resolvePackage, resolvePackageExtensions, } from '@directus/utils/node';
|
|
@@ -201,15 +201,14 @@ class ExtensionManager {
|
|
|
201
201
|
const extensionDirUrl = pathToRelativeUrl(env['EXTENSIONS_PATH']);
|
|
202
202
|
const localExtensionUrls = NESTED_EXTENSION_TYPES.flatMap((type) => {
|
|
203
203
|
const typeDir = path.posix.join(extensionDirUrl, pluralize(type));
|
|
204
|
-
const fileExts = ['js', 'mjs', 'cjs'];
|
|
205
204
|
if (isIn(type, HYBRID_EXTENSION_TYPES)) {
|
|
206
205
|
return [
|
|
207
|
-
path.posix.join(typeDir, '*', `app.{${
|
|
208
|
-
path.posix.join(typeDir, '*', `api.{${
|
|
206
|
+
path.posix.join(typeDir, '*', `app.{${JAVASCRIPT_FILE_EXTS.join()}}`),
|
|
207
|
+
path.posix.join(typeDir, '*', `api.{${JAVASCRIPT_FILE_EXTS.join()}}`),
|
|
209
208
|
];
|
|
210
209
|
}
|
|
211
210
|
else {
|
|
212
|
-
return path.posix.join(typeDir, '*', `index.{${
|
|
211
|
+
return path.posix.join(typeDir, '*', `index.{${JAVASCRIPT_FILE_EXTS.join()}}`);
|
|
213
212
|
}
|
|
214
213
|
});
|
|
215
214
|
this.watcher = chokidar.watch([path.resolve('package.json'), path.posix.join(extensionDirUrl, '*', 'package.json'), ...localExtensionUrls], {
|
package/dist/flows.d.ts
CHANGED
|
@@ -13,7 +13,10 @@ declare class FlowManager {
|
|
|
13
13
|
addOperation(id: string, operation: OperationHandler): void;
|
|
14
14
|
clearOperations(): void;
|
|
15
15
|
runOperationFlow(id: string, data: unknown, context: Record<string, unknown>): Promise<unknown>;
|
|
16
|
-
runWebhookFlow(id: string, data: unknown, context: Record<string, unknown>): Promise<
|
|
16
|
+
runWebhookFlow(id: string, data: unknown, context: Record<string, unknown>): Promise<{
|
|
17
|
+
result: unknown;
|
|
18
|
+
cacheEnabled: boolean;
|
|
19
|
+
}>;
|
|
17
20
|
private load;
|
|
18
21
|
private unload;
|
|
19
22
|
private executeFlow;
|
package/dist/flows.js
CHANGED
|
@@ -9,7 +9,6 @@ import getDatabase from './database/index.js';
|
|
|
9
9
|
import emitter from './emitter.js';
|
|
10
10
|
import env from './env.js';
|
|
11
11
|
import * as exceptions from './exceptions/index.js';
|
|
12
|
-
import { BaseException } from '@directus/exceptions';
|
|
13
12
|
import logger from './logger.js';
|
|
14
13
|
import { getMessenger } from './messenger.js';
|
|
15
14
|
import { ActivityService } from './services/activity.js';
|
|
@@ -20,6 +19,7 @@ import { constructFlowTree } from './utils/construct-flow-tree.js';
|
|
|
20
19
|
import { getSchema } from './utils/get-schema.js';
|
|
21
20
|
import { JobQueue } from './utils/job-queue.js';
|
|
22
21
|
import { mapValuesDeep } from './utils/map-values-deep.js';
|
|
22
|
+
import { sanitizeError } from './utils/sanitize-error.js';
|
|
23
23
|
let flowManager;
|
|
24
24
|
const redactLogs = fastRedact({
|
|
25
25
|
censor: '--redacted--',
|
|
@@ -167,16 +167,20 @@ class FlowManager {
|
|
|
167
167
|
this.operationFlowHandlers[flow.id] = handler;
|
|
168
168
|
}
|
|
169
169
|
else if (flow.trigger === 'webhook') {
|
|
170
|
-
const
|
|
170
|
+
const method = flow.options?.['method'] ?? 'GET';
|
|
171
|
+
const handler = async (data, context) => {
|
|
172
|
+
let cacheEnabled = true;
|
|
173
|
+
if (method === 'GET') {
|
|
174
|
+
cacheEnabled = flow.options['cacheEnabled'] !== false;
|
|
175
|
+
}
|
|
171
176
|
if (flow.options['async']) {
|
|
172
177
|
this.executeFlow(flow, data, context);
|
|
173
|
-
return undefined;
|
|
178
|
+
return { result: undefined, cacheEnabled };
|
|
174
179
|
}
|
|
175
180
|
else {
|
|
176
|
-
return this.executeFlow(flow, data, context);
|
|
181
|
+
return { result: await this.executeFlow(flow, data, context), cacheEnabled };
|
|
177
182
|
}
|
|
178
183
|
};
|
|
179
|
-
const method = flow.options?.['method'] ?? 'GET';
|
|
180
184
|
// Default return to $last for webhooks
|
|
181
185
|
flow.options['return'] = flow.options['return'] ?? '$last';
|
|
182
186
|
this.webhookFlowHandlers[`${method}-${flow.id}`] = handler;
|
|
@@ -325,11 +329,9 @@ class FlowManager {
|
|
|
325
329
|
}
|
|
326
330
|
catch (error) {
|
|
327
331
|
let data;
|
|
328
|
-
if (error instanceof
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
else if (error instanceof Error) {
|
|
332
|
-
data = { message: error.message };
|
|
332
|
+
if (error instanceof Error) {
|
|
333
|
+
// make sure we dont expose the stack trace
|
|
334
|
+
data = sanitizeError(error);
|
|
333
335
|
}
|
|
334
336
|
else if (typeof error === 'string') {
|
|
335
337
|
// If the error is a JSON string, parse it and use that as the error data
|
|
@@ -60,7 +60,7 @@ export class CollectionsService {
|
|
|
60
60
|
// Directus heavily relies on the primary key of a collection, so we have to make sure that
|
|
61
61
|
// every collection that is created has a primary key. If no primary key field is created
|
|
62
62
|
// while making the collection, we default to an auto incremented id named `id`
|
|
63
|
-
if (!payload.fields)
|
|
63
|
+
if (!payload.fields) {
|
|
64
64
|
payload.fields = [
|
|
65
65
|
{
|
|
66
66
|
field: 'id',
|
|
@@ -76,6 +76,7 @@ export class CollectionsService {
|
|
|
76
76
|
},
|
|
77
77
|
},
|
|
78
78
|
];
|
|
79
|
+
}
|
|
79
80
|
// Ensure that every field meta has the field/collection fields filled correctly
|
|
80
81
|
payload.fields = payload.fields.map((field) => {
|
|
81
82
|
if (field.meta) {
|
|
@@ -98,8 +98,9 @@ export class GraphQLService {
|
|
|
98
98
|
const formattedResult = {};
|
|
99
99
|
if (result['data'])
|
|
100
100
|
formattedResult.data = result['data'];
|
|
101
|
-
if (result['errors'])
|
|
101
|
+
if (result['errors']) {
|
|
102
102
|
formattedResult.errors = result['errors'].map((error) => processError(this.accountability, error));
|
|
103
|
+
}
|
|
103
104
|
if (result['extensions'])
|
|
104
105
|
formattedResult.extensions = result['extensions'];
|
|
105
106
|
return formattedResult;
|
|
@@ -285,8 +286,9 @@ export class GraphQLService {
|
|
|
285
286
|
type = new GraphQLNonNull(type);
|
|
286
287
|
}
|
|
287
288
|
if (collection.primary === field.field) {
|
|
288
|
-
if (!field.defaultValue && !field.special.includes('uuid') && action === 'create')
|
|
289
|
+
if (!field.defaultValue && !field.special.includes('uuid') && action === 'create') {
|
|
289
290
|
type = new GraphQLNonNull(GraphQLID);
|
|
291
|
+
}
|
|
290
292
|
else if (['create', 'update'].includes(action))
|
|
291
293
|
type = GraphQLID;
|
|
292
294
|
else
|
package/dist/services/items.js
CHANGED
|
@@ -144,7 +144,7 @@ export class ItemsService {
|
|
|
144
144
|
// to read from it
|
|
145
145
|
payload[primaryKeyField] = primaryKey;
|
|
146
146
|
}
|
|
147
|
-
const { revisions: revisionsO2M, nestedActionEvents: nestedActionEventsO2M } = await payloadService.processO2M(
|
|
147
|
+
const { revisions: revisionsO2M, nestedActionEvents: nestedActionEventsO2M } = await payloadService.processO2M(payloadWithPresets, primaryKey, opts);
|
|
148
148
|
nestedActionEvents.push(...nestedActionEventsM2O);
|
|
149
149
|
nestedActionEvents.push(...nestedActionEventsA2O);
|
|
150
150
|
nestedActionEvents.push(...nestedActionEventsO2M);
|
|
@@ -50,31 +50,49 @@ export class PermissionsService extends ItemsService {
|
|
|
50
50
|
async createOne(data, opts) {
|
|
51
51
|
const res = await super.createOne(data, opts);
|
|
52
52
|
await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache });
|
|
53
|
+
if (this.cache && opts?.autoPurgeCache !== false) {
|
|
54
|
+
await this.cache.clear();
|
|
55
|
+
}
|
|
53
56
|
return res;
|
|
54
57
|
}
|
|
55
58
|
async createMany(data, opts) {
|
|
56
59
|
const res = await super.createMany(data, opts);
|
|
57
60
|
await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache });
|
|
61
|
+
if (this.cache && opts?.autoPurgeCache !== false) {
|
|
62
|
+
await this.cache.clear();
|
|
63
|
+
}
|
|
58
64
|
return res;
|
|
59
65
|
}
|
|
60
66
|
async updateBatch(data, opts) {
|
|
61
67
|
const res = await super.updateBatch(data, opts);
|
|
62
68
|
await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache });
|
|
69
|
+
if (this.cache && opts?.autoPurgeCache !== false) {
|
|
70
|
+
await this.cache.clear();
|
|
71
|
+
}
|
|
63
72
|
return res;
|
|
64
73
|
}
|
|
65
74
|
async updateMany(keys, data, opts) {
|
|
66
75
|
const res = await super.updateMany(keys, data, opts);
|
|
67
76
|
await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache });
|
|
77
|
+
if (this.cache && opts?.autoPurgeCache !== false) {
|
|
78
|
+
await this.cache.clear();
|
|
79
|
+
}
|
|
68
80
|
return res;
|
|
69
81
|
}
|
|
70
82
|
async upsertMany(payloads, opts) {
|
|
71
83
|
const res = await super.upsertMany(payloads, opts);
|
|
72
84
|
await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache });
|
|
85
|
+
if (this.cache && opts?.autoPurgeCache !== false) {
|
|
86
|
+
await this.cache.clear();
|
|
87
|
+
}
|
|
73
88
|
return res;
|
|
74
89
|
}
|
|
75
90
|
async deleteMany(keys, opts) {
|
|
76
91
|
const res = await super.deleteMany(keys, opts);
|
|
77
92
|
await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache });
|
|
93
|
+
if (this.cache && opts?.autoPurgeCache !== false) {
|
|
94
|
+
await this.cache.clear();
|
|
95
|
+
}
|
|
78
96
|
return res;
|
|
79
97
|
}
|
|
80
98
|
}
|
|
@@ -188,8 +188,9 @@ export function applyFilter(knex, schema, rootQuery, rootFilter, collection, ali
|
|
|
188
188
|
if (key === '_or' || key === '_and') {
|
|
189
189
|
// If the _or array contains an empty object (full permissions), we should short-circuit and ignore all other
|
|
190
190
|
// permission checks, as {} already matches full permissions.
|
|
191
|
-
if (key === '_or' && value.some((subFilter) => Object.keys(subFilter).length === 0))
|
|
191
|
+
if (key === '_or' && value.some((subFilter) => Object.keys(subFilter).length === 0)) {
|
|
192
192
|
continue;
|
|
193
|
+
}
|
|
193
194
|
value.forEach((subFilter) => {
|
|
194
195
|
addJoins(dbQuery, subFilter, collection);
|
|
195
196
|
});
|
|
@@ -484,8 +485,10 @@ export async function applySearch(schema, dbQuery, searchQuery, collection) {
|
|
|
484
485
|
}
|
|
485
486
|
else if (['bigInteger', 'integer', 'decimal', 'float'].includes(field.type)) {
|
|
486
487
|
const number = Number(searchQuery);
|
|
487
|
-
|
|
488
|
+
// only cast finite base10 numeric values
|
|
489
|
+
if (validateNumber(searchQuery, number)) {
|
|
488
490
|
this.orWhere({ [`${collection}.${name}`]: number });
|
|
491
|
+
}
|
|
489
492
|
}
|
|
490
493
|
else if (field.type === 'uuid' && validate(searchQuery)) {
|
|
491
494
|
this.orWhere({ [`${collection}.${name}`]: searchQuery });
|
|
@@ -493,6 +496,13 @@ export async function applySearch(schema, dbQuery, searchQuery, collection) {
|
|
|
493
496
|
});
|
|
494
497
|
});
|
|
495
498
|
}
|
|
499
|
+
function validateNumber(value, parsed) {
|
|
500
|
+
if (isNaN(parsed) || !Number.isFinite(parsed))
|
|
501
|
+
return false;
|
|
502
|
+
// casting parsed value back to string should be equal the original value
|
|
503
|
+
// (prevent unintended number parsing, e.g. String(7) !== "ob111")
|
|
504
|
+
return String(parsed) === value;
|
|
505
|
+
}
|
|
496
506
|
export function applyAggregate(dbQuery, aggregate, collection) {
|
|
497
507
|
for (const [operation, fields] of Object.entries(aggregate)) {
|
|
498
508
|
if (!fields)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function sanitizeError<T extends Error>(error: T): T;
|
|
@@ -25,7 +25,7 @@ export function sanitizeQuery(rawQuery, accountability) {
|
|
|
25
25
|
if (rawQuery['filter']) {
|
|
26
26
|
query.filter = sanitizeFilter(rawQuery['filter'], accountability || null);
|
|
27
27
|
}
|
|
28
|
-
if (rawQuery['offset']) {
|
|
28
|
+
if (rawQuery['offset'] !== undefined) {
|
|
29
29
|
query.offset = sanitizeOffset(rawQuery['offset']);
|
|
30
30
|
}
|
|
31
31
|
if (rawQuery['page']) {
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
export function resolvePreset(input, file) {
|
|
2
|
-
const transforms = input.transforms
|
|
3
|
-
if (input.format || input.quality)
|
|
2
|
+
const transforms = input.transforms ? [...input.transforms] : [];
|
|
3
|
+
if (input.format || input.quality) {
|
|
4
4
|
transforms.push([
|
|
5
5
|
'toFormat',
|
|
6
6
|
input.format || file.type.split('/')[1],
|
|
@@ -8,7 +8,8 @@ export function resolvePreset(input, file) {
|
|
|
8
8
|
quality: input.quality ? Number(input.quality) : undefined,
|
|
9
9
|
},
|
|
10
10
|
]);
|
|
11
|
-
|
|
11
|
+
}
|
|
12
|
+
if (input.width || input.height) {
|
|
12
13
|
transforms.push([
|
|
13
14
|
'resize',
|
|
14
15
|
{
|
|
@@ -18,6 +19,7 @@ export function resolvePreset(input, file) {
|
|
|
18
19
|
withoutEnlargement: input.withoutEnlargement ? Boolean(input.withoutEnlargement) : undefined,
|
|
19
20
|
},
|
|
20
21
|
]);
|
|
22
|
+
}
|
|
21
23
|
return transforms;
|
|
22
24
|
}
|
|
23
25
|
/**
|
package/license
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
Licensor: Monospace, Inc.
|
|
2
|
+
|
|
3
|
+
Licensed Work: Directus
|
|
4
|
+
The Licensed Work is Copyright © 2023 Monospace, Inc.
|
|
5
|
+
|
|
6
|
+
Additional Use Grant: You may use the Licensed Work in production as long as
|
|
7
|
+
your Total Finances do not exceed US $5,000,000 for the
|
|
8
|
+
most recent 12-month period, provided that Monospace, Inc.
|
|
9
|
+
will not be liable to you in any way, including for any
|
|
10
|
+
damages, including general, special, incidental or
|
|
11
|
+
consequential damages, arising out of such use.
|
|
12
|
+
|
|
13
|
+
References to: “Total Finances” mean the largest of your
|
|
14
|
+
aggregate gross revenues, entire budget, and/or funding
|
|
15
|
+
(no matter the source); “you” and “your” include (without
|
|
16
|
+
limitation) any individual or entity agreeing to these
|
|
17
|
+
terms and any affiliates of such individual or entity; and
|
|
18
|
+
“production” mean any use other than (i) development of
|
|
19
|
+
(including evaluation of the Licensed Work), debugging, or
|
|
20
|
+
testing your offerings, or (ii) making the Licensed Work
|
|
21
|
+
available standalone in unmodified object code form.
|
|
22
|
+
|
|
23
|
+
Change Date: Three years from release date
|
|
24
|
+
|
|
25
|
+
Change License: GNU General Public License (GPL) v3
|
|
26
|
+
|
|
27
|
+
For information about alternative licensing arrangements, please visit
|
|
28
|
+
https://directus.io/pricing.
|
|
29
|
+
|
|
30
|
+
--------------------------------------------------------------------------------
|
|
31
|
+
|
|
32
|
+
Business Source License 1.1
|
|
33
|
+
|
|
34
|
+
Terms
|
|
35
|
+
|
|
36
|
+
The Licensor hereby grants you the right to copy, modify, create derivative
|
|
37
|
+
works, redistribute, and make non-production use of the Licensed Work. The
|
|
38
|
+
Licensor may make an Additional Use Grant, above, permitting limited production
|
|
39
|
+
use.
|
|
40
|
+
|
|
41
|
+
Effective on the Change Date, or the fourth anniversary of the first publicly
|
|
42
|
+
available distribution of a specific version of the Licensed Work under this
|
|
43
|
+
License, whichever comes first, the Licensor hereby grants you rights under the
|
|
44
|
+
terms of the Change License, and the rights granted in the paragraph above
|
|
45
|
+
terminate.
|
|
46
|
+
|
|
47
|
+
If your use of the Licensed Work does not comply with the requirements currently
in effect as described in this License, you must purchase a commercial license
from the Licensor, its affiliated entities, or authorized resellers, or you must
refrain from using the Licensed Work.

All copies of the original and modified Licensed Work, and derivative works of
the Licensed Work, are subject to this License. This License applies separately
for each version of the Licensed Work and the Change Date may vary for each
version of the Licensed Work released by Licensor.

You must conspicuously display this License on each original or modified copy of
the Licensed Work. If you receive the Licensed Work in original or modified form
from a third party, the terms and conditions set forth in this License apply to
your use of that work.

Any use of the Licensed Work in violation of this License will automatically
terminate your rights under this License for the current and all other versions
of the Licensed Work.

This License does not grant you any right in any trademark or logo of Licensor
or its affiliates (provided that you may use a trademark or logo of Licensor as
expressly required by this License).

TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON AN
“AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, EXPRESS
OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND TITLE.

MariaDB hereby grants you permission to use this License’s text to license your
works, and to refer to it using the trademark “Business Source License”, as long
as you comply with the Covenants of Licensor below.

Covenants of Licensor

In consideration of the right to use this License’s text and the “Business
Source License” name and trademark, Licensor covenants to MariaDB, and to all
other recipients of the licensed work to be provided by Licensor:

1. To specify as the Change License the GPL Version 2.0 or any later version,
   or a license that is compatible with GPL Version 2.0 or a later version,
   where “compatible” means that software provided under the Change License can
   be included in a program with software provided under GPL Version 2.0 or a
   later version. Licensor may specify additional Change Licenses without
   limitation.

2. To either: (a) specify an additional grant of rights to use that does not
   impose any additional restriction on the right granted in this License, as
   the Additional Use Grant; or (b) insert the text “None”.

3. To specify a Change Date.

4. Not to modify this License in any other way.

Notice

The Business Source License (this document, or the "License") is not an Open
Source license. However, the Licensed Work will eventually be made available
under an Open Source License, as stated in this License.

License text copyright © 2023 MariaDB plc, All Rights Reserved.
“Business Source License” is a trademark of MariaDB plc.