@directus/api 10.0.0 → 10.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113):
  1. package/dist/__mocks__/cache.d.mts +5 -0
  2. package/dist/__mocks__/cache.mjs +7 -0
  3. package/dist/__utils__/items-utils.d.ts +2 -0
  4. package/dist/__utils__/items-utils.js +31 -0
  5. package/dist/__utils__/schemas.d.ts +13 -0
  6. package/dist/__utils__/schemas.js +301 -0
  7. package/dist/__utils__/snapshots.d.ts +5 -0
  8. package/dist/__utils__/snapshots.js +894 -0
  9. package/dist/app.js +17 -0
  10. package/dist/auth/drivers/oauth2.js +1 -1
  11. package/dist/auth/drivers/openid.js +1 -1
  12. package/dist/cli/commands/bootstrap/index.js +1 -0
  13. package/dist/cli/utils/create-env/env-stub.liquid +4 -0
  14. package/dist/controllers/assets.js +6 -10
  15. package/dist/controllers/flows.js +4 -1
  16. package/dist/controllers/schema.js +20 -16
  17. package/dist/controllers/utils.js +4 -2
  18. package/dist/database/migrations/run.js +3 -2
  19. package/dist/database/run-ast.js +3 -3
  20. package/dist/env.js +10 -0
  21. package/dist/extensions.js +4 -5
  22. package/dist/flows.d.ts +4 -1
  23. package/dist/flows.js +12 -10
  24. package/dist/services/assets.d.ts +2 -2
  25. package/dist/services/collections.js +2 -1
  26. package/dist/services/fields.js +3 -1
  27. package/dist/services/graphql/index.js +13 -2
  28. package/dist/services/items.js +1 -1
  29. package/dist/services/permissions.js +18 -0
  30. package/dist/services/server.js +4 -0
  31. package/dist/types/assets.d.ts +6 -1
  32. package/dist/utils/apply-query.js +14 -4
  33. package/dist/utils/sanitize-error.d.ts +1 -0
  34. package/dist/utils/sanitize-error.js +7 -0
  35. package/dist/utils/sanitize-query.js +11 -2
  36. package/dist/utils/transformations.d.ts +2 -2
  37. package/dist/utils/transformations.js +29 -10
  38. package/dist/utils/validate-query.js +3 -1
  39. package/package.json +22 -23
  40. package/dist/app.test.d.ts +0 -1
  41. package/dist/controllers/files.test.d.ts +0 -1
  42. package/dist/database/migrations/run.test.d.ts +0 -1
  43. package/dist/env.test.d.ts +0 -1
  44. package/dist/logger.test.d.ts +0 -1
  45. package/dist/middleware/authenticate.test.d.ts +0 -1
  46. package/dist/middleware/extract-token.test.d.ts +0 -1
  47. package/dist/middleware/validate-batch.test.d.ts +0 -1
  48. package/dist/operations/condition/index.test.d.ts +0 -1
  49. package/dist/operations/exec/index.test.d.ts +0 -1
  50. package/dist/operations/item-create/index.test.d.ts +0 -1
  51. package/dist/operations/item-delete/index.test.d.ts +0 -1
  52. package/dist/operations/item-read/index.test.d.ts +0 -1
  53. package/dist/operations/item-update/index.test.d.ts +0 -1
  54. package/dist/operations/log/index.test.d.ts +0 -1
  55. package/dist/operations/mail/index.test.d.ts +0 -1
  56. package/dist/operations/notification/index.test.d.ts +0 -1
  57. package/dist/operations/request/index.test.d.ts +0 -1
  58. package/dist/operations/sleep/index.test.d.ts +0 -1
  59. package/dist/operations/transform/index.test.d.ts +0 -1
  60. package/dist/operations/trigger/index.test.d.ts +0 -1
  61. package/dist/request/index.test.d.ts +0 -1
  62. package/dist/request/request-interceptor.test.d.ts +0 -1
  63. package/dist/request/response-interceptor.test.d.ts +0 -1
  64. package/dist/request/validate-ip.test.d.ts +0 -1
  65. package/dist/services/files.test.d.ts +0 -1
  66. package/dist/services/graphql/utils/process-error.test.d.ts +0 -1
  67. package/dist/services/items.test.d.ts +0 -1
  68. package/dist/services/payload.test.d.ts +0 -1
  69. package/dist/services/roles.test.d.ts +0 -1
  70. package/dist/services/schema.test.d.ts +0 -1
  71. package/dist/services/specifications.test.d.ts +0 -1
  72. package/dist/services/users.test.d.ts +0 -1
  73. package/dist/services/webhooks.test.d.ts +0 -1
  74. package/dist/storage/get-storage-driver.test.d.ts +0 -1
  75. package/dist/storage/index.test.d.ts +0 -1
  76. package/dist/storage/register-drivers.test.d.ts +0 -1
  77. package/dist/storage/register-locations.test.d.ts +0 -1
  78. package/dist/utils/apply-diff.test.d.ts +0 -1
  79. package/dist/utils/apply-function-to-column-name.test.d.ts +0 -1
  80. package/dist/utils/apply-snapshot.test.d.ts +0 -1
  81. package/dist/utils/async-handler.test.d.ts +0 -1
  82. package/dist/utils/calculate-field-depth.test.d.ts +0 -1
  83. package/dist/utils/filter-items.test.d.ts +0 -1
  84. package/dist/utils/get-auth-providers.test.d.ts +0 -1
  85. package/dist/utils/get-cache-headers.test.d.ts +0 -1
  86. package/dist/utils/get-cache-key.test.d.ts +0 -1
  87. package/dist/utils/get-collection-from-alias.test.d.ts +0 -1
  88. package/dist/utils/get-column-path.test.d.ts +0 -1
  89. package/dist/utils/get-config-from-env.test.d.ts +0 -1
  90. package/dist/utils/get-date-formatted.test.d.ts +0 -1
  91. package/dist/utils/get-graphql-query-and-variables.test.d.ts +0 -1
  92. package/dist/utils/get-milliseconds.test.d.ts +0 -1
  93. package/dist/utils/get-relation-info.test.d.ts +0 -1
  94. package/dist/utils/get-relation-type.test.d.ts +0 -1
  95. package/dist/utils/get-string-byte-size.test.d.ts +0 -1
  96. package/dist/utils/get-versioned-hash.test.d.ts +0 -1
  97. package/dist/utils/is-directus-jwt.test.d.ts +0 -1
  98. package/dist/utils/jwt.test.d.ts +0 -1
  99. package/dist/utils/map-values-deep.test.d.ts +0 -1
  100. package/dist/utils/md.test.d.ts +0 -1
  101. package/dist/utils/merge-permissions.test.d.ts +0 -1
  102. package/dist/utils/sanitize-query.test.d.ts +0 -1
  103. package/dist/utils/sanitize-schema.test.d.ts +0 -1
  104. package/dist/utils/should-skip-cache.test.d.ts +0 -1
  105. package/dist/utils/stall.test.d.ts +0 -1
  106. package/dist/utils/strip-function.test.d.ts +0 -1
  107. package/dist/utils/url.test.d.ts +0 -1
  108. package/dist/utils/user-name.test.d.ts +0 -1
  109. package/dist/utils/validate-diff.test.d.ts +0 -1
  110. package/dist/utils/validate-env.test.d.ts +0 -1
  111. package/dist/utils/validate-keys.test.d.ts +0 -1
  112. package/dist/utils/validate-query.test.d.ts +0 -1
  113. package/dist/utils/validate-snapshot.test.d.ts +0 -1
package/dist/app.js CHANGED
@@ -1,3 +1,4 @@
1
+ import { handlePressure } from '@directus/pressure';
1
2
  import cookieParser from 'cookie-parser';
2
3
  import express from 'express';
3
4
  import { merge } from 'lodash-es';
@@ -38,6 +39,7 @@ import { isInstalled, validateDatabaseConnection, validateDatabaseExtensions, va
38
39
  import emitter from './emitter.js';
39
40
  import env from './env.js';
40
41
  import { InvalidPayloadException } from './exceptions/invalid-payload.js';
42
+ import { ServiceUnavailableException } from './exceptions/service-unavailable.js';
41
43
  import { getExtensionManager } from './extensions.js';
42
44
  import { getFlowManager } from './flows.js';
43
45
  import logger, { expressLogger } from './logger.js';
@@ -84,6 +86,21 @@ export default async function createApp() {
84
86
  app.disable('x-powered-by');
85
87
  app.set('trust proxy', env['IP_TRUST_PROXY']);
86
88
  app.set('query parser', (str) => qs.parse(str, { depth: 10 }));
89
+ if (env['PRESSURE_LIMITER_ENABLED']) {
90
+ const sampleInterval = Number(env['PRESSURE_LIMITER_SAMPLE_INTERVAL']);
91
+ if (Number.isNaN(sampleInterval) === true || Number.isFinite(sampleInterval) === false) {
92
+ throw new Error(`Invalid value for PRESSURE_LIMITER_SAMPLE_INTERVAL environment variable`);
93
+ }
94
+ app.use(handlePressure({
95
+ sampleInterval,
96
+ maxEventLoopUtilization: env['PRESSURE_LIMITER_MAX_EVENT_LOOP_UTILIZATION'],
97
+ maxEventLoopDelay: env['PRESSURE_LIMITER_MAX_EVENT_LOOP_DELAY'],
98
+ maxMemoryRss: env['PRESSURE_LIMITER_MAX_MEMORY_RSS'],
99
+ maxMemoryHeapUsed: env['PRESSURE_LIMITER_MAX_MEMORY_HEAP_USED'],
100
+ error: new ServiceUnavailableException('Under pressure', { service: 'api' }),
101
+ retryAfter: env['PRESSURE_LIMITER_RETRY_AFTER'],
102
+ }));
103
+ }
87
104
  app.use(helmet.contentSecurityPolicy(merge({
88
105
  useDefaults: true,
89
106
  directives: {
@@ -117,7 +117,7 @@ export class OAuth2AuthDriver extends LocalAuthDriver {
117
117
  if (userId) {
118
118
  // Run hook so the end user has the chance to augment the
119
119
  // user that is about to be updated
120
- const updatedUserPayload = await emitter.emitFilter(`auth.update`, {}, {
120
+ const updatedUserPayload = await emitter.emitFilter(`auth.update`, { auth_data: userPayload.auth_data ?? null }, {
121
121
  identifier,
122
122
  provider: this.config['provider'],
123
123
  providerPayload: { accessToken: tokenSet.access_token, userInfo },
@@ -136,7 +136,7 @@ export class OpenIDAuthDriver extends LocalAuthDriver {
136
136
  if (userId) {
137
137
  // Run hook so the end user has the chance to augment the
138
138
  // user that is about to be updated
139
- const updatedUserPayload = await emitter.emitFilter(`auth.update`, {}, {
139
+ const updatedUserPayload = await emitter.emitFilter(`auth.update`, { auth_data: userPayload.auth_data ?? null }, {
140
140
  identifier,
141
141
  provider: this.config['provider'],
142
142
  providerPayload: { accessToken: tokenSet.access_token, userInfo },
@@ -34,6 +34,7 @@ export default async function bootstrap({ skipAdminInit }) {
34
34
  logger.info('Running migrations...');
35
35
  await runMigrations(database, 'latest');
36
36
  }
37
+ await database.destroy();
37
38
  logger.info('Done');
38
39
  process.exit(0);
39
40
  }
@@ -43,6 +43,10 @@ PUBLIC_URL="/"
43
43
  # Whether or not to enable GraphQL Introspection [true]
44
44
  # GRAPHQL_INTROSPECTION=true
45
45
 
46
+ # Limit the maximum amount of items that can get requested in one query.
47
+ # QUERY_LIMIT_DEFAULT=100
48
+ # QUERY_LIMIT_MAX=Infinity
49
+
46
50
  # The maximum number of items for batch mutations when creating, updating and deleting. ["Infinity"]
47
51
  # MAX_BATCH_MUTATION="Infinity"
48
52
 
@@ -106,21 +106,17 @@ asyncHandler(async (req, res) => {
106
106
  schema: req.schema,
107
107
  });
108
108
  const vary = ['Origin', 'Cache-Control'];
109
- const transformation = res.locals['transformation'].key
109
+ const transformationParams = res.locals['transformation'].key
110
110
  ? res.locals['shortcuts'].find((transformation) => transformation['key'] === res.locals['transformation'].key)
111
111
  : res.locals['transformation'];
112
- if (transformation.format === 'auto') {
113
- let format;
112
+ let acceptFormat;
113
+ if (transformationParams.format === 'auto') {
114
114
  if (req.headers.accept?.includes('image/avif')) {
115
- format = 'avif';
115
+ acceptFormat = 'avif';
116
116
  }
117
117
  else if (req.headers.accept?.includes('image/webp')) {
118
- format = 'webp';
118
+ acceptFormat = 'webp';
119
119
  }
120
- else {
121
- format = 'jpg';
122
- }
123
- transformation.format = format;
124
120
  vary.push('Accept');
125
121
  }
126
122
  let range = undefined;
@@ -140,7 +136,7 @@ asyncHandler(async (req, res) => {
140
136
  }
141
137
  }
142
138
  }
143
- const { stream, file, stat } = await service.getAsset(id, transformation, range);
139
+ const { stream, file, stat } = await service.getAsset(id, { transformationParams, acceptFormat }, range);
144
140
  const filename = req.params['filename'] ?? file.filename_download;
145
141
  res.attachment(filename);
146
142
  res.setHeader('Content-Type', file.type);
@@ -13,7 +13,7 @@ const router = express.Router();
13
13
  router.use(useCollection('directus_flows'));
14
14
  const webhookFlowHandler = asyncHandler(async (req, res, next) => {
15
15
  const flowManager = getFlowManager();
16
- const result = await flowManager.runWebhookFlow(`${req.method}-${req.params['pk']}`, {
16
+ const { result, cacheEnabled } = await flowManager.runWebhookFlow(`${req.method}-${req.params['pk']}`, {
17
17
  path: req.path,
18
18
  query: req.query,
19
19
  body: req.body,
@@ -23,6 +23,9 @@ const webhookFlowHandler = asyncHandler(async (req, res, next) => {
23
23
  accountability: req.accountability,
24
24
  schema: req.schema,
25
25
  });
26
+ if (!cacheEnabled) {
27
+ res.locals['cache'] = false;
28
+ }
26
29
  res.locals['payload'] = result;
27
30
  return next();
28
31
  });
@@ -15,20 +15,17 @@ router.get('/snapshot', asyncHandler(async (req, res, next) => {
15
15
  res.locals['payload'] = { data: currentSnapshot };
16
16
  return next();
17
17
  }), respond);
18
- router.post('/apply', asyncHandler(async (req, _res, next) => {
19
- const service = new SchemaService({ accountability: req.accountability });
20
- await service.apply(req.body);
21
- return next();
22
- }), respond);
23
18
  const schemaMultipartHandler = (req, res, next) => {
24
19
  if (req.is('application/json')) {
25
- if (Object.keys(req.body).length === 0)
20
+ if (Object.keys(req.body).length === 0) {
26
21
  throw new InvalidPayloadException(`No data was included in the body`);
27
- res.locals['uploadedSnapshot'] = req.body;
22
+ }
23
+ res.locals['upload'] = req.body;
28
24
  return next();
29
25
  }
30
- if (!req.is('multipart/form-data'))
26
+ if (!req.is('multipart/form-data')) {
31
27
  throw new UnsupportedMediaTypeException(`Unsupported Content-Type header`);
28
+ }
32
29
  const headers = req.headers['content-type']
33
30
  ? req.headers
34
31
  : {
@@ -37,7 +34,7 @@ const schemaMultipartHandler = (req, res, next) => {
37
34
  };
38
35
  const busboy = Busboy({ headers });
39
36
  let isFileIncluded = false;
40
- let uploadedSnapshot = null;
37
+ let upload = null;
41
38
  busboy.on('file', async (_, fileStream, { mimeType }) => {
42
39
  if (isFileIncluded)
43
40
  return next(new InvalidPayloadException(`More than one file was included in the body`));
@@ -47,25 +44,26 @@ const schemaMultipartHandler = (req, res, next) => {
47
44
  const uploadedString = await readableStreamToString(fileStream);
48
45
  if (mimeType === 'application/json') {
49
46
  try {
50
- uploadedSnapshot = parseJSON(uploadedString);
47
+ upload = parseJSON(uploadedString);
51
48
  }
52
49
  catch (err) {
53
50
  logger.warn(err);
54
- throw new InvalidPayloadException('Invalid JSON schema snapshot');
51
+ throw new InvalidPayloadException('The provided JSON is invalid.');
55
52
  }
56
53
  }
57
54
  else {
58
55
  try {
59
- uploadedSnapshot = (await loadYaml(uploadedString));
56
+ upload = await loadYaml(uploadedString);
60
57
  }
61
58
  catch (err) {
62
59
  logger.warn(err);
63
- throw new InvalidPayloadException('Invalid YAML schema snapshot');
60
+ throw new InvalidPayloadException('The provided YAML is invalid.');
64
61
  }
65
62
  }
66
- if (!uploadedSnapshot)
63
+ if (!upload) {
67
64
  throw new InvalidPayloadException(`No file was included in the body`);
68
- res.locals['uploadedSnapshot'] = uploadedSnapshot;
65
+ }
66
+ res.locals['upload'] = upload;
69
67
  return next();
70
68
  }
71
69
  catch (error) {
@@ -81,7 +79,7 @@ const schemaMultipartHandler = (req, res, next) => {
81
79
  };
82
80
  router.post('/diff', asyncHandler(schemaMultipartHandler), asyncHandler(async (req, res, next) => {
83
81
  const service = new SchemaService({ accountability: req.accountability });
84
- const snapshot = res.locals['uploadedSnapshot'];
82
+ const snapshot = res.locals['upload'];
85
83
  const currentSnapshot = await service.snapshot();
86
84
  const snapshotDiff = await service.diff(snapshot, { currentSnapshot, force: 'force' in req.query });
87
85
  if (!snapshotDiff)
@@ -90,4 +88,10 @@ router.post('/diff', asyncHandler(schemaMultipartHandler), asyncHandler(async (r
90
88
  res.locals['payload'] = { data: { hash: currentSnapshotHash, diff: snapshotDiff } };
91
89
  return next();
92
90
  }), respond);
91
+ router.post('/apply', asyncHandler(schemaMultipartHandler), asyncHandler(async (req, res, next) => {
92
+ const service = new SchemaService({ accountability: req.accountability });
93
+ const diff = res.locals['upload'];
94
+ await service.apply(diff);
95
+ return next();
96
+ }), respond);
93
97
  export default router;
@@ -15,8 +15,9 @@ import { sanitizeQuery } from '../utils/sanitize-query.js';
15
15
  const router = Router();
16
16
  router.get('/random/string', asyncHandler(async (req, res) => {
17
17
  const { nanoid } = await import('nanoid');
18
- if (req.query && req.query['length'] && Number(req.query['length']) > 500)
18
+ if (req.query && req.query['length'] && Number(req.query['length']) > 500) {
19
19
  throw new InvalidQueryException(`"length" can't be more than 500 characters`);
20
+ }
20
21
  const string = nanoid(req.query?.['length'] ? Number(req.query['length']) : 32);
21
22
  return res.json({ data: string });
22
23
  }));
@@ -61,8 +62,9 @@ router.post('/revert/:revision', asyncHandler(async (req, _res, next) => {
61
62
  next();
62
63
  }), respond);
63
64
  router.post('/import/:collection', collectionExists, asyncHandler(async (req, res, next) => {
64
- if (req.is('multipart/form-data') === false)
65
+ if (req.is('multipart/form-data') === false) {
65
66
  throw new UnsupportedMediaTypeException(`Unsupported Content-Type header`);
67
+ }
66
68
  const service = new ImportService({
67
69
  accountability: req.accountability,
68
70
  schema: req.schema,
@@ -7,13 +7,14 @@ import path from 'path';
7
7
  import { flushCaches } from '../../cache.js';
8
8
  import env from '../../env.js';
9
9
  import logger from '../../logger.js';
10
+ import getModuleDefault from '../../utils/get-module-default.js';
10
11
  const __dirname = dirname(fileURLToPath(import.meta.url));
11
12
  export default async function run(database, direction, log = true) {
12
13
  let migrationFiles = await fse.readdir(__dirname);
13
14
  const customMigrationsPath = path.resolve(env['EXTENSIONS_PATH'], 'migrations');
14
15
  let customMigrationFiles = ((await fse.pathExists(customMigrationsPath)) && (await fse.readdir(customMigrationsPath))) || [];
15
16
  migrationFiles = migrationFiles.filter((file) => /^[0-9]+[A-Z]-[^.]+\.(?:js|ts)$/.test(file));
16
- customMigrationFiles = customMigrationFiles.filter((file) => file.endsWith('.js'));
17
+ customMigrationFiles = customMigrationFiles.filter((file) => /\.(c|m)?js$/.test(file));
17
18
  const completedMigrations = await database.select('*').from('directus_migrations').orderBy('version');
18
19
  const migrations = [
19
20
  ...migrationFiles.map((path) => parseFilePath(path)),
@@ -84,7 +85,7 @@ export default async function run(database, direction, log = true) {
84
85
  for (const migration of migrations) {
85
86
  if (migration.completed === false) {
86
87
  needsCacheFlush = true;
87
- const { up } = await import(`file://${migration.file}`);
88
+ const { up } = getModuleDefault(await import(`file://${migration.file}`));
88
89
  if (log) {
89
90
  logger.info(`Applying ${migration.name}...`);
90
91
  }
@@ -151,7 +151,7 @@ async function getDBQuery(schema, knex, table, fieldNodes, query) {
151
151
  const preProcess = getColumnPreprocessor(knex, schema, table);
152
152
  const queryCopy = clone(query);
153
153
  const helpers = getHelpers(knex);
154
- queryCopy.limit = typeof queryCopy.limit === 'number' ? queryCopy.limit : 100;
154
+ queryCopy.limit = typeof queryCopy.limit === 'number' ? queryCopy.limit : Number(env['QUERY_LIMIT_DEFAULT']);
155
155
  // Queries with aggregates and groupBy will not have duplicate result
156
156
  if (queryCopy.aggregate || queryCopy.group) {
157
157
  const flatQuery = knex.select(fieldNodes.map(preProcess)).from(table);
@@ -322,13 +322,13 @@ function mergeWithParentItems(schema, nestedItem, parentItem, nestedNode) {
322
322
  });
323
323
  parentItem[nestedNode.fieldKey].push(...itemChildren);
324
324
  if (nestedNode.query.page && nestedNode.query.page > 1) {
325
- parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice((nestedNode.query.limit ?? 100) * (nestedNode.query.page - 1));
325
+ parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice((nestedNode.query.limit ?? Number(env['QUERY_LIMIT_DEFAULT'])) * (nestedNode.query.page - 1));
326
326
  }
327
327
  if (nestedNode.query.offset && nestedNode.query.offset >= 0) {
328
328
  parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(nestedNode.query.offset);
329
329
  }
330
330
  if (nestedNode.query.limit !== -1) {
331
- parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(0, nestedNode.query.limit ?? 100);
331
+ parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].slice(0, nestedNode.query.limit ?? Number(env['QUERY_LIMIT_DEFAULT']));
332
332
  }
333
333
  parentItem[nestedNode.fieldKey] = parentItem[nestedNode.fieldKey].sort((a, b) => {
334
334
  // This is pre-filled in get-ast-from-query
package/dist/env.js CHANGED
@@ -25,6 +25,8 @@ const allowedEnvironmentVars = [
25
25
  'GRAPHQL_INTROSPECTION',
26
26
  'MAX_BATCH_MUTATION',
27
27
  'LOGGER_.+',
28
+ 'QUERY_LIMIT_MAX',
29
+ 'QUERY_LIMIT_DEFAULT',
28
30
  'ROBOTS_TXT',
29
31
  // server
30
32
  'SERVER_.+',
@@ -196,6 +198,7 @@ const defaults = {
196
198
  PUBLIC_URL: '/',
197
199
  MAX_PAYLOAD_SIZE: '1mb',
198
200
  MAX_RELATIONAL_DEPTH: 10,
201
+ QUERY_LIMIT_DEFAULT: 100,
199
202
  MAX_BATCH_MUTATION: Infinity,
200
203
  ROBOTS_TXT: 'User-agent: *\nDisallow: /',
201
204
  DB_EXCLUDE_TABLES: 'spatial_ref_sys,sysdiagrams',
@@ -262,6 +265,13 @@ const defaults = {
262
265
  GRAPHQL_INTROSPECTION: true,
263
266
  FLOWS_EXEC_ALLOWED_MODULES: false,
264
267
  FLOWS_ENV_ALLOW_LIST: false,
268
+ PRESSURE_LIMITER_ENABLED: true,
269
+ PRESSURE_LIMITER_SAMPLE_INTERVAL: 250,
270
+ PRESSURE_LIMITER_MAX_EVENT_LOOP_UTILIZATION: 0.99,
271
+ PRESSURE_LIMITER_MAX_EVENT_LOOP_DELAY: 500,
272
+ PRESSURE_LIMITER_MAX_MEMORY_RSS: false,
273
+ PRESSURE_LIMITER_MAX_MEMORY_HEAP_USED: false,
274
+ PRESSURE_LIMITER_RETRY_AFTER: false,
265
275
  };
266
276
  // Allows us to force certain environment variable into a type, instead of relying
267
277
  // on the auto-parsed type in processValues. ref #3705
@@ -1,4 +1,4 @@
1
- import { APP_EXTENSION_TYPES, APP_SHARED_DEPS, HYBRID_EXTENSION_TYPES, NESTED_EXTENSION_TYPES, } from '@directus/constants';
1
+ import { APP_EXTENSION_TYPES, APP_SHARED_DEPS, HYBRID_EXTENSION_TYPES, JAVASCRIPT_FILE_EXTS, NESTED_EXTENSION_TYPES, } from '@directus/constants';
2
2
  import * as sharedExceptions from '@directus/exceptions';
3
3
  import { isIn, isTypeIn, pluralize } from '@directus/utils';
4
4
  import { ensureExtensionDirs, generateExtensionsEntrypoint, getLocalExtensions, getPackageExtensions, pathToRelativeUrl, resolvePackage, resolvePackageExtensions, } from '@directus/utils/node';
@@ -201,15 +201,14 @@ class ExtensionManager {
201
201
  const extensionDirUrl = pathToRelativeUrl(env['EXTENSIONS_PATH']);
202
202
  const localExtensionUrls = NESTED_EXTENSION_TYPES.flatMap((type) => {
203
203
  const typeDir = path.posix.join(extensionDirUrl, pluralize(type));
204
- const fileExts = ['js', 'mjs', 'cjs'];
205
204
  if (isIn(type, HYBRID_EXTENSION_TYPES)) {
206
205
  return [
207
- path.posix.join(typeDir, '*', `app.{${fileExts.join()}}`),
208
- path.posix.join(typeDir, '*', `api.{${fileExts.join()}}`),
206
+ path.posix.join(typeDir, '*', `app.{${JAVASCRIPT_FILE_EXTS.join()}}`),
207
+ path.posix.join(typeDir, '*', `api.{${JAVASCRIPT_FILE_EXTS.join()}}`),
209
208
  ];
210
209
  }
211
210
  else {
212
- return path.posix.join(typeDir, '*', `index.{${fileExts.join()}}`);
211
+ return path.posix.join(typeDir, '*', `index.{${JAVASCRIPT_FILE_EXTS.join()}}`);
213
212
  }
214
213
  });
215
214
  this.watcher = chokidar.watch([path.resolve('package.json'), path.posix.join(extensionDirUrl, '*', 'package.json'), ...localExtensionUrls], {
package/dist/flows.d.ts CHANGED
@@ -13,7 +13,10 @@ declare class FlowManager {
13
13
  addOperation(id: string, operation: OperationHandler): void;
14
14
  clearOperations(): void;
15
15
  runOperationFlow(id: string, data: unknown, context: Record<string, unknown>): Promise<unknown>;
16
- runWebhookFlow(id: string, data: unknown, context: Record<string, unknown>): Promise<unknown>;
16
+ runWebhookFlow(id: string, data: unknown, context: Record<string, unknown>): Promise<{
17
+ result: unknown;
18
+ cacheEnabled: boolean;
19
+ }>;
17
20
  private load;
18
21
  private unload;
19
22
  private executeFlow;
package/dist/flows.js CHANGED
@@ -9,7 +9,6 @@ import getDatabase from './database/index.js';
9
9
  import emitter from './emitter.js';
10
10
  import env from './env.js';
11
11
  import * as exceptions from './exceptions/index.js';
12
- import { BaseException } from '@directus/exceptions';
13
12
  import logger from './logger.js';
14
13
  import { getMessenger } from './messenger.js';
15
14
  import { ActivityService } from './services/activity.js';
@@ -20,6 +19,7 @@ import { constructFlowTree } from './utils/construct-flow-tree.js';
20
19
  import { getSchema } from './utils/get-schema.js';
21
20
  import { JobQueue } from './utils/job-queue.js';
22
21
  import { mapValuesDeep } from './utils/map-values-deep.js';
22
+ import { sanitizeError } from './utils/sanitize-error.js';
23
23
  let flowManager;
24
24
  const redactLogs = fastRedact({
25
25
  censor: '--redacted--',
@@ -167,16 +167,20 @@ class FlowManager {
167
167
  this.operationFlowHandlers[flow.id] = handler;
168
168
  }
169
169
  else if (flow.trigger === 'webhook') {
170
- const handler = (data, context) => {
170
+ const method = flow.options?.['method'] ?? 'GET';
171
+ const handler = async (data, context) => {
172
+ let cacheEnabled = true;
173
+ if (method === 'GET') {
174
+ cacheEnabled = flow.options['cacheEnabled'] !== false;
175
+ }
171
176
  if (flow.options['async']) {
172
177
  this.executeFlow(flow, data, context);
173
- return undefined;
178
+ return { result: undefined, cacheEnabled };
174
179
  }
175
180
  else {
176
- return this.executeFlow(flow, data, context);
181
+ return { result: await this.executeFlow(flow, data, context), cacheEnabled };
177
182
  }
178
183
  };
179
- const method = flow.options?.['method'] ?? 'GET';
180
184
  // Default return to $last for webhooks
181
185
  flow.options['return'] = flow.options['return'] ?? '$last';
182
186
  this.webhookFlowHandlers[`${method}-${flow.id}`] = handler;
@@ -325,11 +329,9 @@ class FlowManager {
325
329
  }
326
330
  catch (error) {
327
331
  let data;
328
- if (error instanceof BaseException) {
329
- data = { message: error.message, code: error.code, extensions: error.extensions, status: error.status };
330
- }
331
- else if (error instanceof Error) {
332
- data = { message: error.message };
332
+ if (error instanceof Error) {
333
+ // make sure we dont expose the stack trace
334
+ data = sanitizeError(error);
333
335
  }
334
336
  else if (typeof error === 'string') {
335
337
  // If the error is a JSON string, parse it and use that as the error data
@@ -3,14 +3,14 @@ import type { Range, Stat } from '@directus/storage';
3
3
  import type { Accountability } from '@directus/types';
4
4
  import type { Knex } from 'knex';
5
5
  import type { Readable } from 'node:stream';
6
- import type { AbstractServiceOptions, TransformationParams } from '../types/index.js';
6
+ import type { AbstractServiceOptions, TransformationSet } from '../types/index.js';
7
7
  import { AuthorizationService } from './authorization.js';
8
8
  export declare class AssetsService {
9
9
  knex: Knex;
10
10
  accountability: Accountability | null;
11
11
  authorizationService: AuthorizationService;
12
12
  constructor(options: AbstractServiceOptions);
13
- getAsset(id: string, transformation: TransformationParams, range?: Range): Promise<{
13
+ getAsset(id: string, transformation: TransformationSet, range?: Range): Promise<{
14
14
  stream: Readable;
15
15
  file: any;
16
16
  stat: Stat;
@@ -60,7 +60,7 @@ export class CollectionsService {
60
60
  // Directus heavily relies on the primary key of a collection, so we have to make sure that
61
61
  // every collection that is created has a primary key. If no primary key field is created
62
62
  // while making the collection, we default to an auto incremented id named `id`
63
- if (!payload.fields)
63
+ if (!payload.fields) {
64
64
  payload.fields = [
65
65
  {
66
66
  field: 'id',
@@ -76,6 +76,7 @@ export class CollectionsService {
76
76
  },
77
77
  },
78
78
  ];
79
+ }
79
80
  // Ensure that every field meta has the field/collection fields filled correctly
80
81
  payload.fields = payload.fields.map((field) => {
81
82
  if (field.meta) {
@@ -311,7 +311,9 @@ export class FieldsService {
311
311
  }
312
312
  if (hookAdjustedField.schema) {
313
313
  const existingColumn = await this.schemaInspector.columnInfo(collection, hookAdjustedField.field);
314
- if (!isEqual(sanitizeColumn(existingColumn), hookAdjustedField.schema)) {
314
+ // Sanitize column only when applying snapshot diff as opts is only passed from /utils/apply-diff.ts
315
+ const columnToCompare = opts?.bypassLimits && opts.autoPurgeSystemCache === false ? sanitizeColumn(existingColumn) : existingColumn;
316
+ if (!isEqual(columnToCompare, hookAdjustedField.schema)) {
315
317
  try {
316
318
  await this.knex.schema.alterTable(collection, (table) => {
317
319
  if (!hookAdjustedField.schema)
@@ -98,8 +98,9 @@ export class GraphQLService {
98
98
  const formattedResult = {};
99
99
  if (result['data'])
100
100
  formattedResult.data = result['data'];
101
- if (result['errors'])
101
+ if (result['errors']) {
102
102
  formattedResult.errors = result['errors'].map((error) => processError(this.accountability, error));
103
+ }
103
104
  if (result['extensions'])
104
105
  formattedResult.extensions = result['extensions'];
105
106
  return formattedResult;
@@ -285,8 +286,9 @@ export class GraphQLService {
285
286
  type = new GraphQLNonNull(type);
286
287
  }
287
288
  if (collection.primary === field.field) {
288
- if (!field.defaultValue && !field.special.includes('uuid') && action === 'create')
289
+ if (!field.defaultValue && !field.special.includes('uuid') && action === 'create') {
289
290
  type = new GraphQLNonNull(GraphQLID);
291
+ }
290
292
  else if (['create', 'update'].includes(action))
291
293
  type = GraphQLID;
292
294
  else
@@ -1537,6 +1539,15 @@ export class GraphQLService {
1537
1539
  },
1538
1540
  }),
1539
1541
  },
1542
+ queryLimit: {
1543
+ type: new GraphQLObjectType({
1544
+ name: 'server_info_query_limit',
1545
+ fields: {
1546
+ default: { type: GraphQLInt },
1547
+ max: { type: GraphQLInt },
1548
+ },
1549
+ }),
1550
+ },
1540
1551
  });
1541
1552
  }
1542
1553
  if (this.accountability?.admin === true) {
@@ -144,7 +144,7 @@ export class ItemsService {
144
144
  // to read from it
145
145
  payload[primaryKeyField] = primaryKey;
146
146
  }
147
- const { revisions: revisionsO2M, nestedActionEvents: nestedActionEventsO2M } = await payloadService.processO2M(payload, primaryKey, opts);
147
+ const { revisions: revisionsO2M, nestedActionEvents: nestedActionEventsO2M } = await payloadService.processO2M(payloadWithPresets, primaryKey, opts);
148
148
  nestedActionEvents.push(...nestedActionEventsM2O);
149
149
  nestedActionEvents.push(...nestedActionEventsA2O);
150
150
  nestedActionEvents.push(...nestedActionEventsO2M);
@@ -50,31 +50,49 @@ export class PermissionsService extends ItemsService {
50
50
  async createOne(data, opts) {
51
51
  const res = await super.createOne(data, opts);
52
52
  await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache });
53
+ if (this.cache && opts?.autoPurgeCache !== false) {
54
+ await this.cache.clear();
55
+ }
53
56
  return res;
54
57
  }
55
58
  async createMany(data, opts) {
56
59
  const res = await super.createMany(data, opts);
57
60
  await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache });
61
+ if (this.cache && opts?.autoPurgeCache !== false) {
62
+ await this.cache.clear();
63
+ }
58
64
  return res;
59
65
  }
60
66
  async updateBatch(data, opts) {
61
67
  const res = await super.updateBatch(data, opts);
62
68
  await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache });
69
+ if (this.cache && opts?.autoPurgeCache !== false) {
70
+ await this.cache.clear();
71
+ }
63
72
  return res;
64
73
  }
65
74
  async updateMany(keys, data, opts) {
66
75
  const res = await super.updateMany(keys, data, opts);
67
76
  await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache });
77
+ if (this.cache && opts?.autoPurgeCache !== false) {
78
+ await this.cache.clear();
79
+ }
68
80
  return res;
69
81
  }
70
82
  async upsertMany(payloads, opts) {
71
83
  const res = await super.upsertMany(payloads, opts);
72
84
  await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache });
85
+ if (this.cache && opts?.autoPurgeCache !== false) {
86
+ await this.cache.clear();
87
+ }
73
88
  return res;
74
89
  }
75
90
  async deleteMany(keys, opts) {
76
91
  const res = await super.deleteMany(keys, opts);
77
92
  await clearSystemCache({ autoPurgeCache: opts?.autoPurgeCache });
93
+ if (this.cache && opts?.autoPurgeCache !== false) {
94
+ await this.cache.clear();
95
+ }
78
96
  return res;
79
97
  }
80
98
  }
@@ -64,6 +64,10 @@ export class ServerService {
64
64
  info['flows'] = {
65
65
  execAllowedModules: env['FLOWS_EXEC_ALLOWED_MODULES'] ? toArray(env['FLOWS_EXEC_ALLOWED_MODULES']) : [],
66
66
  };
67
+ info['queryLimit'] = {
68
+ default: env['QUERY_LIMIT_DEFAULT'],
69
+ max: Number.isFinite(env['QUERY_LIMIT_MAX']) ? env['QUERY_LIMIT_MAX'] : -1,
70
+ };
67
71
  }
68
72
  if (this.accountability?.admin === true) {
69
73
  const { osType, osVersion } = getOSInfo();
@@ -6,10 +6,15 @@ export type TransformationMap = {
6
6
  };
7
7
  export type Transformation = TransformationMap[keyof TransformationMap];
8
8
  export type TransformationResize = Pick<ResizeOptions, 'width' | 'height' | 'fit' | 'withoutEnlargement'>;
9
+ export type TransformationFormat = 'jpg' | 'jpeg' | 'png' | 'webp' | 'tiff' | 'avif';
9
10
  export type TransformationParams = {
10
11
  key?: string;
11
12
  transforms?: Transformation[];
12
- format?: 'auto' | 'jpg' | 'jpeg' | 'png' | 'webp' | 'tiff' | 'avif';
13
+ format?: TransformationFormat | 'auto';
13
14
  quality?: number;
14
15
  } & TransformationResize;
16
+ export type TransformationSet = {
17
+ transformationParams: TransformationParams;
18
+ acceptFormat?: TransformationFormat | undefined;
19
+ };
15
20
  export {};