@directus/api 18.0.0 → 18.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,7 +6,8 @@ const __dirname = dirname(fileURLToPath(import.meta.url));
6
6
  export async function up(knex) {
7
7
  await knex.schema.alterTable('directus_extensions', (table) => {
8
8
  table.uuid('id').nullable();
9
- table.string('source', 255);
9
+ table.string('folder');
10
+ table.string('source');
10
11
  table.uuid('bundle');
11
12
  });
12
13
  const installedExtensions = await knex.select('name').from('directus_extensions');
@@ -30,7 +31,7 @@ export async function up(knex) {
30
31
  catch {
31
32
  source = 'local';
32
33
  }
33
- await knex('directus_extensions').update({ id, source }).where({ name });
34
+ await knex('directus_extensions').update({ id, source, folder: name }).where({ name });
34
35
  idMap.set(name, id);
35
36
  }
36
37
  }
@@ -47,22 +48,41 @@ export async function up(knex) {
47
48
  if (!bundleParentId)
48
49
  continue;
49
50
  await knex('directus_extensions')
50
- .update({ bundle: bundleParentId, name: name.substring(bundleParentName.length + 1) })
51
- .where({ name });
51
+ .update({ bundle: bundleParentId, folder: name.substring(bundleParentName.length + 1) })
52
+ .where({ folder: name });
52
53
  }
53
54
  await knex.schema.alterTable('directus_extensions', (table) => {
54
- table.dropPrimary();
55
+ table.dropColumn('name');
55
56
  table.uuid('id').alter().primary().notNullable();
56
- table.string('source', 255).alter().notNullable().defaultTo('local');
57
- table.renameColumn('name', 'folder');
57
+ table.string('source').alter().notNullable();
58
+ table.string('folder').alter().notNullable();
58
59
  });
59
60
  }
61
+ /*
62
+ * Note: For local extensions having a different package & folder name,
63
+ * we aren't able to revert to the exact same state as before.
64
+ * But we still need to do the name conversion, in order for the migration to succeed.
65
+ */
60
66
  export async function down(knex) {
61
67
  await knex.schema.alterTable('directus_extensions', (table) => {
62
- table.dropColumns('id', 'source', 'bundle');
63
- table.renameColumn('folder', 'name');
68
+ table.string('name');
64
69
  });
70
+ const installedExtensions = await knex.select(['id', 'folder', 'bundle']).from('directus_extensions');
71
+ const idMap = new Map(installedExtensions.map((extension) => [extension.id, extension.folder]));
72
+ for (const { id, folder, bundle, source } of installedExtensions) {
73
+ if (source === 'registry') {
74
+ await knex('directus_extensions').delete().where({ id });
75
+ continue;
76
+ }
77
+ let name = folder;
78
+ if (bundle) {
79
+ const bundleParentName = idMap.get(bundle);
80
+ name = `${bundleParentName}/${name}`;
81
+ }
82
+ await knex('directus_extensions').update({ name }).where({ id });
83
+ }
65
84
  await knex.schema.alterTable('directus_extensions', (table) => {
66
- table.string('name', 255).primary().alter();
85
+ table.dropColumns('id', 'folder', 'source', 'bundle');
86
+ table.string('name').alter().primary().notNullable();
67
87
  });
68
88
  }
@@ -66,7 +66,7 @@ export function generateApiExtensionsSandboxEntrypoint(type, name, endpointRoute
66
66
 
67
67
  const registerOperation = ${generateHostFunctionReference(index, ['id', 'handler'], { async: false })}
68
68
 
69
- const operationConfig = extensionExport();
69
+ const operationConfig = extensionExport;
70
70
 
71
71
  registerOperation(operationConfig.id, operationConfig.handler);
72
72
  `;
@@ -28,8 +28,8 @@ export function requestGenerator(requestedScopes) {
28
28
  if (body !== undefined && body.typeof !== 'undefined' && body.typeof !== 'string' && body.typeof !== 'object') {
29
29
  throw new TypeError('Request body has to be of type string or object');
30
30
  }
31
- if (headers !== undefined && headers.typeof !== 'undefined' && headers.typeof !== 'array') {
32
- throw new TypeError('Request headers has to be of type array');
31
+ if (headers !== undefined && headers.typeof !== 'undefined' && headers.typeof !== 'object') {
32
+ throw new TypeError('Request headers has to be of type object');
33
33
  }
34
34
  const methodCopied = await method?.copy();
35
35
  const bodyCopied = await body?.copy();
@@ -18,7 +18,7 @@ export async function instantiateSandboxSdk(isolate, requestedScopes) {
18
18
  const handlerCode = sdk
19
19
  .map(({ name, args, async }) => `sdk.${name} = ${generateHostFunctionReference(index, args, { async })}`)
20
20
  .join('\n');
21
- await apiContext.evalClosure(handlerCode, sdk.map(({ generator, async }) => (async ? wrap(generator(requestedScopes)) : generator(requestedScopes))), { filename: '<extensions-sdk>', arguments: { reference: true } });
21
+ await apiContext.evalClosure(handlerCode, sdk.map(({ name, generator, async }) => async ? wrap(name, generator(requestedScopes)) : generator(requestedScopes)), { filename: '<extensions-sdk>', arguments: { reference: true } });
22
22
  const exportCode = sdk.map(({ name }) => `export const ${name} = sdk.${name};`).join('\n');
23
23
  const apiModule = await isolate.compileModule(exportCode);
24
24
  await apiModule.instantiate(apiContext, () => {
@@ -3,9 +3,11 @@
3
3
  *
4
4
  * This is needed as isolated-vm doesn't allow the isolate to catch errors that are thrown in the
5
5
  * host. Instead, we'll wrap the output in a known shape which allows the isolated sdk context to
6
- * re-throw the error in the correct context
6
+ * re-throw the error in the correct context.
7
+ *
8
+ * @see https://github.com/laverdet/isolated-vm/issues/417
7
9
  */
8
- export declare function wrap(util: (...args: any[]) => any): (...args: any[]) => Promise<{
10
+ export declare function wrap(name: string, util: (...args: any[]) => any): (...args: any[]) => Promise<{
9
11
  result: any;
10
12
  error: boolean;
11
13
  }>;
@@ -3,15 +3,49 @@
3
3
  *
4
4
  * This is needed as isolated-vm doesn't allow the isolate to catch errors that are thrown in the
5
5
  * host. Instead, we'll wrap the output in a known shape which allows the isolated sdk context to
6
- * re-throw the error in the correct context
6
+ * re-throw the error in the correct context.
7
+ *
8
+ * @see https://github.com/laverdet/isolated-vm/issues/417
7
9
  */
8
- export function wrap(util) {
10
+ export function wrap(name, util) {
9
11
  return async (...args) => {
10
12
  try {
11
13
  return { result: await util(...args), error: false };
12
14
  }
13
- catch (e) {
14
- return { result: e, error: true };
15
+ catch (error) {
16
+ // isolated-vm expects objects thrown from within the vm to be an instance of `Error`
17
+ let result;
18
+ if (error instanceof Error) {
19
+ // Don't expose the stack trace to the vm
20
+ delete error.stack;
21
+ // Serialize the remaining error properties
22
+ for (const key of Object.getOwnPropertyNames(error)) {
23
+ const value = error[key];
24
+ if (!value || typeof value !== 'object')
25
+ continue;
26
+ error[key] = JSON.stringify(value, getCircularReplacer());
27
+ }
28
+ result = error;
29
+ }
30
+ else if (error && typeof error !== 'object') {
31
+ result = error;
32
+ }
33
+ else {
34
+ result = new Error(`Unknown error in "${name}" Sandbox SDK function`);
35
+ }
36
+ return { result, error: true };
37
+ }
38
+ };
39
+ }
40
+ function getCircularReplacer() {
41
+ const seen = new WeakSet();
42
+ return (_key, value) => {
43
+ if (value !== null && typeof value === 'object') {
44
+ if (seen.has(value)) {
45
+ return '[Circular]';
46
+ }
47
+ seen.add(value);
15
48
  }
49
+ return value;
16
50
  };
17
51
  }
@@ -1 +1,3 @@
1
- export declare const syncExtensions: () => Promise<void>;
1
+ export declare const syncExtensions: (options?: {
2
+ force: boolean;
3
+ }) => Promise<void>;
@@ -7,56 +7,64 @@ import { dirname, join, relative, resolve, sep } from 'node:path';
7
7
  import { pipeline } from 'node:stream/promises';
8
8
  import Queue from 'p-queue';
9
9
  import { useBus } from '../../bus/index.js';
10
+ import { useLock } from '../../lock/index.js';
10
11
  import { useLogger } from '../../logger.js';
11
12
  import { getStorage } from '../../storage/index.js';
12
13
  import { getExtensionsPath } from './get-extensions-path.js';
13
14
  import { SyncStatus, getSyncStatus, setSyncStatus } from './sync-status.js';
14
- export const syncExtensions = async () => {
15
+ export const syncExtensions = async (options) => {
16
+ const lock = useLock();
17
+ const messenger = useBus();
15
18
  const env = useEnv();
16
19
  const logger = useLogger();
17
- const extensionsPath = getExtensionsPath();
18
- const storageExtensionsPath = env['EXTENSIONS_PATH'];
19
- const messenger = useBus();
20
- const isPrimaryProcess = String(process.env['NODE_APP_INSTANCE']) === '0' || process.env['NODE_APP_INSTANCE'] === undefined;
21
- const id = await mid.machineId();
22
- const message = `extensions-sync/${id}`;
23
- if (isPrimaryProcess === false) {
20
+ if (!options?.force) {
24
21
  const isDone = (await getSyncStatus()) === SyncStatus.DONE;
25
22
  if (isDone)
26
23
  return;
24
+ }
25
+ const machineId = await mid.machineId();
26
+ const machineKey = `extensions-sync/${machineId}`;
27
+ const processId = await lock.increment(machineKey);
28
+ const currentProcessShouldHandleSync = processId === 1;
29
+ if (currentProcessShouldHandleSync === false) {
27
30
  logger.trace('Extensions already being synced to this machine from another process.');
28
- /**
29
- * Wait until the process that called the lock publishes a message that the syncing is complete
30
- */
31
+ // Wait until the process that called the lock publishes a message that the syncing is complete
31
32
  return new Promise((resolve) => {
32
- messenger.subscribe(message, () => resolve());
33
+ messenger.subscribe(machineKey, () => resolve());
33
34
  });
34
35
  }
35
- if (await exists(extensionsPath)) {
36
- // In case the FS still contains the cached extensions from a previous invocation. We have to
37
- // clear them out to ensure the remote extensions folder remains the source of truth for all
38
- // extensions that are loaded.
39
- await rm(extensionsPath, { recursive: true, force: true });
36
+ try {
37
+ const extensionsPath = getExtensionsPath();
38
+ const storageExtensionsPath = env['EXTENSIONS_PATH'];
39
+ if (await exists(extensionsPath)) {
40
+ // In case the FS still contains the cached extensions from a previous invocation. We have to
41
+ // clear them out to ensure the remote extensions folder remains the source of truth for all
42
+ // extensions that are loaded.
43
+ await rm(extensionsPath, { recursive: true, force: true });
44
+ }
45
+ // Ensure that the local extensions cache path exists
46
+ await mkdir(extensionsPath, { recursive: true });
47
+ await setSyncStatus(SyncStatus.SYNCING);
48
+ logger.trace('Syncing extensions from configured storage location...');
49
+ const storage = await getStorage();
50
+ const disk = storage.location(env['EXTENSIONS_LOCATION']);
51
+ // Make sure we don't overload the file handles
52
+ const queue = new Queue({ concurrency: 1000 });
53
+ for await (const filepath of disk.list(storageExtensionsPath)) {
54
+ const readStream = await disk.read(filepath);
55
+ // We want files to be stored in the root of `$TEMP_PATH/extensions`, so gotta remove the
56
+ // extensions path on disk from the start of the file path
57
+ const destPath = join(extensionsPath, relative(resolve(sep, storageExtensionsPath), resolve(sep, filepath)));
58
+ // Ensure that the directory path exists
59
+ await mkdir(dirname(destPath), { recursive: true });
60
+ const writeStream = createWriteStream(destPath);
61
+ queue.add(() => pipeline(readStream, writeStream));
62
+ }
63
+ await queue.onIdle();
64
+ await setSyncStatus(SyncStatus.DONE);
65
+ messenger.publish(machineKey, { ready: true });
40
66
  }
41
- // Ensure that the local extensions cache path exists
42
- await mkdir(extensionsPath, { recursive: true });
43
- await setSyncStatus(SyncStatus.SYNCING);
44
- logger.trace('Syncing extensions from configured storage location...');
45
- const storage = await getStorage();
46
- const disk = storage.location(env['EXTENSIONS_LOCATION']);
47
- // Make sure we don't overload the file handles
48
- const queue = new Queue({ concurrency: 1000 });
49
- for await (const filepath of disk.list(storageExtensionsPath)) {
50
- const readStream = await disk.read(filepath);
51
- // We want files to be stored in the root of `$TEMP_PATH/extensions`, so gotta remove the
52
- // extensions path on disk from the start of the file path
53
- const destPath = join(extensionsPath, relative(resolve(sep, storageExtensionsPath), resolve(sep, filepath)));
54
- // Ensure that the directory path exists
55
- await mkdir(dirname(destPath), { recursive: true });
56
- const writeStream = createWriteStream(destPath);
57
- queue.add(() => pipeline(readStream, writeStream));
67
+ finally {
68
+ await lock.delete(machineKey);
58
69
  }
59
- await queue.onIdle();
60
- await setSyncStatus(SyncStatus.DONE);
61
- messenger.publish(message, { ready: true });
62
70
  };
@@ -92,7 +92,9 @@ export declare class ExtensionManager {
92
92
  /**
93
93
  * Reload all the extensions. Will unload if extensions have already been loaded
94
94
  */
95
- reload(): Promise<unknown>;
95
+ reload(options?: {
96
+ forceSync: boolean;
97
+ }): Promise<unknown>;
96
98
  /**
97
99
  * Return the previously generated app extensions bundle
98
100
  */
@@ -2,7 +2,7 @@ import { useEnv } from '@directus/env';
2
2
  import { APP_SHARED_DEPS, HYBRID_EXTENSION_TYPES } from '@directus/extensions';
3
3
  import { generateExtensionsEntrypoint } from '@directus/extensions/node';
4
4
  import { isTypeIn, toBoolean } from '@directus/utils';
5
- import { getNodeEnv, pathToRelativeUrl, processId } from '@directus/utils/node';
5
+ import { pathToRelativeUrl, processId } from '@directus/utils/node';
6
6
  import aliasDefault from '@rollup/plugin-alias';
7
7
  import nodeResolveDefault from '@rollup/plugin-node-resolve';
8
8
  import virtualDefault from '@rollup/plugin-virtual';
@@ -45,7 +45,7 @@ const __dirname = dirname(fileURLToPath(import.meta.url));
45
45
  const env = useEnv();
46
46
  const defaultOptions = {
47
47
  schedule: true,
48
- watch: env['EXTENSIONS_AUTO_RELOAD'] && getNodeEnv() !== 'development',
48
+ watch: env['EXTENSIONS_AUTO_RELOAD'],
49
49
  };
50
50
  export class ExtensionManager {
51
51
  options = defaultOptions;
@@ -170,22 +170,22 @@ export class ExtensionManager {
170
170
  */
171
171
  async install(versionId) {
172
172
  await this.installationManager.install(versionId);
173
- await this.reload();
173
+ await this.reload({ forceSync: true });
174
174
  await this.messenger.publish(this.reloadChannel, { origin: this.processId });
175
175
  }
176
176
  async uninstall(folder) {
177
177
  await this.installationManager.uninstall(folder);
178
- await this.reload();
178
+ await this.reload({ forceSync: true });
179
179
  await this.messenger.publish(this.reloadChannel, { origin: this.processId });
180
180
  }
181
181
  /**
182
182
  * Load all extensions from disk and register them in their respective places
183
183
  */
184
- async load() {
184
+ async load(options) {
185
185
  const logger = useLogger();
186
186
  if (env['EXTENSIONS_LOCATION']) {
187
187
  try {
188
- await syncExtensions();
188
+ await syncExtensions({ force: options?.forceSync ?? false });
189
189
  }
190
190
  catch (error) {
191
191
  logger.error(`Failed to sync extensions`);
@@ -221,7 +221,7 @@ export class ExtensionManager {
221
221
  /**
222
222
  * Reload all the extensions. Will unload if extensions have already been loaded
223
223
  */
224
- reload() {
224
+ reload(options) {
225
225
  if (this.reloadQueue.size > 0) {
226
226
  // The pending job in the queue will already handle the additional changes
227
227
  return Promise.resolve();
@@ -237,7 +237,7 @@ export class ExtensionManager {
237
237
  if (this.isLoaded) {
238
238
  const prevExtensions = clone(this.extensions);
239
239
  await this.unload();
240
- await this.load();
240
+ await this.load(options);
241
241
  logger.info('Extensions reloaded');
242
242
  const added = this.extensions.filter((extension) => !prevExtensions.some((prevExtension) => extension.path === prevExtension.path));
243
243
  const removed = prevExtensions.filter((prevExtension) => !this.extensions.some((extension) => prevExtension.path === extension.path));
@@ -294,7 +294,8 @@ export class ExtensionManager {
294
294
  logger.info('Watching extensions for changes...');
295
295
  const extensionDirUrl = pathToRelativeUrl(getExtensionsPath());
296
296
  this.watcher = chokidar.watch([path.resolve('package.json'), path.posix.join(extensionDirUrl, '*', 'package.json')], {
297
- ignoreInitial: true, // dotdirs are watched by default and frequently found in 'node_modules'
297
+ ignoreInitial: true,
298
+ // dotdirs are watched by default and frequently found in 'node_modules'
298
299
  ignored: `${extensionDirUrl}/**/node_modules/**`,
299
300
  // on macOS dotdirs in linked extensions are watched too
300
301
  followSymlinks: os.platform() === 'darwin' ? false : true,
@@ -318,20 +319,20 @@ export class ExtensionManager {
318
319
  * removed
319
320
  */
320
321
  updateWatchedExtensions(added, removed = []) {
321
- if (this.watcher) {
322
- const toPackageExtensionPaths = (extensions) => extensions
323
- .filter((extension) => !extension.local || extension.type === 'bundle')
324
- .flatMap((extension) => isTypeIn(extension, HYBRID_EXTENSION_TYPES) || extension.type === 'bundle'
325
- ? [
326
- path.resolve(extension.path, extension.entrypoint.app),
327
- path.resolve(extension.path, extension.entrypoint.api),
328
- ]
329
- : path.resolve(extension.path, extension.entrypoint));
330
- const addedPackageExtensionPaths = toPackageExtensionPaths(added);
331
- const removedPackageExtensionPaths = toPackageExtensionPaths(removed);
332
- this.watcher.add(addedPackageExtensionPaths);
333
- this.watcher.unwatch(removedPackageExtensionPaths);
334
- }
322
+ if (!this.watcher)
323
+ return;
324
+ const extensionDir = path.resolve(getExtensionsPath());
325
+ const registryDir = path.join(extensionDir, '.registry');
326
+ const toPackageExtensionPaths = (extensions) => extensions
327
+ .filter((extension) => extension.local && extension.path.startsWith(extensionDir) && !extension.path.startsWith(registryDir))
328
+ .flatMap((extension) => isTypeIn(extension, HYBRID_EXTENSION_TYPES) || extension.type === 'bundle'
329
+ ? [
330
+ path.resolve(extension.path, extension.entrypoint.app),
331
+ path.resolve(extension.path, extension.entrypoint.api),
332
+ ]
333
+ : path.resolve(extension.path, extension.entrypoint));
334
+ this.watcher.add(toPackageExtensionPaths(added));
335
+ this.watcher.unwatch(toPackageExtensionPaths(removed));
335
336
  }
336
337
  /**
337
338
  * Uses rollup to bundle the app extensions together into a single file the app can download and
@@ -399,7 +400,8 @@ export class ExtensionManager {
399
400
  });
400
401
  this.unregisterFunctionMap.set(extension.name, async () => {
401
402
  await unregisterFunction();
402
- isolate.dispose();
403
+ if (!isolate.isDisposed)
404
+ isolate.dispose();
403
405
  });
404
406
  }
405
407
  async registerApiExtensions() {
package/dist/flows.js CHANGED
@@ -17,7 +17,6 @@ import { getSchema } from './utils/get-schema.js';
17
17
  import { JobQueue } from './utils/job-queue.js';
18
18
  import { mapValuesDeep } from './utils/map-values-deep.js';
19
19
  import { redactObject } from './utils/redact-object.js';
20
- import { sanitizeError } from './utils/sanitize-error.js';
21
20
  import { scheduleSynchronizedJob, validateCron } from './utils/schedule.js';
22
21
  import { isSystemCollection } from '@directus/system-data';
23
22
  let flowManager;
@@ -341,8 +340,9 @@ class FlowManager {
341
340
  catch (error) {
342
341
  let data;
343
342
  if (error instanceof Error) {
344
- // make sure we don't expose the stack trace
345
- data = sanitizeError(error);
343
+ // Don't expose the stack trace to the next operation
344
+ delete error.stack;
345
+ data = error;
346
346
  }
347
347
  else if (typeof error === 'string') {
348
348
  // If the error is a JSON string, parse it and use that as the error data
@@ -0,0 +1 @@
1
+ export * from './lib/use-lock.js';
@@ -0,0 +1 @@
1
+ export * from './lib/use-lock.js';
@@ -0,0 +1,8 @@
1
+ import { type Kv } from '@directus/memory';
2
+ export declare const _cache: {
3
+ lock: Kv | undefined;
4
+ };
5
+ /**
6
+ * Returns globally shared lock kv instance.
7
+ */
8
+ export declare const useLock: () => Kv;
@@ -0,0 +1,20 @@
1
+ import { createKv } from '@directus/memory';
2
+ import { redisConfigAvailable, useRedis } from '../../redis/index.js';
3
+ export const _cache = {
4
+ lock: undefined,
5
+ };
6
+ /**
7
+ * Returns globally shared lock kv instance.
8
+ */
9
+ export const useLock = () => {
10
+ if (_cache.lock) {
11
+ return _cache.lock;
12
+ }
13
+ if (redisConfigAvailable()) {
14
+ _cache.lock = createKv({ type: 'redis', redis: useRedis(), namespace: 'directus:lock' });
15
+ }
16
+ else {
17
+ _cache.lock = createKv({ type: 'local' });
18
+ }
19
+ return _cache.lock;
20
+ };
@@ -1,6 +1,8 @@
1
1
  import { defineOperationApi } from '@directus/extensions';
2
2
  import { MailService } from '../../services/mail/index.js';
3
3
  import { md } from '../../utils/md.js';
4
+ import { useLogger } from '../../logger.js';
5
+ const logger = useLogger();
4
6
  export default defineOperationApi({
5
7
  id: 'mail',
6
8
  handler: async ({ body, template, data, to, type, subject }, { accountability, database, getSchema }) => {
@@ -16,6 +18,8 @@ export default defineOperationApi({
16
18
  else {
17
19
  mailObject.html = type === 'wysiwyg' ? safeBody : md(safeBody);
18
20
  }
19
- await mailService.send(mailObject);
21
+ mailService.send(mailObject).catch((error) => {
22
+ logger.error(error, 'Could not send mail in "mail" operation');
23
+ });
20
24
  },
21
25
  });
@@ -14,7 +14,7 @@ export declare class MailService {
14
14
  knex: Knex;
15
15
  mailer: Transporter;
16
16
  constructor(opts: AbstractServiceOptions);
17
- send(options: EmailOptions): Promise<void>;
17
+ send<T>(options: EmailOptions): Promise<T>;
18
18
  private renderTemplate;
19
19
  private getDefaultTemplateData;
20
20
  }
@@ -5,7 +5,6 @@ import { Liquid } from 'liquidjs';
5
5
  import path from 'path';
6
6
  import { fileURLToPath } from 'url';
7
7
  import getDatabase from '../../database/index.js';
8
- import { getExtensionsPath } from '../../extensions/lib/get-extensions-path.js';
9
8
  import { useLogger } from '../../logger.js';
10
9
  import getMailer from '../../mailer.js';
11
10
  import { Url } from '../../utils/url.js';
@@ -13,7 +12,7 @@ const env = useEnv();
13
12
  const logger = useLogger();
14
13
  const __dirname = path.dirname(fileURLToPath(import.meta.url));
15
14
  const liquidEngine = new Liquid({
16
- root: [path.resolve(getExtensionsPath(), 'templates'), path.resolve(__dirname, 'templates')],
15
+ root: [path.resolve(env['EMAIL_TEMPLATES_PATH']), path.resolve(__dirname, 'templates')],
17
16
  extname: '.liquid',
18
17
  });
19
18
  export class MailService {
@@ -55,13 +54,11 @@ export class MailService {
55
54
  .map((line) => line.trim())
56
55
  .join('\n');
57
56
  }
58
- this.mailer.sendMail({ ...emailOptions, from, html }).catch((error) => {
59
- logger.warn(`Email send failed:`);
60
- logger.warn(error);
61
- });
57
+ const info = await this.mailer.sendMail({ ...emailOptions, from, html });
58
+ return info;
62
59
  }
63
60
  async renderTemplate(template, variables) {
64
- const customTemplatePath = path.resolve(getExtensionsPath(), 'templates', template + '.liquid');
61
+ const customTemplatePath = path.resolve(env['EMAIL_TEMPLATES_PATH'], template + '.liquid');
65
62
  const systemTemplatePath = path.join(__dirname, 'templates', template + '.liquid');
66
63
  const templatePath = (await fse.pathExists(customTemplatePath)) ? customTemplatePath : systemTemplatePath;
67
64
  if ((await fse.pathExists(templatePath)) === false) {
@@ -37,19 +37,18 @@ export class NotificationsService extends ItemsService {
37
37
  .toString();
38
38
  const html = data.message ? md(data.message) : '';
39
39
  if (user['email'] && user['email_notifications'] === true) {
40
- try {
41
- await this.mailService.send({
42
- template: {
43
- name: 'base',
44
- data: user['role']?.app_access ? { url: manageUserAccountUrl, html } : { html },
45
- },
46
- to: user['email'],
47
- subject: data.subject,
48
- });
49
- }
50
- catch (error) {
51
- logger.error(error.message);
52
- }
40
+ this.mailService
41
+ .send({
42
+ template: {
43
+ name: 'base',
44
+ data: user['role']?.app_access ? { url: manageUserAccountUrl, html } : { html },
45
+ },
46
+ to: user['email'],
47
+ subject: data.subject,
48
+ })
49
+ .catch((error) => {
50
+ logger.error(error, `Could not send notification via mail`);
51
+ });
53
52
  }
54
53
  }
55
54
  }
@@ -10,7 +10,9 @@ import { AuthorizationService } from './authorization.js';
10
10
  import { ItemsService } from './items.js';
11
11
  import { MailService } from './mail/index.js';
12
12
  import { UsersService } from './users.js';
13
+ import { useLogger } from '../logger.js';
13
14
  const env = useEnv();
15
+ const logger = useLogger();
14
16
  export class SharesService extends ItemsService {
15
17
  authorizationService;
16
18
  constructor(options) {
@@ -119,7 +121,8 @@ ${userName(userInfo)} has invited you to view an item in ${share['collection']}.
119
121
  [Open](${new Url(env['PUBLIC_URL']).addPath('admin', 'shared', payload.share).toString()})
120
122
  `;
121
123
  for (const email of payload.emails) {
122
- await mailService.send({
124
+ mailService
125
+ .send({
123
126
  template: {
124
127
  name: 'base',
125
128
  data: {
@@ -128,6 +131,9 @@ ${userName(userInfo)} has invited you to view an item in ${share['collection']}.
128
131
  },
129
132
  to: email,
130
133
  subject: `${userName(userInfo)} has shared an item with you`,
134
+ })
135
+ .catch((error) => {
136
+ logger.error(error, `Could not send share notification mail`);
131
137
  });
132
138
  }
133
139
  }
@@ -14,7 +14,9 @@ import { Url } from '../utils/url.js';
14
14
  import { ItemsService } from './items.js';
15
15
  import { MailService } from './mail/index.js';
16
16
  import { SettingsService } from './settings.js';
17
+ import { useLogger } from '../logger.js';
17
18
  const env = useEnv();
19
+ const logger = useLogger();
18
20
  export class UsersService extends ItemsService {
19
21
  constructor(options) {
20
22
  super('directus_users', options);
@@ -335,7 +337,8 @@ export class UsersService extends ItemsService {
335
337
  // Send invite for new and already invited users
336
338
  if (isEmpty(user) || user.status === 'invited') {
337
339
  const subjectLine = subject ?? "You've been invited";
338
- await mailService.send({
340
+ mailService
341
+ .send({
339
342
  to: user?.email ?? email,
340
343
  subject: subjectLine,
341
344
  template: {
@@ -345,6 +348,9 @@ export class UsersService extends ItemsService {
345
348
  email: user?.email ?? email,
346
349
  },
347
350
  },
351
+ })
352
+ .catch((error) => {
353
+ logger.error(error, `Could not send user invitation mail`);
348
354
  });
349
355
  }
350
356
  }
@@ -386,7 +392,8 @@ export class UsersService extends ItemsService {
386
392
  ? new Url(url).setQuery('token', token).toString()
387
393
  : new Url(env['PUBLIC_URL']).addPath('admin', 'reset-password').setQuery('token', token).toString();
388
394
  const subjectLine = subject ? subject : 'Password Reset Request';
389
- await mailService.send({
395
+ mailService
396
+ .send({
390
397
  to: user.email,
391
398
  subject: subjectLine,
392
399
  template: {
@@ -396,6 +403,9 @@ export class UsersService extends ItemsService {
396
403
  email: user.email,
397
404
  },
398
405
  },
406
+ })
407
+ .catch((error) => {
408
+ logger.error(error, `Could not send password reset mail`);
399
409
  });
400
410
  await stall(STALL_TIME, timeStart);
401
411
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@directus/api",
3
- "version": "18.0.0",
3
+ "version": "18.1.1",
4
4
  "description": "Directus is a real-time API and App dashboard for managing SQL database content",
5
5
  "keywords": [
6
6
  "directus",
@@ -59,7 +59,7 @@
59
59
  ],
60
60
  "dependencies": {
61
61
  "@authenio/samlify-node-xmllint": "2.0.0",
62
- "@aws-sdk/client-ses": "3.525.0",
62
+ "@aws-sdk/client-ses": "3.529.0",
63
63
  "@directus/format-title": "10.1.0",
64
64
  "@godaddy/terminus": "4.12.1",
65
65
  "@rollup/plugin-alias": "5.1.0",
@@ -120,7 +120,7 @@
120
120
  "nodemailer": "6.9.11",
121
121
  "object-hash": "3.0.0",
122
122
  "openapi3-ts": "4.2.2",
123
- "openid-client": "5.6.4",
123
+ "openid-client": "5.6.5",
124
124
  "ora": "8.0.1",
125
125
  "otplib": "12.0.1",
126
126
  "p-limit": "5.0.0",
@@ -130,7 +130,7 @@
130
130
  "pino-http": "9.0.0",
131
131
  "pino-http-print": "3.1.0",
132
132
  "pino-pretty": "10.3.1",
133
- "qs": "6.11.2",
133
+ "qs": "6.12.0",
134
134
  "rate-limiter-flexible": "5.0.0",
135
135
  "rollup": "4.12.0",
136
136
  "samlify": "2.8.11",
@@ -144,28 +144,28 @@
144
144
  "ws": "8.16.0",
145
145
  "zod": "3.22.4",
146
146
  "zod-validation-error": "3.0.3",
147
- "@directus/app": "11.0.0",
148
- "@directus/constants": "11.0.3",
149
- "@directus/env": "1.0.3",
147
+ "@directus/app": "11.0.2",
150
148
  "@directus/errors": "0.2.4",
151
- "@directus/extensions-registry": "1.0.0",
152
- "@directus/extensions": "1.0.0",
153
- "@directus/extensions-sdk": "11.0.0",
154
- "@directus/memory": "1.0.4",
149
+ "@directus/constants": "11.0.3",
150
+ "@directus/extensions": "1.0.1",
151
+ "@directus/extensions-sdk": "11.0.1",
152
+ "@directus/extensions-registry": "1.0.1",
155
153
  "@directus/pressure": "1.0.17",
154
+ "@directus/memory": "1.0.5",
156
155
  "@directus/schema": "11.0.1",
157
- "@directus/specs": "10.2.7",
158
156
  "@directus/storage": "10.0.11",
157
+ "@directus/specs": "10.2.7",
159
158
  "@directus/storage-driver-azure": "10.0.18",
160
- "@directus/storage-driver-cloudinary": "10.0.18",
159
+ "@directus/env": "1.0.4",
161
160
  "@directus/storage-driver-gcs": "10.0.18",
162
- "@directus/storage-driver-s3": "10.0.19",
161
+ "@directus/storage-driver-cloudinary": "10.0.18",
163
162
  "@directus/storage-driver-local": "10.0.18",
164
- "@directus/storage-driver-supabase": "1.0.10",
163
+ "@directus/storage-driver-s3": "10.0.19",
165
164
  "@directus/system-data": "1.0.1",
166
165
  "@directus/utils": "11.0.6",
167
- "directus": "10.10.0",
168
- "@directus/validation": "0.0.13"
166
+ "@directus/storage-driver-supabase": "1.0.10",
167
+ "@directus/validation": "0.0.13",
168
+ "directus": "10.10.2"
169
169
  },
170
170
  "devDependencies": {
171
171
  "@ngneat/falso": "7.2.0",
@@ -227,7 +227,7 @@
227
227
  "scripts": {
228
228
  "build": "tsc --project tsconfig.prod.json && copyfiles \"src/**/*.{yaml,liquid}\" -u 1 dist",
229
229
  "cli": "NODE_ENV=development SERVE_APP=false tsx src/cli/run.ts",
230
- "dev": "NODE_ENV=development SERVE_APP=true tsx watch --clear-screen=false src/start.ts",
230
+ "dev": "NODE_ENV=development SERVE_APP=true tsx watch --ignore extensions --clear-screen=false src/start.ts",
231
231
  "test": "vitest --watch=false"
232
232
  }
233
233
  }
@@ -1 +0,0 @@
1
- export declare function sanitizeError<T extends Error>(error: T): T;
@@ -1,7 +0,0 @@
1
- export function sanitizeError(error) {
2
- // clear the stack
3
- if (error.stack !== undefined) {
4
- delete error.stack;
5
- }
6
- return error;
7
- }