@directus/api 23.1.3 → 23.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/dist/app.js +7 -4
  2. package/dist/auth/drivers/openid.js +1 -1
  3. package/dist/controllers/activity.js +2 -88
  4. package/dist/controllers/comments.js +0 -7
  5. package/dist/controllers/tus.d.ts +0 -1
  6. package/dist/controllers/tus.js +0 -16
  7. package/dist/controllers/versions.js +1 -8
  8. package/dist/database/migrations/20240909A-separate-comments.js +1 -6
  9. package/dist/database/migrations/20240924A-migrate-legacy-comments.d.ts +3 -0
  10. package/dist/database/migrations/20240924A-migrate-legacy-comments.js +59 -0
  11. package/dist/database/migrations/20240924B-populate-versioning-deltas.d.ts +3 -0
  12. package/dist/database/migrations/20240924B-populate-versioning-deltas.js +32 -0
  13. package/dist/database/run-ast/utils/apply-parent-filters.js +4 -0
  14. package/dist/schedules/retention.d.ts +14 -0
  15. package/dist/schedules/retention.js +96 -0
  16. package/dist/{telemetry/lib/init-telemetry.d.ts → schedules/telemetry.d.ts} +2 -2
  17. package/dist/{telemetry/lib/init-telemetry.js → schedules/telemetry.js} +6 -6
  18. package/dist/schedules/tus.d.ts +6 -0
  19. package/dist/schedules/tus.js +23 -0
  20. package/dist/services/assets.js +4 -3
  21. package/dist/services/comments.d.ts +4 -22
  22. package/dist/services/comments.js +16 -252
  23. package/dist/services/graphql/index.d.ts +1 -2
  24. package/dist/services/graphql/index.js +1 -75
  25. package/dist/services/specifications.js +12 -1
  26. package/dist/services/users.js +1 -1
  27. package/dist/services/versions.d.ts +0 -1
  28. package/dist/services/versions.js +9 -29
  29. package/dist/telemetry/index.d.ts +0 -1
  30. package/dist/telemetry/index.js +0 -1
  31. package/dist/utils/apply-diff.js +15 -3
  32. package/dist/utils/get-service.js +1 -1
  33. package/dist/utils/get-snapshot-diff.js +17 -1
  34. package/dist/websocket/controllers/base.js +2 -1
  35. package/dist/websocket/controllers/graphql.js +2 -1
  36. package/package.json +18 -18
package/dist/app.js CHANGED
@@ -38,11 +38,14 @@ import serverRouter from './controllers/server.js';
38
38
  import settingsRouter from './controllers/settings.js';
39
39
  import sharesRouter from './controllers/shares.js';
40
40
  import translationsRouter from './controllers/translations.js';
41
- import { default as tusRouter, scheduleTusCleanup } from './controllers/tus.js';
41
+ import tusRouter from './controllers/tus.js';
42
42
  import usersRouter from './controllers/users.js';
43
43
  import utilsRouter from './controllers/utils.js';
44
44
  import versionsRouter from './controllers/versions.js';
45
45
  import webhooksRouter from './controllers/webhooks.js';
46
+ import retentionSchedule from './schedules/retention.js';
47
+ import telemetrySchedule from './schedules/telemetry.js';
48
+ import tusSchedule from './schedules/tus.js';
46
49
  import { isInstalled, validateDatabaseConnection, validateDatabaseExtensions, validateMigrations, } from './database/index.js';
47
50
  import emitter from './emitter.js';
48
51
  import { getExtensionManager } from './extensions/index.js';
@@ -57,7 +60,6 @@ import rateLimiterGlobal from './middleware/rate-limiter-global.js';
57
60
  import rateLimiter from './middleware/rate-limiter-ip.js';
58
61
  import sanitizeQuery from './middleware/sanitize-query.js';
59
62
  import schema from './middleware/schema.js';
60
- import { initTelemetry } from './telemetry/index.js';
61
63
  import { getConfigFromEnv } from './utils/get-config-from-env.js';
62
64
  import { Url } from './utils/url.js';
63
65
  import { validateStorage } from './utils/validate-storage.js';
@@ -246,8 +248,9 @@ export default async function createApp() {
246
248
  app.use(notFoundHandler);
247
249
  app.use(errorHandler);
248
250
  await emitter.emitInit('routes.after', { app });
249
- initTelemetry();
250
- scheduleTusCleanup();
251
+ await retentionSchedule();
252
+ await telemetrySchedule();
253
+ await tusSchedule();
251
254
  await emitter.emitInit('app.after', { app });
252
255
  return app;
253
256
  }
@@ -259,7 +259,7 @@ export function createOpenIDAuthRouter(providerName) {
259
259
  throw new InvalidPayloadError({ reason: `URL "${redirect}" can't be used to redirect after login` });
260
260
  }
261
261
  const token = jwt.sign({ verifier: codeVerifier, redirect, prompt }, getSecret(), {
262
- expiresIn: '5m',
262
+ expiresIn: (env[`AUTH_${providerName.toUpperCase()}_LOGIN_TIMEOUT`] ?? '5m'),
263
263
  issuer: 'directus',
264
264
  });
265
265
  res.cookie(`openid.${providerName}`, token, {
@@ -1,11 +1,8 @@
1
- import { ErrorCode, InvalidPayloadError, isDirectusError } from '@directus/errors';
2
1
  import express from 'express';
3
- import Joi from 'joi';
4
2
  import { respond } from '../middleware/respond.js';
5
3
  import useCollection from '../middleware/use-collection.js';
6
4
  import { validateBatch } from '../middleware/validate-batch.js';
7
5
  import { ActivityService } from '../services/activity.js';
8
- import { CommentsService } from '../services/comments.js';
9
6
  import { MetaService } from '../services/meta.js';
10
7
  import asyncHandler from '../utils/async-handler.js';
11
8
  const router = express.Router();
@@ -20,7 +17,6 @@ const readHandler = asyncHandler(async (req, res, next) => {
20
17
  schema: req.schema,
21
18
  });
22
19
  let result;
23
- let isComment;
24
20
  if (req.singleton) {
25
21
  result = await service.readSingleton(req.sanitizedQuery);
26
22
  }
@@ -28,24 +24,9 @@ const readHandler = asyncHandler(async (req, res, next) => {
28
24
  result = await service.readMany(req.body.keys, req.sanitizedQuery);
29
25
  }
30
26
  else {
31
- const sanitizedFilter = req.sanitizedQuery.filter;
32
- if (sanitizedFilter &&
33
- '_and' in sanitizedFilter &&
34
- Array.isArray(sanitizedFilter['_and']) &&
35
- sanitizedFilter['_and'].find((andItem) => 'action' in andItem && '_eq' in andItem['action'] && andItem['action']['_eq'] === 'comment')) {
36
- const commentsService = new CommentsService({
37
- accountability: req.accountability,
38
- schema: req.schema,
39
- serviceOrigin: 'activity',
40
- });
41
- result = await commentsService.readByQuery(req.sanitizedQuery);
42
- isComment = true;
43
- }
44
- else {
45
- result = await service.readByQuery(req.sanitizedQuery);
46
- }
27
+ result = await service.readByQuery(req.sanitizedQuery);
47
28
  }
48
- const meta = await metaService.getMetaForQuery(isComment ? 'directus_comments' : 'directus_activity', req.sanitizedQuery);
29
+ const meta = await metaService.getMetaForQuery('directus_activity', req.sanitizedQuery);
49
30
  res.locals['payload'] = {
50
31
  data: result,
51
32
  meta,
@@ -65,71 +46,4 @@ router.get('/:pk', asyncHandler(async (req, res, next) => {
65
46
  };
66
47
  return next();
67
48
  }), respond);
68
- const createCommentSchema = Joi.object({
69
- comment: Joi.string().required(),
70
- collection: Joi.string().required(),
71
- item: [Joi.number().required(), Joi.string().required()],
72
- });
73
- router.post('/comment', asyncHandler(async (req, res, next) => {
74
- const service = new CommentsService({
75
- accountability: req.accountability,
76
- schema: req.schema,
77
- serviceOrigin: 'activity',
78
- });
79
- const { error } = createCommentSchema.validate(req.body);
80
- if (error) {
81
- throw new InvalidPayloadError({ reason: error.message });
82
- }
83
- const primaryKey = await service.createOne(req.body);
84
- try {
85
- const record = await service.readOne(primaryKey, req.sanitizedQuery);
86
- res.locals['payload'] = {
87
- data: record || null,
88
- };
89
- }
90
- catch (error) {
91
- if (isDirectusError(error, ErrorCode.Forbidden)) {
92
- return next();
93
- }
94
- throw error;
95
- }
96
- return next();
97
- }), respond);
98
- const updateCommentSchema = Joi.object({
99
- comment: Joi.string().required(),
100
- });
101
- router.patch('/comment/:pk', asyncHandler(async (req, res, next) => {
102
- const commentsService = new CommentsService({
103
- accountability: req.accountability,
104
- schema: req.schema,
105
- serviceOrigin: 'activity',
106
- });
107
- const { error } = updateCommentSchema.validate(req.body);
108
- if (error) {
109
- throw new InvalidPayloadError({ reason: error.message });
110
- }
111
- const primaryKey = await commentsService.updateOne(req.params['pk'], req.body);
112
- try {
113
- const record = await commentsService.readOne(primaryKey, req.sanitizedQuery);
114
- res.locals['payload'] = {
115
- data: record || null,
116
- };
117
- }
118
- catch (error) {
119
- if (isDirectusError(error, ErrorCode.Forbidden)) {
120
- return next();
121
- }
122
- throw error;
123
- }
124
- return next();
125
- }), respond);
126
- router.delete('/comment/:pk', asyncHandler(async (req, _res, next) => {
127
- const commentsService = new CommentsService({
128
- accountability: req.accountability,
129
- schema: req.schema,
130
- serviceOrigin: 'activity',
131
- });
132
- await commentsService.deleteOne(req.params['pk']);
133
- return next();
134
- }), respond);
135
49
  export default router;
@@ -13,7 +13,6 @@ router.post('/', asyncHandler(async (req, res, next) => {
13
13
  const service = new CommentsService({
14
14
  accountability: req.accountability,
15
15
  schema: req.schema,
16
- serviceOrigin: 'comments',
17
16
  });
18
17
  const savedKeys = [];
19
18
  if (Array.isArray(req.body)) {
@@ -46,7 +45,6 @@ const readHandler = asyncHandler(async (req, res, next) => {
46
45
  const service = new CommentsService({
47
46
  accountability: req.accountability,
48
47
  schema: req.schema,
49
- serviceOrigin: 'comments',
50
48
  });
51
49
  const metaService = new MetaService({
52
50
  accountability: req.accountability,
@@ -69,7 +67,6 @@ router.get('/:pk', asyncHandler(async (req, res, next) => {
69
67
  const service = new CommentsService({
70
68
  accountability: req.accountability,
71
69
  schema: req.schema,
72
- serviceOrigin: 'comments',
73
70
  });
74
71
  const record = await service.readOne(req.params['pk'], req.sanitizedQuery);
75
72
  res.locals['payload'] = { data: record || null };
@@ -79,7 +76,6 @@ router.patch('/', validateBatch('update'), asyncHandler(async (req, res, next) =
79
76
  const service = new CommentsService({
80
77
  accountability: req.accountability,
81
78
  schema: req.schema,
82
- serviceOrigin: 'comments',
83
79
  });
84
80
  let keys = [];
85
81
  if (Array.isArray(req.body)) {
@@ -108,7 +104,6 @@ router.patch('/:pk', asyncHandler(async (req, res, next) => {
108
104
  const service = new CommentsService({
109
105
  accountability: req.accountability,
110
106
  schema: req.schema,
111
- serviceOrigin: 'comments',
112
107
  });
113
108
  const primaryKey = await service.updateOne(req.params['pk'], req.body);
114
109
  try {
@@ -127,7 +122,6 @@ router.delete('/', validateBatch('delete'), asyncHandler(async (req, _res, next)
127
122
  const service = new CommentsService({
128
123
  accountability: req.accountability,
129
124
  schema: req.schema,
130
- serviceOrigin: 'comments',
131
125
  });
132
126
  if (Array.isArray(req.body)) {
133
127
  await service.deleteMany(req.body);
@@ -145,7 +139,6 @@ router.delete('/:pk', asyncHandler(async (req, _res, next) => {
145
139
  const service = new CommentsService({
146
140
  accountability: req.accountability,
147
141
  schema: req.schema,
148
- serviceOrigin: 'comments',
149
142
  });
150
143
  await service.deleteOne(req.params['pk']);
151
144
  return next();
@@ -1,3 +1,2 @@
1
- export declare function scheduleTusCleanup(): void;
2
1
  declare const router: import("express-serve-static-core").Router;
3
2
  export default router;
@@ -1,11 +1,8 @@
1
1
  import { Router } from 'express';
2
- import { RESUMABLE_UPLOADS } from '../constants.js';
3
2
  import getDatabase from '../database/index.js';
4
3
  import { validateAccess } from '../permissions/modules/validate-access/validate-access.js';
5
4
  import { createTusServer } from '../services/tus/index.js';
6
5
  import asyncHandler from '../utils/async-handler.js';
7
- import { getSchema } from '../utils/get-schema.js';
8
- import { scheduleSynchronizedJob, validateCron } from '../utils/schedule.js';
9
6
  const mapAction = (method) => {
10
7
  switch (method) {
11
8
  case 'POST':
@@ -40,19 +37,6 @@ const handler = asyncHandler(async (req, res) => {
40
37
  await tusServer.handle(req, res);
41
38
  cleanupServer();
42
39
  });
43
- export function scheduleTusCleanup() {
44
- if (!RESUMABLE_UPLOADS.ENABLED)
45
- return;
46
- if (validateCron(RESUMABLE_UPLOADS.SCHEDULE)) {
47
- scheduleSynchronizedJob('tus-cleanup', RESUMABLE_UPLOADS.SCHEDULE, async () => {
48
- const [tusServer, cleanupServer] = await createTusServer({
49
- schema: await getSchema(),
50
- });
51
- await tusServer.cleanUpExpiredUploads();
52
- cleanupServer();
53
- });
54
- }
55
- }
56
40
  const router = Router();
57
41
  router.post('/', checkFileAccess, handler);
58
42
  router.patch('/:id', checkFileAccess, handler);
@@ -154,14 +154,7 @@ router.get('/:pk/compare', asyncHandler(async (req, res, next) => {
154
154
  });
155
155
  const version = await service.readOne(req.params['pk']);
156
156
  const { outdated, mainHash } = await service.verifyHash(version['collection'], version['item'], version['hash']);
157
- let current;
158
- if (version['delta']) {
159
- current = version['delta'];
160
- }
161
- else {
162
- const saves = await service.getVersionSavesById(version['id']);
163
- current = assign({}, ...saves);
164
- }
157
+ const current = assign({}, version['delta']);
165
158
  const main = await service.getMainItem(version['collection'], version['item']);
166
159
  res.locals['payload'] = { data: { outdated, mainHash, current, main } };
167
160
  return next();
@@ -2,12 +2,7 @@ import { Action } from '@directus/constants';
2
2
  export async function up(knex) {
3
3
  await knex.schema.createTable('directus_comments', (table) => {
4
4
  table.uuid('id').primary().notNullable();
5
- table
6
- .string('collection', 64)
7
- .notNullable()
8
- .references('collection')
9
- .inTable('directus_collections')
10
- .onDelete('CASCADE');
5
+ table.string('collection', 64).notNullable();
11
6
  table.string('item').notNullable();
12
7
  table.text('comment').notNullable();
13
8
  table.timestamp('date_created').defaultTo(knex.fn.now());
@@ -0,0 +1,3 @@
1
+ import type { Knex } from 'knex';
2
+ export declare function up(knex: Knex): Promise<void>;
3
+ export declare function down(knex: Knex): Promise<void>;
@@ -0,0 +1,59 @@
1
+ import { Action } from '@directus/constants';
2
+ import { randomUUID } from 'node:crypto';
3
+ export async function up(knex) {
4
+ // remove foreign key constraint for projects already migrated to retentions-p1
5
+ try {
6
+ await knex.schema.alterTable('directus_comments', (table) => {
7
+ table.dropForeign('collection');
8
+ });
9
+ }
10
+ catch {
11
+ // ignore
12
+ }
13
+ const rowsLimit = 50;
14
+ let hasMore = true;
15
+ while (hasMore) {
16
+ const legacyComments = await knex
17
+ .select('*')
18
+ .from('directus_activity')
19
+ .where('action', '=', Action.COMMENT)
20
+ .limit(rowsLimit);
21
+ if (legacyComments.length === 0) {
22
+ hasMore = false;
23
+ break;
24
+ }
25
+ await knex.transaction(async (trx) => {
26
+ for (const legacyComment of legacyComments) {
27
+ let primaryKey;
28
+ // Migrate legacy comment
29
+ if (legacyComment['action'] === Action.COMMENT) {
30
+ primaryKey = randomUUID();
31
+ await trx('directus_comments').insert({
32
+ id: primaryKey,
33
+ collection: legacyComment.collection,
34
+ item: legacyComment.item,
35
+ comment: legacyComment.comment,
36
+ user_created: legacyComment.user,
37
+ date_created: legacyComment.timestamp,
38
+ });
39
+ await trx('directus_activity')
40
+ .update({
41
+ action: Action.CREATE,
42
+ collection: 'directus_comments',
43
+ item: primaryKey,
44
+ comment: null,
45
+ })
46
+ .where('id', '=', legacyComment.id);
47
+ }
48
+ }
49
+ });
50
+ }
51
+ await knex.schema.alterTable('directus_activity', (table) => {
52
+ table.dropColumn('comment');
53
+ });
54
+ }
55
+ export async function down(knex) {
56
+ await knex.schema.alterTable('directus_activity', (table) => {
57
+ table.text('comment');
58
+ });
59
+ }
@@ -0,0 +1,3 @@
1
+ import type { Knex } from 'knex';
2
+ export declare function up(knex: Knex): Promise<void>;
3
+ export declare function down(): Promise<void>;
@@ -0,0 +1,32 @@
1
+ import { parseJSON } from '@directus/utils';
2
+ import { assign } from 'lodash-es';
3
+ export async function up(knex) {
4
+ const rowsLimit = 50;
5
+ let hasMore = true;
6
+ while (hasMore) {
7
+ const missingDeltaVersions = await knex.select('id').from('directus_versions').whereNull('delta').limit(rowsLimit);
8
+ if (missingDeltaVersions.length === 0) {
9
+ hasMore = false;
10
+ break;
11
+ }
12
+ await knex.transaction(async (trx) => {
13
+ for (const missingDeltaVersion of missingDeltaVersions) {
14
+ const revisions = await trx
15
+ .select('delta')
16
+ .from('directus_revisions')
17
+ .where('version', '=', missingDeltaVersion.id)
18
+ .orderBy('id');
19
+ const deltas = revisions.map((revision) => typeof revision.delta === 'string' ? parseJSON(revision.delta) : revision.delta ?? {});
20
+ const consolidatedDelta = assign({}, ...deltas);
21
+ await trx('directus_versions')
22
+ .update({
23
+ delta: JSON.stringify(consolidatedDelta),
24
+ })
25
+ .where('id', '=', missingDeltaVersion.id);
26
+ }
27
+ });
28
+ }
29
+ }
30
+ export async function down() {
31
+ // No down migration required
32
+ }
@@ -33,6 +33,10 @@ export function applyParentFilters(schema, nestedCollectionNodes, parentItem) {
33
33
  const foreignField = nestedNode.relation.field;
34
34
  const foreignIds = uniq(parentItems.map((res) => res[nestedNode.parentKey])).filter((id) => !isNil(id));
35
35
  merge(nestedNode, { query: { filter: { [foreignField]: { _in: foreignIds } } } });
36
+ if (nestedNode.relation.meta?.junction_field) {
37
+ const junctionField = nestedNode.relation.meta.junction_field;
38
+ merge(nestedNode, { query: { filter: { [junctionField]: { _nnull: true } } } });
39
+ }
36
40
  }
37
41
  else if (nestedNode.type === 'a2o') {
38
42
  const keysPerCollection = {};
@@ -0,0 +1,14 @@
1
+ import type { Knex } from 'knex';
2
+ export interface RetentionTask {
3
+ collection: string;
4
+ where?: readonly [string, string, Knex.Value | null];
5
+ join?: readonly [string, string, string];
6
+ timeframe: number | undefined;
7
+ }
8
+ export declare function handleRetentionJob(): Promise<void>;
9
+ /**
10
+ * Schedule the retention tracking
11
+ *
12
+ * @returns Whether or not retention has been initialized
13
+ */
14
+ export default function schedule(): Promise<boolean>;
@@ -0,0 +1,96 @@
1
+ import { Action } from '@directus/constants';
2
+ import { useEnv } from '@directus/env';
3
+ import { toBoolean } from '@directus/utils';
4
+ import getDatabase from '../database/index.js';
5
+ import { useLock } from '../lock/index.js';
6
+ import { useLogger } from '../logger/index.js';
7
+ import { getMilliseconds } from '../utils/get-milliseconds.js';
8
+ import { scheduleSynchronizedJob, validateCron } from '../utils/schedule.js';
9
+ const env = useEnv();
10
+ const retentionLockKey = 'schedule--data-retention';
11
+ const retentionLockTimeout = 10 * 60 * 1000; // 10 mins
12
+ const ACTIVITY_RETENTION_TIMEFRAME = getMilliseconds(env['ACTIVITY_RETENTION']);
13
+ const FLOW_LOGS_RETENTION_TIMEFRAME = getMilliseconds(env['FLOW_LOGS_RETENTION']);
14
+ const REVISIONS_RETENTION_TIMEFRAME = getMilliseconds(env['REVISIONS_RETENTION']);
15
+ const retentionTasks = [
16
+ {
17
+ collection: 'directus_activity',
18
+ where: ['action', '!=', Action.RUN],
19
+ timeframe: ACTIVITY_RETENTION_TIMEFRAME,
20
+ },
21
+ {
22
+ collection: 'directus_activity',
23
+ where: ['action', '=', Action.RUN],
24
+ timeframe: FLOW_LOGS_RETENTION_TIMEFRAME,
25
+ },
26
+ ];
27
+ export async function handleRetentionJob() {
28
+ const database = getDatabase();
29
+ const logger = useLogger();
30
+ const lock = useLock();
31
+ const batch = Number(env['RETENTION_BATCH']);
32
+ const lockTime = await lock.get(retentionLockKey);
33
+ const now = Date.now();
34
+ if (lockTime && Number(lockTime) > now - retentionLockTimeout) {
35
+ // ensure only one connected process
36
+ return;
37
+ }
38
+ await lock.set(retentionLockKey, Date.now());
39
+ for (const task of retentionTasks) {
40
+ let count = 0;
41
+ if (task.timeframe === undefined) {
42
+ // skip disabled tasks
43
+ continue;
44
+ }
45
+ do {
46
+ const subquery = database
47
+ .queryBuilder()
48
+ .select(`${task.collection}.id`)
49
+ .from(task.collection)
50
+ .where('timestamp', '<', Date.now() - task.timeframe)
51
+ .limit(batch);
52
+ if (task.where) {
53
+ subquery.where(...task.where);
54
+ }
55
+ if (task.join) {
56
+ subquery.join(...task.join);
57
+ }
58
+ try {
59
+ count = await database(task.collection).where('id', 'in', subquery).delete();
60
+ }
61
+ catch (error) {
62
+ logger.error(error, `Retention failed for Collection ${task.collection}`);
63
+ break;
64
+ }
65
+ // Update lock time to prevent concurrent runs
66
+ await lock.set(retentionLockKey, Date.now());
67
+ } while (count >= batch);
68
+ }
69
+ await lock.delete(retentionLockKey);
70
+ }
71
+ /**
72
+ * Schedule the retention tracking
73
+ *
74
+ * @returns Whether or not retention has been initialized
75
+ */
76
+ export default async function schedule() {
77
+ const env = useEnv();
78
+ if (!toBoolean(env['RETENTION_ENABLED'])) {
79
+ return false;
80
+ }
81
+ if (!validateCron(String(env['RETENTION_SCHEDULE']))) {
82
+ return false;
83
+ }
84
+ if (!ACTIVITY_RETENTION_TIMEFRAME ||
85
+ (ACTIVITY_RETENTION_TIMEFRAME &&
86
+ REVISIONS_RETENTION_TIMEFRAME &&
87
+ ACTIVITY_RETENTION_TIMEFRAME > REVISIONS_RETENTION_TIMEFRAME)) {
88
+ retentionTasks.push({
89
+ collection: 'directus_revisions',
90
+ join: ['directus_activity', 'directus_revisions.activity', 'directus_activity.id'],
91
+ timeframe: REVISIONS_RETENTION_TIMEFRAME,
92
+ });
93
+ }
94
+ scheduleSynchronizedJob('retention', String(env['RETENTION_SCHEDULE']), handleRetentionJob);
95
+ return true;
96
+ }
@@ -3,9 +3,9 @@
3
3
  */
4
4
  export declare const jobCallback: () => void;
5
5
  /**
6
- * Initialize the telemetry tracking. Will generate a report on start, and set a schedule to report
6
+ * Schedule the telemetry tracking. Will generate a report on start, and set a schedule to report
7
7
  * every 6 hours
8
8
  *
9
9
  * @returns Whether or not telemetry has been initialized
10
10
  */
11
- export declare const initTelemetry: () => Promise<boolean>;
11
+ export default function schedule(): Promise<boolean>;
@@ -1,8 +1,8 @@
1
1
  import { useEnv } from '@directus/env';
2
2
  import { toBoolean } from '@directus/utils';
3
- import { getCache } from '../../cache.js';
4
- import { scheduleSynchronizedJob } from '../../utils/schedule.js';
5
- import { track } from './track.js';
3
+ import { getCache } from '../cache.js';
4
+ import { scheduleSynchronizedJob } from '../utils/schedule.js';
5
+ import { track } from '../telemetry/index.js';
6
6
  /**
7
7
  * Exported to be able to test the anonymous callback function
8
8
  */
@@ -10,12 +10,12 @@ export const jobCallback = () => {
10
10
  track();
11
11
  };
12
12
  /**
13
- * Initialize the telemetry tracking. Will generate a report on start, and set a schedule to report
13
+ * Schedule the telemetry tracking. Will generate a report on start, and set a schedule to report
14
14
  * every 6 hours
15
15
  *
16
16
  * @returns Whether or not telemetry has been initialized
17
17
  */
18
- export const initTelemetry = async () => {
18
+ export default async function schedule() {
19
19
  const env = useEnv();
20
20
  if (toBoolean(env['TELEMETRY']) === false)
21
21
  return false;
@@ -27,4 +27,4 @@ export const initTelemetry = async () => {
27
27
  // Don't flush the lock. We want to debounce these calls across containers on startup
28
28
  }
29
29
  return true;
30
- };
30
+ }
@@ -0,0 +1,6 @@
1
+ /**
2
+ * Schedule the tus cleanup
3
+ *
4
+ * @returns Whether or not tus cleanup has been initialized
5
+ */
6
+ export default function schedule(): Promise<boolean>;
@@ -0,0 +1,23 @@
1
+ import { RESUMABLE_UPLOADS } from '../constants.js';
2
+ import { getSchema } from '../utils/get-schema.js';
3
+ import { createTusServer } from '../services/tus/index.js';
4
+ import { scheduleSynchronizedJob, validateCron } from '../utils/schedule.js';
5
+ /**
6
+ * Schedule the tus cleanup
7
+ *
8
+ * @returns Whether or not tus cleanup has been initialized
9
+ */
10
+ export default async function schedule() {
11
+ if (!RESUMABLE_UPLOADS.ENABLED)
12
+ return false;
13
+ if (validateCron(RESUMABLE_UPLOADS.SCHEDULE)) {
14
+ scheduleSynchronizedJob('tus-cleanup', RESUMABLE_UPLOADS.SCHEDULE, async () => {
15
+ const [tusServer, cleanupServer] = await createTusServer({
16
+ schema: await getSchema(),
17
+ });
18
+ await tusServer.cleanUpExpiredUploads();
19
+ cleanupServer();
20
+ });
21
+ }
22
+ return true;
23
+ }
@@ -87,6 +87,8 @@ export class AssetsService {
87
87
  }
88
88
  const type = file.type;
89
89
  const transforms = transformation ? TransformationUtils.resolvePreset(transformation, file) : [];
90
+ const modifiedOn = file.modified_on ? new Date(file.modified_on) : undefined;
91
+ const version = modifiedOn ? (modifiedOn.getTime() / 1000).toFixed() : undefined;
90
92
  if (type && transforms.length > 0 && SUPPORTED_IMAGE_TRANSFORM_FORMATS.includes(type)) {
91
93
  const maybeNewFormat = TransformationUtils.maybeExtractFormat(transforms);
92
94
  const assetFilename = path.basename(file.filename_disk, path.extname(file.filename_disk)) +
@@ -121,7 +123,6 @@ export class AssetsService {
121
123
  reason: 'Server too busy',
122
124
  });
123
125
  }
124
- const version = file.modified_on !== undefined ? String(Math.round(new Date(file.modified_on).getTime() / 1000)) : undefined;
125
126
  const readStream = await storage.location(file.storage).read(file.filename_disk, { range, version });
126
127
  const transformer = getSharpInstance();
127
128
  transformer.timeout({
@@ -152,13 +153,13 @@ export class AssetsService {
152
153
  }
153
154
  }
154
155
  return {
155
- stream: await storage.location(file.storage).read(assetFilename, { range }),
156
+ stream: await storage.location(file.storage).read(assetFilename, { range, version }),
156
157
  stat: await storage.location(file.storage).stat(assetFilename),
157
158
  file,
158
159
  };
159
160
  }
160
161
  else {
161
- const readStream = await storage.location(file.storage).read(file.filename_disk, { range });
162
+ const readStream = await storage.location(file.storage).read(file.filename_disk, { range, version });
162
163
  const stat = await storage.location(file.storage).stat(file.filename_disk);
163
164
  return { stream: readStream, file, stat };
164
165
  }
@@ -1,31 +1,13 @@
1
- import type { Comment, Item, PrimaryKey, Query } from '@directus/types';
1
+ import type { Comment, PrimaryKey } from '@directus/types';
2
2
  import type { AbstractServiceOptions, MutationOptions } from '../types/index.js';
3
- import { ActivityService } from './activity.js';
4
- import { ItemsService, type QueryOptions } from './items.js';
3
+ import { ItemsService } from './items.js';
5
4
  import { NotificationsService } from './notifications.js';
6
5
  import { UsersService } from './users.js';
7
- type serviceOrigin = 'activity' | 'comments';
8
6
  export declare class CommentsService extends ItemsService {
9
- activityService: ActivityService;
10
7
  notificationsService: NotificationsService;
11
8
  usersService: UsersService;
12
- serviceOrigin: serviceOrigin;
13
- constructor(options: AbstractServiceOptions & {
14
- serviceOrigin: serviceOrigin;
15
- });
16
- readOne(key: PrimaryKey, query?: Query, opts?: QueryOptions): Promise<Item>;
17
- readByQuery(query: Query, opts?: QueryOptions): Promise<Item[]>;
18
- readMany(keys: PrimaryKey[], query?: Query, opts?: QueryOptions): Promise<Item[]>;
9
+ constructor(options: AbstractServiceOptions);
19
10
  createOne(data: Partial<Comment>, opts?: MutationOptions): Promise<PrimaryKey>;
20
- updateByQuery(query: Query, data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey[]>;
21
- updateMany(keys: PrimaryKey[], data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey[]>;
22
- updateOne(key: PrimaryKey, data: Partial<Item>, opts?: MutationOptions): Promise<PrimaryKey>;
23
- deleteByQuery(query: Query, opts?: MutationOptions): Promise<PrimaryKey[]>;
24
- deleteMany(keys: PrimaryKey[], opts?: MutationOptions): Promise<PrimaryKey[]>;
11
+ updateOne(key: PrimaryKey, data: Partial<Comment>, opts?: MutationOptions): Promise<PrimaryKey>;
25
12
  deleteOne(key: PrimaryKey, opts?: MutationOptions): Promise<PrimaryKey>;
26
- private processPrimaryKeys;
27
- migrateLegacyComment(activityPk: PrimaryKey): Promise<PrimaryKey>;
28
- generateQuery(type: serviceOrigin, originalQuery: Query): Query;
29
- private sortLegacyResults;
30
13
  }
31
- export {};