@openneuro/server 4.46.0 → 4.47.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/package.json +4 -4
  2. package/src/config.ts +2 -0
  3. package/src/datalad/__tests__/contributors.spec.ts +1 -0
  4. package/src/datalad/__tests__/dataRetentionNotifications.spec.ts +315 -0
  5. package/src/datalad/__tests__/dataset.spec.ts +3 -0
  6. package/src/datalad/dataRetentionNotifications.ts +160 -0
  7. package/src/datalad/dataset.ts +0 -2
  8. package/src/graphql/resolvers/brainInitiative.ts +16 -6
  9. package/src/graphql/utils/file.ts +1 -1
  10. package/src/libs/authentication/jwt.ts +6 -3
  11. package/src/libs/email/templates/__tests__/__snapshots__/comment-created.spec.ts.snap +2 -2
  12. package/src/libs/email/templates/__tests__/__snapshots__/dataset-deleted.spec.ts.snap +2 -2
  13. package/src/libs/email/templates/__tests__/__snapshots__/owner-unsubscribed.spec.ts.snap +2 -2
  14. package/src/libs/email/templates/__tests__/__snapshots__/snapshot-created.spec.ts.snap +2 -2
  15. package/src/libs/email/templates/__tests__/__snapshots__/snapshot-reminder.spec.ts.snap +2 -2
  16. package/src/libs/email/templates/comment-created.ts +2 -2
  17. package/src/libs/email/templates/dataset-deleted.ts +2 -2
  18. package/src/libs/email/templates/dataset-import-failed.ts +2 -2
  19. package/src/libs/email/templates/dataset-imported.ts +2 -2
  20. package/src/libs/email/templates/draft-retention-deletion.ts +67 -0
  21. package/src/libs/email/templates/draft-retention-warning.ts +70 -0
  22. package/src/libs/email/templates/owner-unsubscribed.ts +2 -2
  23. package/src/libs/email/templates/snapshot-created.ts +2 -2
  24. package/src/libs/email/templates/snapshot-reminder.ts +2 -2
  25. package/src/models/dataRetention.ts +34 -0
  26. package/src/models/schedulerState.ts +23 -0
  27. package/src/queues/consumer.ts +29 -0
  28. package/src/queues/producer-methods.ts +14 -0
  29. package/src/queues/queue-schedule.ts +81 -0
  30. package/src/queues/queues.ts +46 -20
  31. package/src/queues/setup.ts +14 -3
  32. package/src/server.ts +1 -1
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@openneuro/server",
3
- "version": "4.46.0",
3
+ "version": "4.47.0",
4
4
  "description": "Core service for the OpenNeuro platform.",
5
5
  "license": "MIT",
6
6
  "main": "src/server.js",
@@ -21,7 +21,7 @@
21
21
  "@elastic/elasticsearch": "8.13.1",
22
22
  "@graphql-tools/schema": "^10.0.0",
23
23
  "@keyv/redis": "^4.5.0",
24
- "@openneuro/search": "^4.46.0",
24
+ "@openneuro/search": "^4.47.0",
25
25
  "@sentry/node": "^10.37.0",
26
26
  "@sentry/profiling-node": "^10.37.0",
27
27
  "base64url": "^3.0.0",
@@ -36,7 +36,7 @@
36
36
  "graphql-compose": "9.0.10",
37
37
  "graphql-iso-date": "^3.6.1",
38
38
  "graphql-tools": "9.0.0",
39
- "immutable": "^3.8.2",
39
+ "immutable": "^4.3.8",
40
40
  "ioredis": "^5.6.1",
41
41
  "js-yaml": "^4.1.0",
42
42
  "jsdom": "24.0.0",
@@ -89,5 +89,5 @@
89
89
  "publishConfig": {
90
90
  "access": "public"
91
91
  },
92
- "gitHead": "b40f6f5b597cbb722d09497182059a5157abf110"
92
+ "gitHead": "ce9d1b3598748ab7a9510e6dc0f50422088723a1"
93
93
  }
package/src/config.ts CHANGED
@@ -1,4 +1,5 @@
1
1
  const config = {
2
+ // Base URL for the site (e.g. `https://openneuro.org`)
2
3
  url: process.env.CRN_SERVER_URL,
3
4
  port: 8111,
4
5
  apiPrefix: "/crn/",
@@ -8,6 +9,7 @@ const config = {
8
9
  "Access-Control-Allow-Methods": "GET, POST, OPTIONS, PUT, PATCH, DELETE",
9
10
  "Access-Control-Allow-Headers": "content-type, Authorization",
10
11
  },
12
+ // Deprecated analysis feature flag
11
13
  analysis: {
12
14
  enabled: process.env.ANALYSIS_ENABLED,
13
15
  },
@@ -6,6 +6,7 @@ import { fileUrl } from "../files"
6
6
  import { datasetOrSnapshot } from "../../utils/datasetOrSnapshot"
7
7
  import { contributors } from "../contributors"
8
8
 
9
+ vi.mock("../../config.ts")
9
10
  vi.mock("../../libs/authentication/jwt", () => ({
10
11
  sign: vi.fn(() => "mock_jwt_token"),
11
12
  verify: vi.fn(() => ({ userId: "mock_user_id" })),
@@ -0,0 +1,315 @@
1
+ /* eslint-disable @typescript-eslint/no-explicit-any */
2
+ import { vi } from "vitest"
3
+
4
+ vi.mock("ioredis")
5
+ vi.mock("../../config.ts")
6
+ vi.mock("../../libs/notifications.ts", () => ({
7
+ default: { send: vi.fn() },
8
+ }))
9
+ vi.mock("../../libs/email/templates/draft-retention-warning", () => ({
10
+ draftRetentionWarning: vi.fn(() => "<html>warning</html>"),
11
+ }))
12
+ vi.mock("../../libs/email/templates/draft-retention-deletion", () => ({
13
+ draftRetentionDeletion: vi.fn(() => "<html>deletion</html>"),
14
+ }))
15
+ vi.mock("../../libs/email/templates/snapshot-reminder", () => ({
16
+ snapshotReminder: vi.fn(() => "<html>snapshot</html>"),
17
+ }))
18
+
19
+ import { MongoMemoryServer } from "mongodb-memory-server"
20
+ import { connect, disconnect } from "mongoose"
21
+ import notifications from "../../libs/notifications"
22
+ import DataRetention from "../../models/dataRetention"
23
+ import Permission from "../../models/permission"
24
+ import User from "../../models/user"
25
+ import { checkDataRetentionNotifications } from "../dataRetentionNotifications"
26
+ import * as draftModule from "../draft"
27
+ import * as snapshotsModule from "../snapshots"
28
+
29
+ const DAY = 24 * 60 * 60 * 1000
30
+
31
+ const TEST_DATASET = "ds000001"
32
+ const TEST_HEXSHA = "abc123"
33
+ const TEST_USER = {
34
+ id: "user1",
35
+ email: "test@example.com",
36
+ name: "Test User",
37
+ }
38
+
39
+ function daysAgo(days: number): Date {
40
+ return new Date(Date.now() - days * DAY)
41
+ }
42
+
43
+ describe("checkDataRetentionNotifications", () => {
44
+ let mongod: MongoMemoryServer
45
+
46
+ beforeAll(async () => {
47
+ mongod = await MongoMemoryServer.create()
48
+ await connect(mongod.getUri())
49
+ })
50
+
51
+ afterAll(async () => {
52
+ await disconnect()
53
+ await mongod.stop()
54
+ })
55
+
56
+ beforeEach(async () => {
57
+ await DataRetention.deleteMany({})
58
+ await Permission.deleteMany({})
59
+ await User.deleteMany({})
60
+ vi.mocked(notifications.send).mockClear()
61
+
62
+ // Seed a user with write permission
63
+ await User.create(TEST_USER)
64
+ await Permission.create({
65
+ datasetId: TEST_DATASET,
66
+ userId: TEST_USER.id,
67
+ level: "rw",
68
+ })
69
+ })
70
+
71
+ function mockDraft(modified: Date, hexsha = TEST_HEXSHA) {
72
+ vi.spyOn(draftModule, "getDraftInfo").mockResolvedValue({
73
+ modified,
74
+ hexsha,
75
+ } as any)
76
+ }
77
+
78
+ function mockSnapshots(snapshots: { hexsha: string }[] = []) {
79
+ vi.spyOn(snapshotsModule, "getSnapshots").mockResolvedValue(
80
+ snapshots as any,
81
+ )
82
+ }
83
+
84
+ it("does nothing when draft matches the latest snapshot", async () => {
85
+ mockDraft(daysAgo(30), TEST_HEXSHA)
86
+ mockSnapshots([{ hexsha: TEST_HEXSHA }])
87
+
88
+ await checkDataRetentionNotifications(TEST_DATASET)
89
+ expect(notifications.send).not.toHaveBeenCalled()
90
+ })
91
+
92
+ it("does nothing when draft is less than 1 day old", async () => {
93
+ mockDraft(new Date()) // just now
94
+ mockSnapshots([])
95
+
96
+ await checkDataRetentionNotifications(TEST_DATASET)
97
+ expect(notifications.send).not.toHaveBeenCalled()
98
+ })
99
+
100
+ it("sends snapshot reminder after 1 day with no snapshot", async () => {
101
+ mockDraft(daysAgo(2))
102
+ mockSnapshots([])
103
+
104
+ await checkDataRetentionNotifications(TEST_DATASET)
105
+ expect(notifications.send).toHaveBeenCalledTimes(1)
106
+ expect(notifications.send).toHaveBeenCalledWith(
107
+ expect.objectContaining({
108
+ _id: expect.stringContaining("no_snapshot_reminder"),
109
+ }),
110
+ )
111
+ })
112
+
113
+ it("skips snapshot reminder when draft is already 14+ days old", async () => {
114
+ mockDraft(daysAgo(15))
115
+ mockSnapshots([])
116
+
117
+ await checkDataRetentionNotifications(TEST_DATASET)
118
+ // Should send only the 14-day retention warning, not the snapshot reminder
119
+ expect(notifications.send).toHaveBeenCalledTimes(1)
120
+ expect(notifications.send).toHaveBeenCalledWith(
121
+ expect.objectContaining({
122
+ _id: expect.stringContaining("retention_14day"),
123
+ }),
124
+ )
125
+ })
126
+
127
+ it("sends 14-day warning when draft is 14+ days old", async () => {
128
+ mockDraft(daysAgo(15))
129
+ mockSnapshots([{ hexsha: "other" }])
130
+
131
+ await checkDataRetentionNotifications(TEST_DATASET)
132
+ expect(notifications.send).toHaveBeenCalledTimes(1)
133
+ expect(notifications.send).toHaveBeenCalledWith(
134
+ expect.objectContaining({
135
+ _id: expect.stringContaining("retention_14day"),
136
+ }),
137
+ )
138
+ })
139
+
140
+ it("sends notices in order even when draft is already past 28 days", async () => {
141
+ mockDraft(daysAgo(35))
142
+ mockSnapshots([{ hexsha: "other" }])
143
+
144
+ // First call: should send 14-day warning, NOT deletion
145
+ await checkDataRetentionNotifications(TEST_DATASET)
146
+ expect(notifications.send).toHaveBeenCalledTimes(1)
147
+ expect(notifications.send).toHaveBeenCalledWith(
148
+ expect.objectContaining({
149
+ _id: expect.stringContaining("retention_14day"),
150
+ }),
151
+ )
152
+
153
+ const record = await DataRetention.findOne({ datasetId: TEST_DATASET })
154
+ .exec()
155
+ expect(record.notifiedAt14Days).toBeTruthy()
156
+ expect(record.notifiedAt7Days).toBeNull()
157
+ expect(record.notifiedAtDeletion).toBeNull()
158
+ })
159
+
160
+ it("does not send 7-day warning until 7 days after 14-day notice", async () => {
161
+ mockDraft(daysAgo(35))
162
+ mockSnapshots([{ hexsha: "other" }])
163
+
164
+ // Seed a 14-day notice sent only 3 days ago
165
+ await DataRetention.create({
166
+ datasetId: TEST_DATASET,
167
+ hexsha: TEST_HEXSHA,
168
+ notifiedAt14Days: daysAgo(3),
169
+ })
170
+
171
+ await checkDataRetentionNotifications(TEST_DATASET)
172
+ // No new notice should be sent (14-day already sent, 7-day not due yet)
173
+ expect(notifications.send).not.toHaveBeenCalled()
174
+ })
175
+
176
+ it("sends 7-day warning 7 days after 14-day notice", async () => {
177
+ mockDraft(daysAgo(35))
178
+ mockSnapshots([{ hexsha: "other" }])
179
+
180
+ // Seed a 14-day notice sent 8 days ago
181
+ await DataRetention.create({
182
+ datasetId: TEST_DATASET,
183
+ hexsha: TEST_HEXSHA,
184
+ notifiedAt14Days: daysAgo(8),
185
+ })
186
+
187
+ await checkDataRetentionNotifications(TEST_DATASET)
188
+ expect(notifications.send).toHaveBeenCalledTimes(1)
189
+ expect(notifications.send).toHaveBeenCalledWith(
190
+ expect.objectContaining({
191
+ _id: expect.stringContaining("retention_7day"),
192
+ }),
193
+ )
194
+ })
195
+
196
+ it("sends deletion notice 7 days after 7-day warning", async () => {
197
+ mockDraft(daysAgo(40))
198
+ mockSnapshots([{ hexsha: "other" }])
199
+
200
+ // Seed both prior notices
201
+ await DataRetention.create({
202
+ datasetId: TEST_DATASET,
203
+ hexsha: TEST_HEXSHA,
204
+ notifiedAt14Days: daysAgo(15),
205
+ notifiedAt7Days: daysAgo(8),
206
+ })
207
+
208
+ await checkDataRetentionNotifications(TEST_DATASET)
209
+ expect(notifications.send).toHaveBeenCalledTimes(1)
210
+ expect(notifications.send).toHaveBeenCalledWith(
211
+ expect.objectContaining({
212
+ _id: expect.stringContaining("retention_deletion"),
213
+ }),
214
+ )
215
+ })
216
+
217
+ it("does not send deletion notice until 7 days after 7-day warning", async () => {
218
+ mockDraft(daysAgo(40))
219
+ mockSnapshots([{ hexsha: "other" }])
220
+
221
+ await DataRetention.create({
222
+ datasetId: TEST_DATASET,
223
+ hexsha: TEST_HEXSHA,
224
+ notifiedAt14Days: daysAgo(10),
225
+ notifiedAt7Days: daysAgo(3),
226
+ })
227
+
228
+ await checkDataRetentionNotifications(TEST_DATASET)
229
+ expect(notifications.send).not.toHaveBeenCalled()
230
+ })
231
+
232
+ it("resets retention notices when draft hexsha changes", async () => {
233
+ mockDraft(daysAgo(15), "new_hexsha")
234
+ mockSnapshots([{ hexsha: "other" }])
235
+
236
+ // Pre-existing record with old hexsha and all notices sent
237
+ await DataRetention.create({
238
+ datasetId: TEST_DATASET,
239
+ hexsha: "old_hexsha",
240
+ notifiedAt14Days: daysAgo(10),
241
+ notifiedAt7Days: daysAgo(3),
242
+ notifiedAtDeletion: daysAgo(1),
243
+ })
244
+
245
+ await checkDataRetentionNotifications(TEST_DATASET)
246
+
247
+ // Should send 14-day notice again (reset due to new hexsha)
248
+ expect(notifications.send).toHaveBeenCalledTimes(1)
249
+ expect(notifications.send).toHaveBeenCalledWith(
250
+ expect.objectContaining({
251
+ _id: expect.stringContaining("retention_14day"),
252
+ }),
253
+ )
254
+
255
+ const record = await DataRetention.findOne({ datasetId: TEST_DATASET })
256
+ .exec()
257
+ expect(record.hexsha).toBe("new_hexsha")
258
+ expect(record.notifiedAt14Days).toBeTruthy()
259
+ expect(record.notifiedAt7Days).toBeNull()
260
+ expect(record.notifiedAtDeletion).toBeNull()
261
+ })
262
+
263
+ it("walks through the full notification sequence with real delays", async () => {
264
+ mockSnapshots([{ hexsha: "other" }])
265
+
266
+ // Day 14: first warning
267
+ mockDraft(daysAgo(15))
268
+ await checkDataRetentionNotifications(TEST_DATASET)
269
+ expect(notifications.send).toHaveBeenCalledTimes(1)
270
+ expect(notifications.send).toHaveBeenLastCalledWith(
271
+ expect.objectContaining({
272
+ _id: expect.stringContaining("retention_14day"),
273
+ }),
274
+ )
275
+
276
+ // Simulate 7 days passing by backdating the 14-day notice
277
+ await DataRetention.updateOne(
278
+ { datasetId: TEST_DATASET },
279
+ { notifiedAt14Days: daysAgo(8) },
280
+ ).exec()
281
+ vi.mocked(notifications.send).mockClear()
282
+
283
+ // Day 21: 7-day warning
284
+ mockDraft(daysAgo(22))
285
+ await checkDataRetentionNotifications(TEST_DATASET)
286
+ expect(notifications.send).toHaveBeenCalledTimes(1)
287
+ expect(notifications.send).toHaveBeenLastCalledWith(
288
+ expect.objectContaining({
289
+ _id: expect.stringContaining("retention_7day"),
290
+ }),
291
+ )
292
+
293
+ // Simulate 7 more days passing
294
+ await DataRetention.updateOne(
295
+ { datasetId: TEST_DATASET },
296
+ { notifiedAt7Days: daysAgo(8) },
297
+ ).exec()
298
+ vi.mocked(notifications.send).mockClear()
299
+
300
+ // Day 28: deletion notice
301
+ mockDraft(daysAgo(30))
302
+ await checkDataRetentionNotifications(TEST_DATASET)
303
+ expect(notifications.send).toHaveBeenCalledTimes(1)
304
+ expect(notifications.send).toHaveBeenLastCalledWith(
305
+ expect.objectContaining({
306
+ _id: expect.stringContaining("retention_deletion"),
307
+ }),
308
+ )
309
+
310
+ // No further notices after deletion
311
+ vi.mocked(notifications.send).mockClear()
312
+ await checkDataRetentionNotifications(TEST_DATASET)
313
+ expect(notifications.send).not.toHaveBeenCalled()
314
+ })
315
+ })
@@ -1,3 +1,4 @@
1
+ /* eslint-disable @typescript-eslint/no-explicit-any */
1
2
  import { vi } from "vitest"
2
3
  import request from "superagent"
3
4
  import { createDataset, datasetsFilter, testBlacklist } from "../dataset"
@@ -11,6 +12,8 @@ vi.mock("ioredis")
11
12
  vi.mock("../../libs/redis")
12
13
  vi.mock("../../config.ts")
13
14
  vi.mock("../../libs/notifications")
15
+ vi.mock("../draft")
16
+ vi.mock("../snapshots")
14
17
 
15
18
  describe("dataset model operations", () => {
16
19
  describe("createDataset()", () => {
@@ -0,0 +1,160 @@
1
+ import config from "../config"
2
+ import notifications from "../libs/notifications"
3
+ import User from "../models/user"
4
+ import Permission from "../models/permission"
5
+ import DataRetention from "../models/dataRetention"
6
+ import { getDraftInfo } from "./draft"
7
+ import { getSnapshots } from "./snapshots"
8
+ import { draftRetentionWarning } from "../libs/email/templates/draft-retention-warning"
9
+ import { draftRetentionDeletion } from "../libs/email/templates/draft-retention-deletion"
10
+ import { snapshotReminder } from "../libs/email/templates/snapshot-reminder"
11
+
12
+ const DAY = 24 * 60 * 60 * 1000
13
+
14
+ /**
15
+ * Notify all users with write or admin access to a dataset.
16
+ */
17
+ async function notifyWriteUsers(
18
+ datasetId: string,
19
+ makeEmail: (user: { _id: string; email: string; name: string }) => object,
20
+ ) {
21
+ const permissions = await Permission.find({
22
+ datasetId,
23
+ level: { $in: ["rw", "admin"] },
24
+ }).exec()
25
+ for (const permission of permissions) {
26
+ const user = await User.findOne({ id: permission.userId }).exec()
27
+ if (user) {
28
+ notifications.send(makeEmail(user))
29
+ }
30
+ }
31
+ }
32
+
33
+ /**
34
+ * Check and send data retention notifications for a dataset.
35
+ *
36
+ * Retention warnings (14-day, 7-day, deletion) are tied to the current draft
37
+ * hexsha and reset whenever the draft changes. The no-snapshot 24h notice is
38
+ * sent once regardless of future draft changes.
39
+ */
40
+ export async function checkDataRetentionNotifications(
41
+ datasetId: string,
42
+ ): Promise<void> {
43
+ const draft = await getDraftInfo(datasetId)
44
+ const snapshots = await getSnapshots(datasetId)
45
+ const lastSnapshot = snapshots?.length
46
+ ? snapshots[snapshots.length - 1]
47
+ : null
48
+
49
+ // Draft is in sync with the last snapshot — no retention action needed
50
+ if (lastSnapshot && draft.hexsha === lastSnapshot.hexsha) {
51
+ return
52
+ }
53
+
54
+ const now = new Date()
55
+ const age = now.getTime() - new Date(draft.modified).getTime()
56
+
57
+ // Upsert the retention record, resetting retention notices on hexsha change
58
+ let record = await DataRetention.findOne({ datasetId }).exec()
59
+ if (!record) {
60
+ record = await DataRetention.create({ datasetId, hexsha: draft.hexsha })
61
+ } else if (record.hexsha !== draft.hexsha) {
62
+ await DataRetention.updateOne(
63
+ { datasetId },
64
+ {
65
+ hexsha: draft.hexsha,
66
+ notifiedAt14Days: null,
67
+ notifiedAt7Days: null,
68
+ notifiedAtDeletion: null,
69
+ },
70
+ ).exec()
71
+ record = await DataRetention.findOne({ datasetId }).exec()
72
+ }
73
+
74
+ // One-time notice: no snapshot created within 24h of initial upload
75
+ if (
76
+ !lastSnapshot && age >= DAY && age < 14 * DAY && !record.notifiedNoSnapshot
77
+ ) {
78
+ await notifyWriteUsers(datasetId, (user) => ({
79
+ _id: `${datasetId}_${user._id}_no_snapshot_reminder`,
80
+ type: "email",
81
+ email: {
82
+ to: user.email,
83
+ name: user.name,
84
+ subject: "Reminder: Create a Snapshot",
85
+ html: snapshotReminder({
86
+ name: user.name,
87
+ datasetName: datasetId,
88
+ datasetId,
89
+ siteUrl: config.url,
90
+ }),
91
+ },
92
+ }))
93
+ await DataRetention.updateOne({ datasetId }, { notifiedNoSnapshot: now })
94
+ .exec()
95
+ }
96
+
97
+ // Retention warnings sent in order from 14 days, 7 days, and 0 days.
98
+ if (age >= 14 * DAY && !record.notifiedAt14Days) {
99
+ await notifyWriteUsers(datasetId, (user) => ({
100
+ _id: `${datasetId}_${user._id}_retention_14day`,
101
+ type: "email",
102
+ email: {
103
+ to: user.email,
104
+ name: user.name,
105
+ subject: "Dataset Draft Deletion Warning: 14 Days Remaining",
106
+ html: draftRetentionWarning({
107
+ name: user.name,
108
+ datasetId,
109
+ daysRemaining: 14,
110
+ siteUrl: config.url,
111
+ }),
112
+ },
113
+ }))
114
+ await DataRetention.updateOne({ datasetId }, { notifiedAt14Days: now })
115
+ .exec()
116
+ } else if (
117
+ record.notifiedAt14Days &&
118
+ !record.notifiedAt7Days &&
119
+ now.getTime() - new Date(record.notifiedAt14Days).getTime() >= 7 * DAY
120
+ ) {
121
+ await notifyWriteUsers(datasetId, (user) => ({
122
+ _id: `${datasetId}_${user._id}_retention_7day`,
123
+ type: "email",
124
+ email: {
125
+ to: user.email,
126
+ name: user.name,
127
+ subject: "Dataset Draft Deletion Warning: 7 Days Remaining",
128
+ html: draftRetentionWarning({
129
+ name: user.name,
130
+ datasetId,
131
+ daysRemaining: 7,
132
+ siteUrl: config.url,
133
+ }),
134
+ },
135
+ }))
136
+ await DataRetention.updateOne({ datasetId }, { notifiedAt7Days: now })
137
+ .exec()
138
+ } else if (
139
+ record.notifiedAt7Days &&
140
+ !record.notifiedAtDeletion &&
141
+ now.getTime() - new Date(record.notifiedAt7Days).getTime() >= 7 * DAY
142
+ ) {
143
+ await notifyWriteUsers(datasetId, (user) => ({
144
+ _id: `${datasetId}_${user._id}_retention_deletion`,
145
+ type: "email",
146
+ email: {
147
+ to: user.email,
148
+ name: user.name,
149
+ subject: "Dataset Draft Pending Deletion",
150
+ html: draftRetentionDeletion({
151
+ name: user.name,
152
+ datasetId,
153
+ siteUrl: config.url,
154
+ }),
155
+ },
156
+ }))
157
+ await DataRetention.updateOne({ datasetId }, { notifiedAtDeletion: now })
158
+ .exec()
159
+ }
160
+ }
@@ -25,7 +25,6 @@ import Subscription from "../models/subscription"
25
25
  import BadAnnexObject from "../models/badAnnexObject"
26
26
  import { datasetsConnection } from "./pagination"
27
27
  import { getDatasetWorker } from "../libs/datalad-service"
28
- import notifications from "../libs/notifications"
29
28
  import { createEvent, updateEvent } from "../libs/events"
30
29
 
31
30
  export const giveUploaderPermission = (datasetId, userId) => {
@@ -70,7 +69,6 @@ export const createDataset = async (
70
69
  // Creation is complete here, mark successful
71
70
  await updateEvent(event)
72
71
  await subscriptions.subscribe(datasetId, uploader)
73
- await notifications.snapshotReminder(datasetId)
74
72
  return ds
75
73
  } catch (e) {
76
74
  Sentry.captureException(e)
@@ -36,9 +36,14 @@ export const brainInitiative = async (
36
36
  // Fetch snapshot if metadata didn't match
37
37
  const snapshot = await latestSnapshot(dataset, null, context)
38
38
  const snapshotDescription = await description(snapshot)
39
- for (const funding of snapshotDescription.Funding) {
40
- if (funding.match(brainInitiativeMatch)) {
41
- return true
39
+ if (
40
+ snapshotDescription?.Funding &&
41
+ Array.isArray(snapshotDescription.Funding)
42
+ ) {
43
+ for (const funding of snapshotDescription.Funding) {
44
+ if (funding.match(brainInitiativeMatch)) {
45
+ return true
46
+ }
42
47
  }
43
48
  }
44
49
  // Check for grant ids too - filter to only alphanumeric to improve matching across format differences
@@ -55,9 +60,14 @@ export const brainInitiative = async (
55
60
  ) {
56
61
  return true
57
62
  }
58
- for (const funding of snapshotDescription.Funding) {
59
- if (funding.replace(/[^a-zA-Z0-9]/g, "").includes(grant)) {
60
- return true
63
+ if (
64
+ snapshotDescription?.Funding &&
65
+ Array.isArray(snapshotDescription.Funding)
66
+ ) {
67
+ for (const funding of snapshotDescription.Funding) {
68
+ if (funding.replace(/[^a-zA-Z0-9]/g, "").includes(grant)) {
69
+ return true
70
+ }
61
71
  }
62
72
  }
63
73
  }
@@ -3,7 +3,7 @@ import BadAnnexObject from "../../models/badAnnexObject"
3
3
  export const filterRemovedAnnexObjects =
4
4
  (datasetId, userInfo) => async (files) => {
5
5
  const removedAnnexObjectKeys = (
6
- await BadAnnexObject.find({ datasetId }).exec()
6
+ await BadAnnexObject.find({ datasetId, removed: true }).exec()
7
7
  ).map(({ annexKey }) => annexKey)
8
8
  // keep files that haven't had their annex objects removed
9
9
  return userInfo?.admin
@@ -156,9 +156,12 @@ export const decodeJWT = (token: string): OpenNeuroTokenProfile => {
156
156
  }
157
157
 
158
158
  export const parsedJwtFromRequest = (req) => {
159
- const jwt = jwtFromRequest(req)
160
- if (jwt) return decodeJWT(jwt)
161
- else return null
159
+ try {
160
+ const jwt = decodeJWT(jwtFromRequest(req))
161
+ return jwt || null
162
+ } catch (_err) {
163
+ return null
164
+ }
162
165
  }
163
166
 
164
167
  const refreshToken = async (jwt) => {
@@ -57,7 +57,7 @@ exports[`email template -> comment created > renders with expected arguments 1`]
57
57
  </head>
58
58
  <body>
59
59
  <div class="top-bar">
60
- <img src="https://openneuro.org/assets/email-header.1cb8bf76.png" />
60
+ <img src="https://openneuro.org/assets/email-header-GR_ZGg8w.png" />
61
61
  </div>
62
62
  <div class="content">
63
63
  <h2>Hi, J. Doe</h2>
@@ -76,7 +76,7 @@ exports[`email template -> comment created > renders with expected arguments 1`]
76
76
 
77
77
  <p>
78
78
  Sincerely,
79
- The CRN Team
79
+ The OpenNeuro Team
80
80
  </p>
81
81
 
82
82
  </div>
@@ -39,7 +39,7 @@ exports[`email template -> comment created > renders with expected arguments 1`]
39
39
  </head>
40
40
  <body>
41
41
  <div class="top-bar">
42
- <img src="https://openneuro.org/assets/email-header.1cb8bf76.png" />
42
+ <img src="https://openneuro.org/assets/email-header-GR_ZGg8w.png" />
43
43
  </div>
44
44
  <div class="content">
45
45
  <h2>Hi, J. Doe</h2>
@@ -50,7 +50,7 @@ exports[`email template -> comment created > renders with expected arguments 1`]
50
50
 
51
51
  <p>
52
52
  Sincerely,
53
- The CRN Team
53
+ The OpenNeuro Team
54
54
  </p>
55
55
  </div>
56
56
  </body>
@@ -39,7 +39,7 @@ exports[`email template -> comment created > renders with expected arguments 1`]
39
39
  </head>
40
40
  <body>
41
41
  <div class="top-bar">
42
- <img src="https://openneuro.org/assets/email-header.1cb8bf76.png" />
42
+ <img src="https://openneuro.org/assets/email-header-GR_ZGg8w.png" />
43
43
  </div>
44
44
  <div class="content">
45
45
  <h2>Hi, J. Doe</h2>
@@ -50,7 +50,7 @@ exports[`email template -> comment created > renders with expected arguments 1`]
50
50
 
51
51
  <p>
52
52
  Sincerely,
53
- The CRN Team
53
+ The OpenNeuro Team
54
54
  </p>
55
55
  </div>
56
56
  </body>
@@ -57,7 +57,7 @@ exports[`email template -> comment created > renders with expected arguments 1`]
57
57
  </head>
58
58
  <body>
59
59
  <div class="top-bar">
60
- <img src="https://openneuro.org/assets/email-header.1cb8bf76.png" />
60
+ <img src="https://openneuro.org/assets/email-header-GR_ZGg8w.png" />
61
61
  </div>
62
62
  <div class="content">
63
63
  <h2>Hi, J. Doe</h2>
@@ -77,7 +77,7 @@ exports[`email template -> comment created > renders with expected arguments 1`]
77
77
 
78
78
  <p>
79
79
  Sincerely,
80
- The CRN Team
80
+ The OpenNeuro Team
81
81
  </p>
82
82
  </div>
83
83
  </body>
@@ -39,7 +39,7 @@ exports[`email template -> comment created > renders with expected arguments 1`]
39
39
  </head>
40
40
  <body>
41
41
  <div class="top-bar">
42
- <img src="https://openneuro.org/assets/email-header.1cb8bf76.png" />
42
+ <img src="https://openneuro.org/assets/email-header-GR_ZGg8w.png" />
43
43
  </div>
44
44
  <div class="content">
45
45
  <h2>Hi, J. Doe</h2>
@@ -50,7 +50,7 @@ exports[`email template -> comment created > renders with expected arguments 1`]
50
50
 
51
51
  <p>
52
52
  Sincerely,
53
- The CRN Team
53
+ The OpenNeuro Team
54
54
  </p>
55
55
 
56
56
  <a class="dataset-link" href="https://openneuro.org/datasets/ds12345678/snapshot">Create a snapshot. &raquo;</a>
@@ -75,7 +75,7 @@ export const commentCreated = ({
75
75
  </head>
76
76
  <body>
77
77
  <div class="top-bar">
78
- <img src="${siteUrl}/assets/email-header.1cb8bf76.png" />
78
+ <img src="${siteUrl}/assets/email-header-GR_ZGg8w.png" />
79
79
  </div>
80
80
  <div class="content">
81
81
  <h2>Hi, ${name}</h2>
@@ -94,7 +94,7 @@ export const commentCreated = ({
94
94
 
95
95
  <p>
96
96
  Sincerely,
97
- The CRN Team
97
+ The OpenNeuro Team
98
98
  </p>
99
99
 
100
100
  </div>
@@ -45,7 +45,7 @@ export const datasetDeleted = ({
45
45
  </head>
46
46
  <body>
47
47
  <div class="top-bar">
48
- <img src="${siteUrl}/assets/email-header.1cb8bf76.png" />
48
+ <img src="${siteUrl}/assets/email-header-GR_ZGg8w.png" />
49
49
  </div>
50
50
  <div class="content">
51
51
  <h2>Hi, ${name}</h2>
@@ -56,7 +56,7 @@ export const datasetDeleted = ({
56
56
 
57
57
  <p>
58
58
  Sincerely,
59
- The CRN Team
59
+ The OpenNeuro Team
60
60
  </p>
61
61
  </div>
62
62
  </body>
@@ -67,7 +67,7 @@ export const datasetImportFailed = ({
67
67
  </head>
68
68
  <body>
69
69
  <div class="top-bar">
70
- <img src="${siteUrl}/assets/email-header.1cb8bf76.png" />
70
+ <img src="${siteUrl}/assets/email-header-GR_ZGg8w.png" />
71
71
  </div>
72
72
  <div class="content">
73
73
  <h2>Hi, ${name}</h2>
@@ -87,7 +87,7 @@ export const datasetImportFailed = ({
87
87
 
88
88
  <p>
89
89
  Sincerely,
90
- The CRN Team
90
+ The OpenNeuro Team
91
91
  </p>
92
92
  </div>
93
93
  </body>
@@ -63,7 +63,7 @@ export const datasetImportEmail = ({
63
63
  </head>
64
64
  <body>
65
65
  <div class="top-bar">
66
- <img src="${siteUrl}/assets/email-header.1cb8bf76.png" />
66
+ <img src="${siteUrl}/assets/email-header-GR_ZGg8w.png" />
67
67
  </div>
68
68
  <div class="content">
69
69
  <h2>Hi, ${name}</h2>
@@ -78,7 +78,7 @@ export const datasetImportEmail = ({
78
78
 
79
79
  <p>
80
80
  Sincerely,
81
- The CRN Team
81
+ The OpenNeuro Team
82
82
  </p>
83
83
  </div>
84
84
  </body>
@@ -0,0 +1,67 @@
1
+ export const draftRetentionDeletion = ({
2
+ siteUrl,
3
+ name,
4
+ datasetId,
5
+ }: {
6
+ siteUrl: string
7
+ name: string
8
+ datasetId: string
9
+ }): string =>
10
+ `<html>
11
+ <head>
12
+ <style>
13
+ body {
14
+ font-family: sans-serif;
15
+ font-weight: lighter;
16
+ background: #F5F5F5;
17
+ }
18
+ .top-bar {
19
+ width: 100%;
20
+ background: #333;
21
+ padding: 8px 0px 8px 15px;
22
+ }
23
+ .content {
24
+ padding: 15px;
25
+ }
26
+ p {
27
+ font-size: 16px;
28
+ font-weight: lighter;
29
+ }
30
+ b {
31
+ font-weight: bold;
32
+ }
33
+ .dataset-link {
34
+ display: inline-block;
35
+ background: #d9534f;
36
+ color: #FFF;
37
+ font-size: 20px;
38
+ padding: 8px 15px;
39
+ text-decoration: none;
40
+ cursor: pointer;
41
+ }
42
+ </style>
43
+ </head>
44
+ <body>
45
+ <div class="top-bar">
46
+ <img src="${siteUrl}/assets/email-header-GR_ZGg8w.png" />
47
+ </div>
48
+ <div class="content">
49
+ <h2>Hi, ${name}</h2>
50
+
51
+ <p>
52
+ The draft data for your dataset <b>${datasetId}</b> has not been updated in some time and may be automatically removed at any time.
53
+ </p>
54
+
55
+ <p>
56
+ Once data has been removed you will need to re-upload your files to continue working on this dataset. Please see our data retention policy for more information: <a href="https://docs.openneuro.org/policy/data_retention.html">Data Retention Policy.</a>
57
+ </p>
58
+
59
+ <p>
60
+ Sincerely,
61
+ The OpenNeuro Team
62
+ </p>
63
+
64
+ <a class="dataset-link" href="${siteUrl}/datasets/${datasetId}">View dataset. &raquo;</a>
65
+ </div>
66
+ </body>
67
+ </html>`
@@ -0,0 +1,70 @@
1
+ export const draftRetentionWarning = ({
2
+ siteUrl,
3
+ name,
4
+ datasetId,
5
+ daysRemaining,
6
+ }: {
7
+ siteUrl: string
8
+ name: string
9
+ datasetId: string
10
+ daysRemaining: number
11
+ }): string =>
12
+ `<html>
13
+ <head>
14
+ <style>
15
+ body {
16
+ font-family: sans-serif;
17
+ font-weight: lighter;
18
+ background: #F5F5F5;
19
+ }
20
+ .top-bar {
21
+ width: 100%;
22
+ background: #333;
23
+ padding: 8px 0px 8px 15px;
24
+ }
25
+ .content {
26
+ padding: 15px;
27
+ }
28
+ p {
29
+ font-size: 16px;
30
+ font-weight: lighter;
31
+ }
32
+ b {
33
+ font-weight: bold;
34
+ }
35
+ .dataset-link {
36
+ display: inline-block;
37
+ background: #5cb85c;
38
+ color: #FFF;
39
+ font-size: 20px;
40
+ padding: 8px 15px;
41
+ text-decoration: none;
42
+ cursor: pointer;
43
+ }
44
+ </style>
45
+ </head>
46
+ <body>
47
+ <div class="top-bar">
48
+ <img src="${siteUrl}/assets/email-header-GR_ZGg8w.png" />
49
+ </div>
50
+ <div class="content">
51
+ <h2>Hi, ${name}</h2>
52
+
53
+ <p>
54
+ Your dataset <b>${datasetId}</b> has an unpublished draft that has not been updated in some time.
55
+ If no snapshot is created within <b>${daysRemaining} days</b>, the draft data will be automatically removed.
56
+ </p>
57
+
58
+ <p>
59
+ To preserve your data, please create a snapshot or update your draft. Please see our data retention policy for more information: <a href="https://docs.openneuro.org/policy/data_retention.html">Data Retention Policy.</a>
60
+ </p>
61
+
62
+ <p>
63
+ Sincerely,
64
+ The OpenNeuro Team
65
+ </p>
66
+
67
+ <a class="dataset-link" href="${siteUrl}/datasets/${datasetId}/snapshot">Create a snapshot. &raquo;</a>
68
+ </div>
69
+ </body>
70
+ </html>`
@@ -45,7 +45,7 @@ export const ownerUnsubscribed = ({
45
45
  </head>
46
46
  <body>
47
47
  <div class="top-bar">
48
- <img src="${siteUrl}/assets/email-header.1cb8bf76.png" />
48
+ <img src="${siteUrl}/assets/email-header-GR_ZGg8w.png" />
49
49
  </div>
50
50
  <div class="content">
51
51
  <h2>Hi, ${name}</h2>
@@ -56,7 +56,7 @@ export const ownerUnsubscribed = ({
56
56
 
57
57
  <p>
58
58
  Sincerely,
59
- The CRN Team
59
+ The OpenNeuro Team
60
60
  </p>
61
61
  </div>
62
62
  </body>
@@ -69,7 +69,7 @@ export const snapshotCreated = ({
69
69
  </head>
70
70
  <body>
71
71
  <div class="top-bar">
72
- <img src="${siteUrl}/assets/email-header.1cb8bf76.png" />
72
+ <img src="${siteUrl}/assets/email-header-GR_ZGg8w.png" />
73
73
  </div>
74
74
  <div class="content">
75
75
  <h2>Hi, ${name}</h2>
@@ -89,7 +89,7 @@ export const snapshotCreated = ({
89
89
 
90
90
  <p>
91
91
  Sincerely,
92
- The CRN Team
92
+ The OpenNeuro Team
93
93
  </p>
94
94
  </div>
95
95
  </body>
@@ -47,7 +47,7 @@ export const snapshotReminder = ({
47
47
  </head>
48
48
  <body>
49
49
  <div class="top-bar">
50
- <img src="${siteUrl}/assets/email-header.1cb8bf76.png" />
50
+ <img src="${siteUrl}/assets/email-header-GR_ZGg8w.png" />
51
51
  </div>
52
52
  <div class="content">
53
53
  <h2>Hi, ${name}</h2>
@@ -58,7 +58,7 @@ export const snapshotReminder = ({
58
58
 
59
59
  <p>
60
60
  Sincerely,
61
- The CRN Team
61
+ The OpenNeuro Team
62
62
  </p>
63
63
 
64
64
  <a class="dataset-link" href="${siteUrl}/datasets/${datasetId}/snapshot">Create a snapshot. &raquo;</a>
@@ -0,0 +1,34 @@
1
import mongoose from "mongoose"
import type { Document } from "mongoose"
const { Schema, model } = mongoose

/**
 * Per-dataset record of which data retention notifications have been sent.
 * Each `notified*` field holds the time a given notice went out, or null if
 * that notice has not been sent for the current draft revision.
 */
export interface DataRetentionDocument extends Document {
  // Dataset accession number this record tracks
  datasetId: string
  // Current draft revision — when this changes, retention notices reset
  hexsha: string
  // Retention warning notices (reset when hexsha changes)
  notifiedAt14Days: Date | null
  notifiedAt7Days: Date | null
  notifiedAtDeletion: Date | null
  // One-time notice: no snapshot created within 24h of upload
  notifiedNoSnapshot: Date | null
}

const dataRetentionSchema = new Schema({
  // Unique index: at most one retention record per dataset
  datasetId: { type: String, required: true, unique: true },
  hexsha: { type: String, required: true },
  // Null means the corresponding notification has not been sent yet
  notifiedAt14Days: { type: Date, default: null },
  notifiedAt7Days: { type: Date, default: null },
  notifiedAtDeletion: { type: Date, default: null },
  notifiedNoSnapshot: { type: Date, default: null },
})

/**
 * Data retention notification status for datasets
 */
const DataRetention = model<DataRetentionDocument>(
  "DataRetention",
  dataRetentionSchema,
)

export default DataRetention
@@ -0,0 +1,23 @@
1
import mongoose from "mongoose"
import type { Document } from "mongoose"
const { Schema, model } = mongoose

/**
 * One document per scheduled task, keyed by task name, recording when the
 * task last ran. Used as a shared lock so only one instance runs a task.
 */
export interface SchedulerStateDocument extends Document {
  // Unique identifier for the scheduled task (e.g. "dataRetentionScan")
  key: string
  // Time the task last ran anywhere; null if it has never run
  lastRun: Date | null
}

const schedulerStateSchema = new Schema({
  key: { type: String, required: true, unique: true },
  lastRun: { type: Date, default: null },
})

/**
 * Coordinates last run of any scheduled tasks across multiple server instances
 */
const SchedulerState = model<SchedulerStateDocument>(
  "SchedulerState",
  schedulerStateSchema,
)

export default SchedulerState
@@ -2,6 +2,7 @@ import type { Consumer } from "redis-smq"
2
2
  import { reindexDataset } from "../elasticsearch/reindex-dataset"
3
3
  import { OpenNeuroQueues } from "./queues"
4
4
  import * as Sentry from "@sentry/node"
5
+ import { checkDataRetentionNotifications } from "../datalad/dataRetentionNotifications"
5
6
 
6
7
  export function startConsumer(consumer: Consumer) {
7
8
  const reindexMessageHandler = async (msg, cb) => {
@@ -9,10 +10,38 @@ export function startConsumer(consumer: Consumer) {
9
10
  reindexDataset(msg.body.datasetId).then(cb)
10
11
  }
11
12
 
13
+ const dataRetentionMessageHandler = async (msg, cb) => {
14
+ // Check data retention and send notifications for a dataset
15
+ try {
16
+ await checkDataRetentionNotifications(msg.body.datasetId)
17
+ cb()
18
+ } catch (err) {
19
+ Sentry.captureException(err)
20
+ cb()
21
+ }
22
+ }
23
+
12
24
  consumer.consume(OpenNeuroQueues.INDEXING, reindexMessageHandler, (err) => {
13
25
  if (err) {
14
26
  Sentry.captureException(err)
15
27
  }
16
28
  })
29
+
30
+ consumer.consume(
31
+ OpenNeuroQueues.DATARETENTION,
32
+ dataRetentionMessageHandler,
33
+ (err) => {
34
+ if (err) {
35
+ Sentry.captureException(err)
36
+ }
37
+ },
38
+ )
39
+
40
+ consumer.run((err) => {
41
+ if (err) {
42
+ Sentry.captureException(err)
43
+ }
44
+ })
45
+
17
46
  return consumer
18
47
  }
@@ -16,3 +16,17 @@ export function queueIndexDataset(datasetId: string) {
16
16
  }
17
17
  })
18
18
  }
19
+
20
/**
 * Queue a data retention policy check for a dataset
 * @param datasetId Dataset to check for retention notifications
 */
export function queueDataRetentionCheck(datasetId: string) {
  const msg = new ProducibleMessage()
  msg.setQueue(OpenNeuroQueues.DATARETENTION).setBody({ datasetId })
  // Fire-and-forget produce — failures are reported to Sentry, not retried here
  producer.produce(msg, (err) => {
    if (err) {
      Sentry.captureException(err)
    }
  })
}
@@ -0,0 +1,81 @@
1
+ import * as Sentry from "@sentry/node"
2
+ import Dataset from "../models/dataset"
3
+ import SchedulerState from "../models/schedulerState"
4
+ import { queueDataRetentionCheck } from "./producer-methods"
5
+
6
// One day in milliseconds — minimum spacing between daily scans
const DAY_MS = 24 * 60 * 60 * 1000
const POLL_INTERVAL_MS = 30 * 60 * 1000 // Check every 30 minutes
// SchedulerState document key for the daily data retention scan lock
const SCHEDULER_KEY = "dataRetentionScan"
9
+
10
+ /**
11
+ * Iterate over all datasets and enqueue any per dataset actions.
12
+ */
13
+ async function enqueueAllDatasetChecks(): Promise<void> {
14
+ const cursor = Dataset.find({}, "id").cursor()
15
+ for await (const dataset of cursor) {
16
+ // Check data retention policy status and send notifications
17
+ queueDataRetentionCheck(dataset.id)
18
+ }
19
+ }
20
+
21
/**
 * Ensure the scheduler state document exists in MongoDB.
 * Idempotent: the upsert only sets lastRun on first creation.
 */
async function initSchedulerState(): Promise<void> {
  try {
    await SchedulerState.updateOne(
      { key: SCHEDULER_KEY },
      { $setOnInsert: { lastRun: null } },
      { upsert: true },
    )
  } catch (err) {
    // Ignore duplicate key errors on insert race condition —
    // two instances can race the upsert; E11000 means the doc now exists
    if (!(err instanceof Error && "code" in err && err.code === 11000)) {
      throw err
    }
  }
}
38
+
39
/**
 * Attempt to atomically claim the daily data retention scan run.
 * Returns true if this instance successfully claimed the run.
 */
async function claimDailyRun(): Promise<boolean> {
  // Eligible only if the last run was more than one day ago (or never ran)
  const threshold = new Date(Date.now() - DAY_MS)
  // findOneAndUpdate is a single atomic operation, so at most one server
  // instance can match the stale lastRun and stamp the new time
  const result = await SchedulerState.findOneAndUpdate(
    {
      key: SCHEDULER_KEY,
      $or: [{ lastRun: null }, { lastRun: { $lt: threshold } }],
    },
    { $set: { lastRun: new Date() } },
    { new: true },
  )
  // null means another instance already claimed this run window
  return result !== null
}
55
+
56
+ async function runDailyCheck(): Promise<void> {
57
+ const claimed = await claimDailyRun()
58
+ if (!claimed) return
59
+ await enqueueAllDatasetChecks()
60
+ }
61
+
62
/**
 * Start the daily per dataset check schedule.
 * Polls every 30 minutes; uses a MongoDB distributed lock so only one server
 * instance runs the scan per day, durable across restarts.
 */
export async function startDailySchedule(): Promise<void> {
  const run = () => {
    // Fire-and-forget: failures are reported but never stop the schedule
    runDailyCheck().catch((err) => {
      Sentry.captureException(err)
      // eslint-disable-next-line no-console
      console.error(err)
    })
  }

  await initSchedulerState()

  // Check shortly after startup, then poll every 30 minutes
  setTimeout(run, 60 * 1000)
  setInterval(run, POLL_INTERVAL_MS)
}
@@ -1,34 +1,60 @@
1
1
  import { Queue } from "redis-smq"
2
2
  import { EQueueDeliveryModel, EQueueType, QueueRateLimit } from "redis-smq"
3
- import * as Sentry from "@sentry/node"
4
3
 
5
4
/** Names of the redis-smq queues used by the OpenNeuro server */
export enum OpenNeuroQueues {
  // Elasticsearch dataset reindexing jobs
  INDEXING = "elasticsearch_indexing",
  // Per-dataset data retention notification checks
  DATARETENTION = "data_retention",
}
8
8
 
9
- export function setupQueues() {
10
- const indexingQueue = new Queue()
11
- indexingQueue.save(
12
- OpenNeuroQueues.INDEXING,
13
- EQueueType.FIFO_QUEUE,
14
- EQueueDeliveryModel.POINT_TO_POINT,
15
- (err) => {
16
- // The queue may already exist, don't log that error
9
/**
 * Create a redis-smq queue, resolving once it exists.
 * @param name Queue name to create
 * @param type Queue type (e.g. FIFO)
 * @param delivery Delivery model (e.g. point-to-point)
 * @returns Resolves on success or if the queue already exists; rejects otherwise
 */
function saveQueue(
  name: string,
  type: EQueueType,
  delivery: EQueueDeliveryModel,
): Promise<void> {
  return new Promise((resolve, reject) => {
    const queue = new Queue()
    queue.save(name, type, delivery, (err) => {
      // The queue may already exist — treat that as success, not an error
      if (err && err.name !== "QueueQueueExistsError") {
        reject(err)
      } else {
        resolve()
      }
    })
  })
}
22
25
 
23
- // Limit indexing queue to 8 runs per minute to avoid stacking indexing excessively
24
- const queueRateLimit = new QueueRateLimit()
25
- queueRateLimit.set(
26
- OpenNeuroQueues.INDEXING,
27
- { limit: 8, interval: 60000 },
28
- (err) => {
26
/**
 * Apply a rate limit to a redis-smq queue.
 * @param name Queue to limit
 * @param limit Maximum deliveries allowed per interval
 * @param interval Interval window in milliseconds
 * @returns Resolves once the limit is set; rejects on any error
 */
function setRateLimit(
  name: string,
  limit: number,
  interval: number,
): Promise<void> {
  return new Promise((resolve, reject) => {
    const queueRateLimit = new QueueRateLimit()
    queueRateLimit.set(name, { limit, interval }, (err) => {
      if (err) {
        reject(err)
      } else {
        resolve()
      }
    })
  })
}
42
+
43
/**
 * Create all OpenNeuro queues and apply their rate limits.
 * Safe to run on every startup — existing queues are left untouched.
 */
export async function setupQueues(): Promise<void> {
  await saveQueue(
    OpenNeuroQueues.INDEXING,
    EQueueType.FIFO_QUEUE,
    EQueueDeliveryModel.POINT_TO_POINT,
  )
  await saveQueue(
    OpenNeuroQueues.DATARETENTION,
    EQueueType.FIFO_QUEUE,
    EQueueDeliveryModel.POINT_TO_POINT,
  )

  // Limit indexing queue to 8 runs per minute to avoid stacking indexing excessively
  await setRateLimit(OpenNeuroQueues.INDEXING, 8, 60000)

  // Rate limit data retention queue to 16 runs per minute
  await setRateLimit(OpenNeuroQueues.DATARETENTION, 16, 60000)
}
@@ -3,6 +3,7 @@ import type { IRedisSMQConfig } from "redis-smq"
3
3
  import { ERedisConfigClient } from "redis-smq-common"
4
4
  import { startConsumer } from "./consumer"
5
5
  import { setupQueues } from "./queues"
6
+ import { startDailySchedule } from "./queue-schedule"
6
7
  import config from "../config"
7
8
 
8
9
  const smqConfig: IRedisSMQConfig = {
@@ -19,11 +20,21 @@ const smqConfig: IRedisSMQConfig = {
19
20
 
20
21
  Configuration.getSetConfig(smqConfig)
21
22
 
22
- // Producer starts automatically
23
23
  export const producer = new Producer()
24
24
  export const consumer = new Consumer()
25
25
 
26
- export function initQueues() {
27
- setupQueues()
26
/**
 * Start the redis-smq producer, resolving once it is running.
 */
function runProducer(): Promise<void> {
  return new Promise((resolve, reject) => {
    producer.run((err) => {
      if (err) reject(err)
      else resolve()
    })
  })
}
34
+
35
+ export async function initQueues() {
36
+ await setupQueues()
37
+ await runProducer()
28
38
  startConsumer(consumer)
39
+ await startDailySchedule()
29
40
  }
package/src/server.ts CHANGED
@@ -13,7 +13,7 @@ void mongoose.connect(config.mongo.url, {
13
13
 
14
14
  async function init() {
15
15
  // Start redis message queues
16
- initQueues()
16
+ await initQueues()
17
17
  const app = await expressApolloSetup()
18
18
  const server = createServer(app)
19
19
  server.listen(config.port, () => {