@openneuro/server 4.33.4 → 4.34.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openneuro/server",
-  "version": "4.33.4",
+  "version": "4.34.0-alpha.0",
   "description": "Core service for the OpenNeuro platform.",
   "license": "MIT",
   "main": "src/server.js",
@@ -21,7 +21,7 @@
     "@elastic/elasticsearch": "8.13.1",
     "@graphql-tools/schema": "^10.0.0",
     "@keyv/redis": "^2.7.0",
-    "@openneuro/search": "^4.33.4",
+    "@openneuro/search": "^4.34.0-alpha.0",
     "@sentry/node": "^8.25.0",
     "@sentry/profiling-node": "^8.25.0",
     "base64url": "^3.0.0",
@@ -85,5 +85,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "6ea88e06a699358523ad7b47f8c8e9ac3ef8a2c2"
+  "gitHead": "7711b48292c2e7f40bb45759617def2bbd07c88b"
 }
@@ -87,6 +87,25 @@ describe("datalad dataset descriptions", () => {
     expect(Array.isArray(repaired.ReferencesAndLinks)).toBe(true)
     expect(Array.isArray(repaired.Funding)).toBe(true)
   })
+  it("converts array of objects to empty array for Funding", () => {
+    const description = {
+      Funding: [{ grant: "123" }, { grant: "456" }],
+    }
+    const repaired = repairDescriptionTypes(description)
+    expect(repaired.Funding).toEqual([])
+  })
+  it("sets DatasetType to 'raw' if not a string", () => {
+    const description = {
+      DatasetType: 123,
+    }
+    const repaired = repairDescriptionTypes(description)
+    expect(repaired.DatasetType).toEqual("raw")
+  })
+  it("sets BIDSVersion to '1.8.0' if missing", () => {
+    const description = {}
+    const repaired = repairDescriptionTypes(description)
+    expect(repaired.BIDSVersion).toEqual("1.8.0")
+  })
 })
 describe("getDescriptionObject()", () => {
   beforeAll(() => {
@@ -23,6 +23,7 @@ vi.mock("../draft.ts", () => ({
 }))
 vi.mock("../../config.ts")
 vi.mock("../../libs/notifications.ts")
+vi.mock("../../libs/events.ts")

 describe("snapshot model operations", () => {
   describe("createSnapshot()", () => {
@@ -25,6 +25,7 @@ import BadAnnexObject from "../models/badAnnexObject"
 import { datasetsConnection } from "./pagination"
 import { getDatasetWorker } from "../libs/datalad-service"
 import notifications from "../libs/notifications"
+import { createEvent, updateEvent } from "../libs/events"

 export const giveUploaderPermission = (datasetId, userId) => {
   const permission = new Permission({ datasetId, userId, level: "admin" })
@@ -42,12 +43,18 @@ export const giveUploaderPermission = (datasetId, userId) => {
  * @returns {Promise} Resolves to {id: accessionNumber} for the new dataset
  */
 export const createDataset = async (
-  uploader,
+  uploader: string,
   userInfo,
   { affirmedDefaced, affirmedConsent },
 ) => {
   // Obtain an accession number
   const datasetId = await getAccessionNumber()
+  // Generate the created event
+  const event = await createEvent(
+    datasetId,
+    uploader,
+    { type: "created" },
+  )
   try {
     const ds = new Dataset({ id: datasetId, uploader })
     await request
@@ -59,6 +66,8 @@ export const createDataset = async (
     const md = new Metadata({ datasetId, affirmedDefaced, affirmedConsent })
     await md.save()
     await giveUploaderPermission(datasetId, uploader)
+    // Creation is complete here, mark successful
+    await updateEvent(event)
     await subscriptions.subscribe(datasetId, uploader)
     await notifications.snapshotReminder(datasetId)
     return ds
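
This release adopts a two-step event pattern throughout: an event is written with success: false before the tracked operation runs, and updateEvent flips it to success: true only after the operation completes, so an exception leaves a failure record behind. A minimal sketch of the pattern (doWork is a hypothetical stand-in for the tracked operation):

import { createEvent, updateEvent } from "../libs/events"

// Hypothetical stand-in for the operation being tracked
async function doWork(datasetId: string): Promise<void> {}

export async function trackedOperation(datasetId: string, userId: string) {
  // Recorded immediately with success: false (the createEvent default)
  const event = await createEvent(datasetId, userId, { type: "upload" })
  await doWork(datasetId)
  // Only reached if doWork resolved, so the event can be marked successful
  await updateEvent(event)
}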
@@ -105,11 +114,18 @@ export const getDataset = async (id) => {
 /**
  * Delete dataset and associated documents
  */
-export const deleteDataset = (id) =>
-  request
-    .del(`${getDatasetWorker(id)}/datasets/${id}`)
-    .then(() => Dataset.deleteOne({ id }).exec())
-    .then(() => true)
+export const deleteDataset = async (datasetId, user) => {
+  const event = await createEvent(
+    datasetId,
+    user.id,
+    { type: "deleted" },
+  )
+  await request
+    .del(`${getDatasetWorker(datasetId)}/datasets/${datasetId}`)
+  await Dataset.deleteOne({ id: datasetId }).exec()
+  await updateEvent(event)
+  return true
+}

 /**
  * For public datasets, cache combinations of sorts/limits/cursors to speed responses
@@ -498,12 +514,19 @@ export const flagAnnexObject = (
 /**
  * Update public state
  */
-export const updatePublic = (datasetId, publicFlag) =>
-  Dataset.updateOne(
+export async function updatePublic(datasetId, publicFlag, user) {
+  const event = await createEvent(
+    datasetId,
+    user.id,
+    { type: "published", public: publicFlag },
+  )
+  await Dataset.updateOne(
     { id: datasetId },
     { public: publicFlag, publishDate: new Date() },
     { upsert: true },
   ).exec()
+  await updateEvent(event)
+}

 export const getDatasetAnalytics = (datasetId, _tag) => {
   return Dataset.findOne({ id: datasetId }).then((ds) => ({
@@ -11,6 +11,15 @@ import { getDatasetWorker } from "../libs/datalad-service"
 import CacheItem, { CacheType } from "../cache/item"
 import { datasetOrSnapshot } from "../utils/datasetOrSnapshot"

+/**
+ * Checks if all elements in an array are strings.
+ * @param arr The array to check.
+ * @returns True if all elements are strings, false otherwise.
+ */
+const isArrayOfStrings = (arr: unknown): arr is string[] => {
+  return Array.isArray(arr) && arr.every((item) => typeof item === "string")
+}
+
 /**
  * Find dataset_description.json id and fetch description object
  * @param {string} datasetId
@@ -36,64 +45,65 @@ export const descriptionCacheKey = (datasetId, revision) => {

 export const repairDescriptionTypes = (description) => {
   const newDescription = { ...description }
-  // Array types
-  if (
-    Object.hasOwn(description, "Authors") &&
-    !Array.isArray(description.Authors)
-  ) {
-    newDescription.Authors = [description.Authors]
-  }
-  if (
-    Object.hasOwn(description, "ReferencesAndLinks") &&
-    !Array.isArray(description.ReferencesAndLinks)
-  ) {
-    newDescription.ReferencesAndLinks = [description.ReferencesAndLinks]
-  }
-  if (
-    Object.hasOwn(description, "Funding") &&
-    !Array.isArray(description.Funding)
-  ) {
-    newDescription.Funding = [description.Funding]
-  }
-  if (
-    Object.hasOwn(description, "EthicsApprovals") &&
-    !Array.isArray(description.EthicsApprovals)
-  ) {
-    newDescription.EthicsApprovals = [description.EthicsApprovals]
-  }
-  // String types
-  if (
-    Object.hasOwn(description, "Name") &&
-    typeof description.Name !== "string"
-  ) {
-    newDescription.Name = JSON.stringify(description.Name) || ""
-  }
-  if (
-    Object.hasOwn(description, "DatasetDOI") &&
-    typeof description.DatasetDOI !== "string"
-  ) {
-    newDescription.DatasetDOI = JSON.stringify(description.DatasetDOI) || ""
-  }
-  if (
-    Object.hasOwn(description, "Acknowledgements") &&
-    typeof description.Acknowledgements !== "string"
-  ) {
-    newDescription.Acknowledgements =
-      JSON.stringify(description.Acknowledgements) || ""
+
+  // Define fields that should be arrays of strings
+  const arrayStringFields = [
+    "Authors",
+    "ReferencesAndLinks",
+    "Funding",
+    "EthicsApprovals",
+  ]
+
+  // Repair array types - ensure they are arrays of strings
+  for (const field of arrayStringFields) {
+    if (Object.hasOwn(description, field)) {
+      if (!isArrayOfStrings(description[field])) {
+        // If it's not an array of strings (or not an array at all), replace with an empty array
+        newDescription[field] = []
+      }
+      // If it is already a valid array of strings, no change is needed.
+    }
+    // If the field doesn't exist, we don't add it.
   }
-  if (
-    Object.hasOwn(description, "HowToAcknowledge") &&
-    typeof description.HowToAcknowledge !== "string"
-  ) {
-    newDescription.HowToAcknowledge =
-      JSON.stringify(description.HowToAcknowledge) || ""
+
+  // Define fields that should be strings
+  const stringFields = [
+    "Name",
+    "DatasetDOI",
+    "Acknowledgements",
+    "HowToAcknowledge",
+    "DatasetType",
+  ]
+
+  // Repair string types - ensure they are strings
+  for (const field of stringFields) {
+    if (Object.hasOwn(description, field)) {
+      if (typeof description[field] !== "string") {
+        // Attempt to stringify non-string types, default to empty string or specific default
+        if (field === "DatasetType") {
+          newDescription[field] = "raw" // Specific default for DatasetType
+        } else {
+          try {
+            // Use JSON.stringify for complex types, otherwise just convert
+            const stringified = typeof description[field] === "object"
+              ? JSON.stringify(description[field])
+              : String(description[field])
+            newDescription[field] = stringified || ""
+          } catch (_err) {
+            newDescription[field] = "" // Fallback to empty string on error
+          }
+        }
+      }
+      // If it's already a string, no change needed.
+    }
+    // If the field doesn't exist, we don't add it.
   }
-  if (
-    Object.hasOwn(description, "DatasetType") &&
-    typeof description.DatasetType !== "string"
-  ) {
-    newDescription.DatasetType = "raw"
+
+  // Ensure BIDSVersion is present if missing (common default)
+  if (!Object.hasOwn(newDescription, "BIDSVersion")) {
+    newDescription.BIDSVersion = "1.8.0" // Or your desired default BIDS version
   }
+
   return newDescription
 }

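A worked example of the repaired shape, matching the added tests above (the import path is illustrative):

import { repairDescriptionTypes } from "./description"

const repaired = repairDescriptionTypes({
  Name: 42, // number, not a string - converted to "42"
  Funding: [{ grant: "123" }], // not an array of strings - reset to []
  DatasetType: 123, // not a string - defaulted to "raw"
})
// repaired.Name === "42"
// repaired.Funding -> []
// repaired.DatasetType === "raw"
// repaired.BIDSVersion === "1.8.0" (added because it was missing)
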
@@ -34,8 +34,8 @@ export const readme = (obj) => {
   })
 }

-export const setReadme = (datasetId, readme, user) => {
-  return addFileString(datasetId, "README", "text/plain", readme).then(() =>
+export const setReadme = (datasetId, readme, filename, user) => {
+  return addFileString(datasetId, filename, "text/plain", readme).then(() =>
     commitFiles(datasetId, user)
   )
 }
@@ -22,6 +22,7 @@ import type { SnapshotDocument } from "../models/snapshot"
 import { updateDatasetRevision } from "./draft"
 import { getDatasetWorker } from "../libs/datalad-service"
 import { join } from "path"
+import { createEvent, updateEvent } from "../libs/events"

 const lockSnapshot = (datasetId, tag) => {
   return redlock.lock(
@@ -140,6 +141,11 @@ export const createSnapshot = async (
   const snapshotLock = await lockSnapshot(datasetId, tag)

   try {
+    // Create a version attempt event
+    const event = await createEvent(datasetId, user.id, {
+      type: "versioned",
+      version: tag,
+    })
     await createIfNotExistsDoi(datasetId, tag, descriptionFieldUpdates)

     const createSnapshotUrl = `${
@@ -167,6 +173,10 @@ export const createSnapshot = async (
       updateDatasetName(datasetId),
     ])

+    // Version is created here and event is updated
+    await updateEvent(event)
+
+    // Immediate indexing for new snapshots
     await reindexDataset(datasetId)

     announceNewSnapshot(snapshot, datasetId, user)
@@ -22,6 +22,7 @@ import { onBrainlife } from "./brainlife"
 import { brainInitiative } from "./brainInitiative"
 import { derivatives } from "./derivatives"
 import { promiseTimeout } from "../../utils/promiseTimeout"
+import { datasetEvents } from "./datasetEvents"
 import semver from "semver"

 export const dataset = async (obj, { id }, { user, userInfo }) => {
@@ -120,7 +121,7 @@ export const deleteDataset = async (
   { user, userInfo },
 ) => {
   await checkDatasetWrite(id, user, userInfo)
-  const deleted = await datalad.deleteDataset(id)
+  const deleted = await datalad.deleteDataset(id, userInfo)
   // Remove from the current version of the Elastic index
   try {
     await removeDatasetSearchDocument(id)
@@ -205,7 +206,7 @@ export const updatePublic = (
   { user, userInfo },
 ) => {
   return checkDatasetWrite(datasetId, user, userInfo).then(() => {
-    return datalad.updatePublic(datasetId, publicFlag)
+    return datalad.updatePublic(datasetId, publicFlag, userInfo)
   })
 }

@@ -303,6 +304,7 @@ const Dataset = {
   worker,
   reviewers,
   brainInitiative,
+  events: datasetEvents,
 }

 export default Dataset
@@ -0,0 +1,59 @@
+import DatasetEvent from "../../models/datasetEvents"
+
+/**
+ * Get all events for a dataset
+ */
+export function datasetEvents(obj, _, { userInfo }) {
+  if (userInfo?.admin) {
+    // Site admins can see all events
+    return DatasetEvent.find({ datasetId: obj.id })
+      .sort({ timestamp: -1 })
+      .populate("user")
+      .exec()
+  } else {
+    // Non-admin users can only see notes without the admin flag
+    return DatasetEvent.find({
+      datasetId: obj.id,
+      "event.admin": { $ne: true },
+    })
+      .sort({ timestamp: -1 })
+      .populate("user")
+      .exec()
+  }
+}
+
+/**
+ * Create or update an admin note event
+ */
+export async function saveAdminNote(
+  obj,
+  { id, datasetId, note },
+  { user, userInfo },
+) {
+  // Only site admin users can create an admin note
+  if (!userInfo?.admin) {
+    throw new Error("Not authorized")
+  }
+  if (id) {
+    const event = await DatasetEvent.findOne({ id, datasetId })
+    event.note = note
+    await event.save()
+    await event.populate("user")
+    return event
+  } else {
+    const event = new DatasetEvent({
+      id,
+      datasetId,
+      userId: user,
+      event: {
+        type: "note",
+        admin: true,
+      },
+      success: true,
+      note,
+    })
+    await event.save()
+    await event.populate("user")
+    return event
+  }
+}
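
Note on the non-admin filter: in MongoDB, event: { admin: { $ne: true } } would be an exact subdocument equality match rather than a condition on the nested field, so the dot-path form used above is what applies $ne to event.admin. Illustrated:

// Exact match: only documents whose event field literally equals { admin: { $ne: true } }
DatasetEvent.find({ event: { admin: { $ne: true } } })
// Dot path: hides events where event.admin is true, keeps everything else
DatasetEvent.find({ "event.admin": { $ne: true } })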
@@ -10,7 +10,7 @@ import { filterRemovedAnnexObjects } from "../utils/file.js"
 import { validation } from "./validation"

 // A draft must have a dataset parent
-const draftFiles = async (dataset, args, { userInfo }) => {
+export const draftFiles = async (dataset, args, { userInfo }) => {
   const hexsha = await getDraftRevision(dataset.id)
   const files = await getFiles(dataset.id, args.tree || hexsha)
   return filterRemovedAnnexObjects(dataset.id, userInfo)(files)
@@ -0,0 +1,25 @@
+import { createEvent } from "../../libs/events"
+import { checkDatasetWrite } from "../permissions"
+
+/**
+ * Create a git event
+ */
+export const createGitEvent = async (
+  obj,
+  { datasetId, commit, reference },
+  { user, userInfo },
+) => {
+  await checkDatasetWrite(datasetId, user, userInfo)
+  const event = await createEvent(
+    datasetId,
+    user,
+    {
+      type: "git",
+      commit,
+      reference,
+    },
+    "",
+    true,
+  )
+  return event.toObject()
+}
@@ -43,6 +43,8 @@ import {
   finishImportRemoteDataset,
   importRemoteDataset,
 } from "./importRemoteDataset"
+import { saveAdminNote } from "./datasetEvents"
+import { createGitEvent } from "./gitEvents"

 const Mutation = {
   createDataset,
@@ -89,6 +91,8 @@ const Mutation = {
   importRemoteDataset,
   finishImportRemoteDataset,
   updateUser,
+  saveAdminNote,
+  createGitEvent,
 }

 export default Mutation
@@ -4,6 +4,7 @@ import Permission from "../../models/permission"
 import type { PermissionDocument } from "../../models/permission"
 import { checkDatasetAdmin } from "../permissions"
 import { user } from "./user"
+import { createEvent, updateEvent } from "../../libs/events"

 interface DatasetPermission {
   id: string
@@ -40,8 +41,16 @@ const publishPermissions = async (datasetId) => {
   return permissionsUpdated
 }

+/**
+ * Apply permission updates to a list of users
+ */
 async function updateUsers(datasetId: string, level: string, users) {
   for (const user of users) {
+    const event = await createEvent(datasetId, user, {
+      type: "permissionChange",
+      target: user.id,
+      level: level,
+    })
     await Permission.updateOne(
       {
         datasetId: datasetId,
@@ -54,6 +63,7 @@ async function updateUsers(datasetId: string, level: string, users) {
       },
       { upsert: true },
     ).exec()
+    await updateEvent(event)
   }
   return publishPermissions(datasetId)
 }
@@ -7,7 +7,7 @@ import { getDatasetWorker } from "../../libs/datalad-service"

 export const publishDataset = (obj, { datasetId }, { user, userInfo }) => {
   return checkDatasetWrite(datasetId, user, userInfo).then(async () => {
-    await updatePublic(datasetId, true)
+    await updatePublic(datasetId, true, userInfo)
     const uri = `${getDatasetWorker(datasetId)}/datasets/${datasetId}/publish`
     return await request
       .post(uri)
@@ -5,10 +5,30 @@
 import { setReadme } from "../../datalad/readme"
 import { checkDatasetWrite } from "../permissions"
 export { readme } from "../../datalad/readme"
+import { draftFiles } from "./draft"

-export const updateReadme = (obj, { datasetId, value }, { user, userInfo }) => {
-  return checkDatasetWrite(datasetId, user, userInfo).then(() => {
-    // Save to backend
-    return setReadme(datasetId, value, userInfo).then(() => true)
+export async function updateReadme(
+  obj,
+  { datasetId, value },
+  { user, userInfo },
+) {
+  await checkDatasetWrite(datasetId, user, userInfo)
+  const files = await draftFiles({ id: datasetId }, { tree: "HEAD" }, {
+    userInfo,
   })
+  // Default to README.md if none exists
+  let filename = "README.md"
+  for (const file of files) {
+    if (
+      file.filename === "README.md" || file.filename === "README.rst" ||
+      file.filename === "README.txt" ||
+      file.filename === "README"
+    ) {
+      filename = file.filename
+      break
+    }
+  }
+  // Save to backend
+  await setReadme(datasetId, value, filename, userInfo)
+  return true
 }
@@ -136,20 +136,23 @@ export const undoDeprecateSnapshot = async (
 }

 export const participantCount = (obj, { modality }) => {
+  const cacheKey = modality === "NIH" ? "NIH" : modality || "all"
   const cache = new CacheItem(
     redis,
     CacheType.participantCount,
-    [modality || "all"],
+    [cacheKey],
     3600,
   )
+
   return cache.get(async () => {
     const queryHasSubjects = {
-      "summary.subjects": {
-        $exists: true,
-      },
+      "summary.subjects": { $exists: true },
     }
-    const matchQuery = modality
-      ? {
+
+    let matchQuery: Record<string, unknown> = queryHasSubjects
+
+    if (modality && modality !== "NIH") {
+      matchQuery = {
         $and: [
           queryHasSubjects,
           {
@@ -157,7 +160,19 @@ export const participantCount = (obj, { modality }) => {
           },
         ],
       }
-      : queryHasSubjects
+    } else if (modality === "NIH") {
+      // When modality is 'NIH', we don't filter by a specific modality.
+      // Instead, we query for datasets that have any modality within the NIH portal
+      matchQuery = {
+        $and: [
+          queryHasSubjects,
+          {
+            "summary.modalities": { $exists: true },
+          },
+        ],
+      }
+    }
+
     const aggregateResult = await DatasetModel.aggregate([
       {
         $match: {
@@ -198,8 +213,12 @@ export const participantCount = (obj, { modality }) => {
         },
       },
     ]).exec()
-    if (aggregateResult.length) return aggregateResult[0].participantCount
-    else return 0
+
+    if (aggregateResult.length) {
+      return aggregateResult[0].participantCount
+    } else {
+      return 0
+    }
   })
 }

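For reference, the three match-query shapes produced by the branches above (the modality-specific condition in the middle branch sits outside this diff's context and is shown only as a placeholder comment):

// modality undefined: subjects filter only
const all = { "summary.subjects": { $exists: true } }

// modality set to any non-NIH value: subjects filter AND the modality condition
const byModality = {
  $and: [
    { "summary.subjects": { $exists: true } },
    { /* modality filter, elided from this diff */ },
  ],
}

// modality "NIH": subjects filter AND any recorded modalities
const nih = {
  $and: [
    { "summary.subjects": { $exists: true } },
    { "summary.modalities": { $exists: true } },
  ],
}
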
@@ -19,7 +19,12 @@ export async function summary(dataset): Promise<Partial<SummaryDocument>> {
   if (datasetSummary) {
     return {
       ...datasetSummary,
-      primaryModality: datasetSummary?.modalities[0],
+      // Lowercase all modality fields
+      modalities: datasetSummary?.modalities?.map((str) => str.toLowerCase()),
+      secondaryModalities: datasetSummary?.secondaryModalities?.map(
+        (str) => str.toLowerCase(),
+      ),
+      primaryModality: datasetSummary?.modalities[0]?.toLowerCase(),
     }
   } else {
     return null
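
A quick illustration of the lowercasing (input values are invented for the example):

const datasetSummary = {
  modalities: ["MRI", "EEG"],
  secondaryModalities: ["Diffusion"],
}
const summary = {
  ...datasetSummary,
  modalities: datasetSummary.modalities.map((str) => str.toLowerCase()),
  secondaryModalities: datasetSummary.secondaryModalities.map((str) => str.toLowerCase()),
  primaryModality: datasetSummary.modalities[0]?.toLowerCase(),
}
// summary.modalities -> ["mri", "eeg"], summary.primaryModality -> "mri"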
@@ -188,6 +188,10 @@ export const typeDefs = `
   importRemoteDataset(datasetId: ID!, url: String!): ID
   # Finish and notify import is done, returns true if successful
   finishImportRemoteDataset(id: ID!, success: Boolean!, message: String): Boolean
+  # Create or update an admin note on a dataset
+  saveAdminNote(id: ID, datasetId: ID!, note: String!): DatasetEvent
+  # Create a git event log for dataset changes
+  createGitEvent(datasetId: ID!, commit: String!, reference: String!): DatasetEvent
 }

 # Anonymous dataset reviewer
@@ -412,6 +416,8 @@ export const typeDefs = `
   reviewers: [DatasetReviewer]
   # Dataset belongs to Brain Initiative
   brainInitiative: Boolean
+  # Log of events associated with this dataset
+  events: [DatasetEvent]
 }

 type DatasetDerivatives {
@@ -849,6 +855,32 @@ export const typeDefs = `
   flagger: User
   createdAt: DateTime
 }
+
+type DatasetEventDescription {
+  type: String
+  version: String
+  public: Boolean
+  target: User
+  level: String
+  ref: String
+  message: String
+}
+
+# Dataset events
+type DatasetEvent {
+  # Unique identifier for the event
+  id: ID
+  # Timestamp of the event
+  timestamp: DateTime
+  # User associated with the event
+  user: User
+  # Event description object
+  event: DatasetEventDescription
+  # True if the event succeeded
+  success: Boolean
+  # Notes associated with the event
+  note: String
+}
 `

 schemaComposer.addTypeDefs(typeDefs)
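
Hypothetical client-side usage of the new schema additions, written as plain query strings (variable values are supplied by the caller; the dataset query itself is defined elsewhere in this schema):

const datasetEventsQuery = `
  query DatasetEvents($id: ID!) {
    dataset(id: $id) {
      events {
        id
        timestamp
        success
        note
        event { type version public level }
      }
    }
  }
`

const saveAdminNoteMutation = `
  mutation SaveAdminNote($datasetId: ID!, $note: String!) {
    saveAdminNote(datasetId: $datasetId, note: $note) {
      id
      note
    }
  }
`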
@@ -0,0 +1,51 @@
+import DatasetEvent from "../models/datasetEvents"
+import type {
+  DatasetEventDocument,
+  DatasetEventType,
+} from "../models/datasetEvents"
+import * as Sentry from "@sentry/node"
+/**
+ * Create a new dataset event
+ */
+export async function createEvent(
+  datasetId: string,
+  user: string,
+  event: DatasetEventType,
+  note: string = "",
+  success: boolean = false,
+): Promise<DatasetEventDocument> {
+  // Save a Sentry breadcrumb to help debug complex server events
+  const breadcrumb: Sentry.Breadcrumb = {
+    category: "dataset-event",
+    message: `${event.type} event created for dataset ${datasetId}`,
+    level: "info",
+    data: {
+      datasetId,
+      user,
+      event,
+      note,
+    },
+  }
+  Sentry.addBreadcrumb(breadcrumb)
+  const created = new DatasetEvent({
+    datasetId,
+    userId: user,
+    event,
+    note,
+    // Initially create the event as failed - update to success on successful state
+    success,
+  })
+  await created.save()
+  return created
+}
+
+/**
+ * Call when event is finished to mark complete or add failure info
+ */
+export async function updateEvent(
+  event: DatasetEventDocument,
+  success: boolean = true,
+) {
+  event.success = success
+  await event.save()
+}
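
Callers in this release generally skip updateEvent when the wrapped operation throws, leaving the default success: false in place. The optional second parameter also allows recording an explicit failure; a sketch (riskyOperation is hypothetical):

import { createEvent, updateEvent } from "./events"

// Hypothetical operation that may reject
async function riskyOperation(): Promise<void> {}

export async function tracked(datasetId: string, userId: string) {
  const event = await createEvent(datasetId, userId, { type: "upload" })
  try {
    await riskyOperation()
    await updateEvent(event) // success defaults to true
  } catch (err) {
    await updateEvent(event, false) // record the failure explicitly
    throw err
  }
}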
@@ -0,0 +1,106 @@
+import mongoose from "mongoose"
+import { MongoMemoryServer } from "mongodb-memory-server"
+import DatasetEvent from "../datasetEvents"
+import type { DatasetEventDocument, DatasetEventType } from "../datasetEvents"
+import type { OpenNeuroUserId } from "../../types/user"
+
+describe("DatasetEvent Model", () => {
+  let mongoServer: MongoMemoryServer
+
+  beforeAll(async () => {
+    mongoServer = await MongoMemoryServer.create()
+    const mongoUri = mongoServer.getUri()
+    await mongoose.connect(mongoUri)
+  })
+
+  afterAll(async () => {
+    await mongoose.disconnect()
+    await mongoServer.stop()
+  })
+
+  beforeEach(async () => {
+    await DatasetEvent.deleteMany({})
+  })
+
+  it("should create a new DatasetEvent", async () => {
+    const eventData: Partial<DatasetEventDocument> = {
+      datasetId: "ds000001",
+      timestamp: new Date(),
+      userId: "user123" as OpenNeuroUserId,
+      event: {
+        type: "created",
+      },
+      success: true,
+      note: "Dataset created successfully",
+    }
+
+    const datasetEvent = new DatasetEvent(eventData)
+    const savedDatasetEvent = await datasetEvent.save()
+
+    expect(savedDatasetEvent._id).toBeDefined()
+    expect(savedDatasetEvent.datasetId).toBe("ds000001")
+    expect(savedDatasetEvent.userId).toBe("user123")
+    expect(savedDatasetEvent.event.type).toBe("created")
+    expect(savedDatasetEvent.success).toBe(true)
+    expect(savedDatasetEvent.note).toBe("Dataset created successfully")
+    expect(savedDatasetEvent.timestamp).toBeInstanceOf(Date)
+  })
+
+  it("should create a DatasetEvent with default values", async () => {
+    const eventData: Partial<DatasetEventDocument> = {
+      datasetId: "ds000002",
+      timestamp: new Date(),
+      userId: "user456" as OpenNeuroUserId,
+      event: {
+        type: "versioned",
+        version: "1.0.0",
+      },
+    }
+
+    const datasetEvent = new DatasetEvent(eventData)
+    const savedDatasetEvent = await datasetEvent.save()
+
+    expect(savedDatasetEvent._id).toBeDefined()
+    expect(savedDatasetEvent.success).toBe(false)
+    expect(savedDatasetEvent.note).toBe("")
+  })
+
+  it("should require datasetId, timestamp, user, and event", async () => {
+    const eventData = {}
+
+    const datasetEvent = new DatasetEvent(eventData)
+
+    await expect(datasetEvent.save()).rejects.toThrow()
+  })
+
+  it("should handle different event types", async () => {
+    const events: DatasetEventType[] = [
+      { type: "created" },
+      { type: "versioned", version: "1.0.0" },
+      { type: "deleted" },
+      { type: "published", public: true },
+      {
+        type: "permissionChange",
+        target: "user789" as OpenNeuroUserId,
+        level: "admin",
+      },
+      { type: "git", reference: "head/refs/main", commit: "12345667" },
+      { type: "upload" },
+      { type: "note", admin: false },
+    ]
+
+    for (const event of events) {
+      const eventData: Partial<DatasetEventDocument> = {
+        datasetId: "ds000003",
+        timestamp: new Date(),
+        userId: "user101" as OpenNeuroUserId,
+        event: event,
+        success: true,
+        note: "Testing different event types",
+      }
+      const datasetEvent = new DatasetEvent(eventData)
+      const savedDatasetEvent = await datasetEvent.save()
+      expect(savedDatasetEvent.event.type).toBe(event.type)
+    }
+  })
+})
@@ -0,0 +1,139 @@
+import mongoose from "mongoose"
+import type { Document } from "mongoose"
+import type { OpenNeuroUserId } from "../types/user"
+import { v4 as uuidv4 } from "uuid"
+import type { UserDocument } from "./user"
+const { Schema, model } = mongoose
+
+const _datasetEventTypes = [
+  "created",
+  "versioned",
+  "deleted",
+  "published",
+  "permissionChange",
+  "git",
+  "upload",
+  "note",
+] as const
+
+/**
+ * Various events that occur affecting one dataset
+ *
+ * created - Dataset was created
+ * versioned - Dataset has a snapshot created
+ * deleted - The dataset was deleted
+ * published - Dataset was made public
+ * permissionChange - Dataset permissions were modified
+ * git - A git event modified the dataset's repository (git history provides details)
+ * upload - A non-git upload occurred (typically one file changed)
+ * note - A note unrelated to another event
+ */
+export type DatasetEventName = typeof _datasetEventTypes[number]
+
+export type DatasetEventCommon = {
+  type: DatasetEventName
+}
+
+export type DatasetEventCreated = DatasetEventCommon & {
+  type: "created"
+}
+
+export type DatasetEventVersioned = DatasetEventCommon & {
+  type: "versioned"
+  version: string
+}
+
+export type DatasetEventDeleted = DatasetEventCommon & {
+  type: "deleted"
+}
+
+export type DatasetEventPublished = DatasetEventCommon & {
+  type: "published"
+  // True if made public, false if made private
+  public: boolean
+}
+
+export type DatasetEventPermissionChange = DatasetEventCommon & {
+  type: "permissionChange"
+  // User with the permission being changed
+  target: OpenNeuroUserId
+  level: string
+}
+
+export type DatasetEventGit = DatasetEventCommon & {
+  type: "git"
+  commit: string
+  reference: string
+}
+
+export type DatasetEventUpload = DatasetEventCommon & {
+  type: "upload"
+}
+
+export type DatasetEventNote = DatasetEventCommon & {
+  type: "note"
+  // Is this note visible only to site admins?
+  admin: boolean
+}
+
+/**
+ * Description of a dataset event
+ */
+export type DatasetEventType =
+  | DatasetEventCreated
+  | DatasetEventVersioned
+  | DatasetEventDeleted
+  | DatasetEventPublished
+  | DatasetEventPermissionChange
+  | DatasetEventGit
+  | DatasetEventUpload
+  | DatasetEventNote
+
+/**
+ * Dataset events log changes to a dataset
+ */
+export interface DatasetEventDocument extends Document {
+  // Unique id for the event
+  id: string
+  // Affected dataset
+  datasetId: string
+  // Timestamp of the event
+  timestamp: Date
+  // User id that triggered the event
+  userId: string
+  // User that triggered the event
+  user: UserDocument
+  // A description of the event, optional but recommended to provide context
+  event: DatasetEventType
+  // Did the action logged succeed?
+  success: boolean
+  // Admin notes
+  note: string
+}
+
+const datasetEventSchema = new Schema<DatasetEventDocument>({
+  id: { type: String, required: true, default: uuidv4 },
+  datasetId: { type: String, required: true },
+  timestamp: { type: Date, default: Date.now },
+  userId: { type: String, required: true },
+  event: {
+    type: Object,
+    required: true,
+  },
+  success: { type: Boolean, default: false },
+  note: { type: String, default: "" },
+})
+
+datasetEventSchema.virtual("user", {
+  ref: "User",
+  localField: "userId",
+  foreignField: "id",
+  justOne: true,
+})
+
+const DatasetEvent = model<DatasetEventDocument>(
+  "DatasetEvent",
+  datasetEventSchema,
+)
+
+export default DatasetEvent
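
Because DatasetEventType is a discriminated union on the type field, consumers can narrow an event safely; a small sketch of one possible consumer (describeEvent is illustrative, not part of this package):

import type { DatasetEventType } from "./datasetEvents"

function describeEvent(event: DatasetEventType): string {
  switch (event.type) {
    case "versioned":
      // Narrowed to DatasetEventVersioned, so version is available here
      return `snapshot ${event.version} created`
    case "permissionChange":
      return `permission for ${event.target} set to ${event.level}`
    case "published":
      return event.public ? "made public" : "made private"
    default:
      return event.type
  }
}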
@@ -0,0 +1,2 @@
+/** OpenNeuro UUID string used to identify a user uniquely */
+export type OpenNeuroUserId = string