@openneuro/server 4.47.7 → 5.0.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. package/package.json +10 -7
  2. package/src/app.ts +1 -1
  3. package/src/cache/__tests__/tree.spec.ts +212 -0
  4. package/src/cache/tree.ts +148 -0
  5. package/src/datalad/__tests__/dataRetentionNotifications.spec.ts +11 -0
  6. package/src/datalad/__tests__/files.spec.ts +249 -0
  7. package/src/datalad/dataRetentionNotifications.ts +5 -0
  8. package/src/datalad/dataset.ts +29 -1
  9. package/src/datalad/files.ts +362 -39
  10. package/src/datalad/snapshots.ts +29 -54
  11. package/src/graphql/resolvers/__tests__/response-status.spec.ts +42 -0
  12. package/src/graphql/resolvers/build-search-query.ts +391 -0
  13. package/src/graphql/resolvers/cache.ts +5 -1
  14. package/src/graphql/resolvers/dataset-search.ts +40 -23
  15. package/src/graphql/resolvers/datasetEvents.ts +48 -78
  16. package/src/graphql/resolvers/draft.ts +5 -2
  17. package/src/graphql/resolvers/holdDeletion.ts +21 -0
  18. package/src/graphql/resolvers/index.ts +6 -0
  19. package/src/graphql/resolvers/mutation.ts +2 -0
  20. package/src/graphql/resolvers/response-status.ts +43 -0
  21. package/src/graphql/resolvers/snapshots.ts +9 -18
  22. package/src/graphql/resolvers/summary.ts +17 -0
  23. package/src/graphql/schema.ts +54 -14
  24. package/src/handlers/datalad.ts +4 -0
  25. package/src/handlers/doi.ts +32 -36
  26. package/src/libs/doi/__tests__/doi.spec.ts +50 -12
  27. package/src/libs/doi/__tests__/validate.spec.ts +110 -0
  28. package/src/libs/doi/index.ts +108 -71
  29. package/src/libs/doi/metadata.ts +101 -0
  30. package/src/libs/doi/validate.ts +59 -0
  31. package/src/libs/presign.ts +137 -0
  32. package/src/models/dataset.ts +2 -0
  33. package/src/models/doi.ts +7 -0
  34. package/src/queues/producer-methods.ts +9 -5
  35. package/src/queues/queue-schedule.ts +1 -1
  36. package/src/queues/queues.ts +2 -2
  37. package/src/routes.ts +10 -2
  38. package/src/types/datacite/LICENSE +37 -0
  39. package/src/types/datacite/README.md +3 -0
  40. package/src/types/datacite/datacite-v4.5.json +643 -0
  41. package/src/types/datacite/datacite-v4.5.ts +281 -0
  42. package/src/types/datacite.ts +53 -63
  43. package/src/utils/datacite-mapper.ts +7 -3
  44. package/src/utils/datacite-utils.ts +12 -15
  45. package/src/libs/doi/__tests__/__snapshots__/doi.spec.ts.snap +0 -17
@@ -1,12 +1,10 @@
1
1
  import DatasetEvent from "../../models/datasetEvents"
2
+ import { toDbStatus, toGraphqlStatus } from "./response-status"
3
+ import type { DbStatus, GraphqlStatus } from "./response-status"
2
4
  import User from "../../models/user"
3
- import type { UserDocument } from "../../models/user"
4
5
  import { checkDatasetAdmin } from "../permissions"
5
6
  import type {
6
- DatasetEventContributorCitation,
7
- DatasetEventContributorCitationResponse,
8
7
  DatasetEventContributorRequest,
9
- DatasetEventContributorRequestResponse,
10
8
  DatasetEventDocument,
11
9
  } from "../../models/datasetEvents"
12
10
  import { UserNotificationStatus } from "../../models/userNotificationStatus"
@@ -24,34 +22,12 @@ function isContributorRequest(
24
22
  return event.event.type === "contributorRequest"
25
23
  }
26
24
 
27
- function isContributorCitation(
28
- event: DatasetEventDocument,
29
- ): event is DatasetEventDocument & { event: DatasetEventContributorCitation } {
30
- return event.event.type === "contributorCitation"
31
- }
32
-
33
- function isContributorRequestResponse(
34
- event: DatasetEventDocument,
35
- ): event is DatasetEventDocument & {
36
- event: DatasetEventContributorRequestResponse
37
- } {
38
- return event.event.type === "contributorRequestResponse"
39
- }
40
-
41
- function isContributorCitationResponse(
42
- event: DatasetEventDocument,
43
- ): event is DatasetEventDocument & {
44
- event: DatasetEventContributorCitationResponse
45
- } {
46
- return event.event.type === "contributorCitationResponse"
47
- }
48
-
49
25
  /** Enriched type for GraphQL */
50
26
  export type EnrichedDatasetEvent =
51
27
  & Omit<DatasetEventDocument, "notificationStatus">
52
28
  & {
53
29
  hasBeenRespondedTo?: boolean
54
- responseStatus?: "pending" | "accepted" | "denied" | null
30
+ responseStatus?: DbStatus | null
55
31
  notificationStatus?: UserNotificationStatusDocument
56
32
  }
57
33
 
@@ -81,11 +57,11 @@ export async function datasetEvents(
81
57
  }) as UserNotificationStatusDocument
82
58
  }
83
59
 
60
+ // Internal representation stays lowercase (DbStatus) here; the
61
+ // GraphQL resolver edge (DatasetEventTypeResolvers in ./index.ts)
62
+ // converts to the uppercase ResponseStatusType enum at query time.
84
63
  if ("resolutionStatus" in e.event) {
85
- ev.responseStatus = e.event.resolutionStatus as
86
- | "pending"
87
- | "accepted"
88
- | "denied"
64
+ ev.responseStatus = e.event.resolutionStatus as DbStatus
89
65
  ev.hasBeenRespondedTo = ev.responseStatus !== null &&
90
66
  ev.responseStatus !== "pending"
91
67
  } else {
@@ -103,51 +79,29 @@ export async function datasetEvents(
103
79
  )
104
80
  }
105
81
 
106
- // --- Field-level resolvers ---
107
- export const DatasetEventResolvers = {
108
- hasBeenRespondedTo: (ev: EnrichedDatasetEvent) =>
109
- ev.hasBeenRespondedTo ?? false,
110
- responseStatus: (ev: EnrichedDatasetEvent) => ev.responseStatus ?? null,
111
- notificationStatus: (ev: EnrichedDatasetEvent) =>
112
- ev.notificationStatus?.status ?? "UNREAD",
113
- requestId: (ev: EnrichedDatasetEvent) =>
114
- isContributorRequest(ev) || isContributorRequestResponse(ev)
115
- ? ev.event.requestId
116
- : null,
117
- target: async (ev: EnrichedDatasetEvent): Promise<UserDocument | null> => {
118
- const targetUserId = isContributorRequestResponse(ev) ||
119
- isContributorCitation(ev) ||
120
- isContributorCitationResponse(ev)
121
- ? ev.event.targetUserId
122
- : undefined
123
-
124
- if (!targetUserId) return null
125
- // Use findOne({ id }) for UUID strings
126
- return User.findOne({ id: targetUserId })
127
- },
128
- user: async (ev: EnrichedDatasetEvent): Promise<UserDocument | null> =>
129
- ev.userId ? User.findOne({ id: ev.userId }) : null,
130
- contributorData: (ev: EnrichedDatasetEvent) => {
131
- let data: DatasetEventContributorCitation["contributorData"] | undefined
132
-
133
- if (
134
- (isContributorCitation(ev) || isContributorCitationResponse(ev)) &&
135
- ev.event.contributorData
136
- ) {
137
- data = ev.event.contributorData
138
- } else if (
139
- (isContributorRequest(ev) || isContributorRequestResponse(ev)) &&
140
- ev.event.contributorData
141
- ) {
142
- data = ev.event.contributorData
143
- }
82
+ /**
83
+ * Minimal type-level resolvers wired via ./index.ts. These exist so the
84
+ * case conversion for resolutionStatus / responseStatus runs at the
85
+ * GraphQL edge, regardless of which query/mutation produced the event
86
+ * object. Only the status-related fields are declared here; every other
87
+ * field of DatasetEvent and DatasetEventDescription continues to resolve
88
+ * via default property access.
89
+ */
90
+ export const DatasetEventTypeResolvers = {
91
+ responseStatus: (
92
+ ev: { responseStatus?: DbStatus | null },
93
+ ): GraphqlStatus | null => toGraphqlStatus(ev.responseStatus),
94
+ hasBeenRespondedTo: (
95
+ ev: { hasBeenRespondedTo?: boolean },
96
+ ): boolean => ev.hasBeenRespondedTo ?? false,
97
+ }
144
98
 
145
- return {
146
- ...data,
147
- contributorType: data?.contributorType || "Researcher",
148
- }
149
- },
99
+ export const DatasetEventDescriptionTypeResolvers = {
100
+ resolutionStatus: (
101
+ evDesc: { resolutionStatus?: DbStatus | null },
102
+ ): GraphqlStatus | null => toGraphqlStatus(evDesc.resolutionStatus),
150
103
  }
104
+
151
105
  /**
152
106
  * Create a 'contributor request' event
153
107
  */
@@ -235,13 +189,13 @@ export async function processContributorRequest(
235
189
  datasetId,
236
190
  requestId,
237
191
  targetUserId,
238
- resolutionStatus,
192
+ resolutionStatus: graphqlResolutionStatus,
239
193
  reason,
240
194
  }: {
241
195
  datasetId: string
242
196
  requestId: string
243
197
  targetUserId: string
244
- resolutionStatus: "accepted" | "denied"
198
+ resolutionStatus: "ACCEPTED" | "DENIED"
245
199
  reason?: string
246
200
  },
247
201
  { user: currentUserId, userInfo }: {
@@ -253,6 +207,14 @@ export async function processContributorRequest(
253
207
  throw new Error("Authentication required to process contributor requests.")
254
208
  }
255
209
 
210
+ const resolutionStatus = toDbStatus(graphqlResolutionStatus)
211
+
212
+ // Note that this is technically possible with hand-crafted GraphQL
213
+ // Separating types in the schema will enforce this at the endpoint
214
+ if (resolutionStatus === "pending") {
215
+ throw new Error("PENDING is not a valid resolution action.")
216
+ }
217
+
256
218
  await checkDatasetAdmin(datasetId, currentUserId, userInfo)
257
219
 
258
220
  const originalRequestEvent = await DatasetEvent.findOne({
@@ -437,15 +399,23 @@ export async function processContributorCitation(
437
399
  obj,
438
400
  {
439
401
  eventId,
440
- status,
402
+ status: graphqlStatus,
441
403
  }: {
442
404
  eventId: string
443
- status: "accepted" | "denied"
405
+ status: "ACCEPTED" | "DENIED"
444
406
  },
445
407
  { user, userInfo }: { user: string; userInfo: { admin?: boolean } },
446
408
  ) {
447
409
  if (!user) throw new Error("Authentication required.")
448
410
 
411
+ const status = toDbStatus(graphqlStatus)
412
+
413
+ // Note that this is technically possible with hand-crafted GraphQL
414
+ // Separating types in the schema will enforce this at the endpoint
415
+ if (status === "pending") {
416
+ throw new Error("PENDING is not a valid resolution action.")
417
+ }
418
+
449
419
  const citationEvent = await DatasetEvent.findOne({ id: eventId })
450
420
 
451
421
  if (!citationEvent || citationEvent.event.type !== "contributorCitation") {
@@ -5,7 +5,7 @@ import { description } from "./description.js"
5
5
  import { readme } from "./readme.js"
6
6
  import { getDraftRevision } from "../../datalad/draft.js"
7
7
  import { checkDatasetWrite } from "../permissions.js"
8
- import { getFiles } from "../../datalad/files"
8
+ import { getFiles, getFilesRecursive } from "../../datalad/files"
9
9
  import { filterRemovedAnnexObjects } from "../utils/file.js"
10
10
  import { validation } from "./validation"
11
11
  import FileCheck from "../../models/fileCheck"
@@ -14,7 +14,10 @@ import { contributors } from "../../datalad/contributors"
14
14
  // A draft must have a dataset parent
15
15
  export const draftFiles = async (dataset, args, { userInfo }) => {
16
16
  const hexsha = await getDraftRevision(dataset.id)
17
- const files = await getFiles(dataset.id, args.tree || hexsha)
17
+ const treeish = args.tree || hexsha
18
+ const files = args.recursive
19
+ ? await getFilesRecursive(dataset.id, treeish)
20
+ : await getFiles(dataset.id, treeish)
18
21
  return filterRemovedAnnexObjects(dataset.id, userInfo)(files)
19
22
  }
20
23
 
@@ -0,0 +1,21 @@
1
+ import Dataset from "../../models/dataset"
2
+
3
+ /**
4
+ * Toggle the holdDeletion flag on a dataset to prevent automated deletion.
5
+ * Requires site admin access.
6
+ */
7
+ export async function holdDeletion(
8
+ _obj: Record<string, unknown>,
9
+ { datasetId, hold }: { datasetId: string; hold: boolean },
10
+ { userInfo }: { userInfo: { admin: boolean } },
11
+ ): Promise<boolean> {
12
+ if (userInfo?.admin && datasetId.length === 8 && datasetId.startsWith("ds")) {
13
+ try {
14
+ await Dataset.updateOne({ id: datasetId }, { holdDeletion: hold }).exec()
15
+ return true
16
+ } catch (_err) {
17
+ return false
18
+ }
19
+ }
20
+ return false
21
+ }
@@ -7,6 +7,10 @@ import Draft from "./draft.js"
7
7
  import Snapshot from "./snapshots.js"
8
8
  import User from "./user.js"
9
9
  import Comment from "./comment.js"
10
+ import {
11
+ DatasetEventDescriptionTypeResolvers,
12
+ DatasetEventTypeResolvers,
13
+ } from "./datasetEvents"
10
14
 
11
15
  export default {
12
16
  // Scalars
@@ -22,4 +26,6 @@ export default {
22
26
  Draft,
23
27
  Snapshot,
24
28
  Comment,
29
+ DatasetEvent: DatasetEventTypeResolvers,
30
+ DatasetEventDescription: DatasetEventDescriptionTypeResolvers,
25
31
  }
@@ -53,6 +53,7 @@ import {
53
53
  } from "./datasetEvents"
54
54
  import { createGitEvent } from "./gitEvents"
55
55
  import { fsckDataset, updateFileCheck } from "./fileCheck"
56
+ import { holdDeletion } from "./holdDeletion"
56
57
  import { updateContributors } from "../../datalad/contributors"
57
58
  import { updateWorkerTask } from "./worker"
58
59
 
@@ -92,6 +93,7 @@ const Mutation = {
92
93
  cacheClear,
93
94
  revalidate,
94
95
  fsckDataset,
96
+ holdDeletion,
95
97
  prepareRepoAccess,
96
98
  reexportRemotes,
97
99
  resetDraft,
@@ -0,0 +1,43 @@
1
+ /**
2
+ * Case mapping between the DB representation of resolutionStatus
3
+ * (lowercase: "pending" | "accepted" | "denied") and the GraphQL
4
+ * SDL enum ResponseStatusType (uppercase: "PENDING" | "ACCEPTED" | "DENIED").
5
+ *
6
+ * The DB layer enforces lowercase via Mongoose enum at
7
+ * packages/openneuro-server/src/models/datasetEvents.ts. These helpers mediate
8
+ * between that persisted representation and the typed GraphQL surface.
9
+ */
10
+
11
+ export type DbStatus = "pending" | "accepted" | "denied"
12
+ export type GraphqlStatus = "PENDING" | "ACCEPTED" | "DENIED"
13
+
14
+ const DB_TO_GRAPHQL: Record<DbStatus, GraphqlStatus> = {
15
+ pending: "PENDING",
16
+ accepted: "ACCEPTED",
17
+ denied: "DENIED",
18
+ }
19
+
20
+ const GRAPHQL_TO_DB: Record<GraphqlStatus, DbStatus> = {
21
+ PENDING: "pending",
22
+ ACCEPTED: "accepted",
23
+ DENIED: "denied",
24
+ }
25
+
26
+ export function toGraphqlStatus(
27
+ value: DbStatus | null | undefined,
28
+ ): GraphqlStatus | null {
29
+ if (value === null || value === undefined) return null
30
+ const mapped = DB_TO_GRAPHQL[value]
31
+ if (!mapped) {
32
+ throw new Error(`toGraphqlStatus: unrecognized DB status value '${value}'`)
33
+ }
34
+ return mapped
35
+ }
36
+
37
+ export function toDbStatus(value: GraphqlStatus): DbStatus {
38
+ const mapped = GRAPHQL_TO_DB[value]
39
+ if (!mapped) {
40
+ throw new Error(`toDbStatus: unrecognized GraphQL status value '${value}'`)
41
+ }
42
+ return mapped
43
+ }
@@ -6,7 +6,7 @@ import { readme } from "./readme.js"
6
6
  import { description } from "./description.js"
7
7
  import { summary } from "./summary"
8
8
  import { issuesSnapshotStatus, snapshotIssues } from "./issues.js"
9
- import { getFiles } from "../../datalad/files"
9
+ import { getFiles, getFilesRecursive } from "../../datalad/files"
10
10
  import Summary from "../../models/summary"
11
11
  import DatasetModel from "../../models/dataset"
12
12
  import { filterRemovedAnnexObjects } from "../utils/file"
@@ -14,7 +14,6 @@ import DeprecatedSnapshot from "../../models/deprecatedSnapshot"
14
14
  import { redis } from "../../libs/redis"
15
15
  import CacheItem, { CacheType } from "../../cache/item"
16
16
  import { normalizeDOI } from "../../libs/doi/normalize"
17
- import { downloadFiles } from "../../datalad/snapshots"
18
17
  import { snapshotValidation } from "./validation"
19
18
  import { advancedDatasetSearchConnection } from "./dataset-search"
20
19
  import { contributors } from "../../datalad/contributors"
@@ -34,10 +33,14 @@ export const snapshot = (obj, { datasetId, tag }, context) => {
34
33
  description: () => description(snapshot),
35
34
  readme: () => readme(snapshot),
36
35
  summary: () => summary({ id: datasetId, revision: snapshot.hexsha }),
37
- files: ({ tree }) =>
38
- getFiles(datasetId, tree || snapshot.hexsha).then(
36
+ files: ({ tree, recursive }) => {
37
+ const filesPromise = recursive
38
+ ? getFilesRecursive(datasetId, tree || snapshot.hexsha)
39
+ : getFiles(datasetId, tree || snapshot.hexsha)
40
+ return filesPromise.then(
39
41
  filterRemovedAnnexObjects(datasetId, context.userInfo),
40
- ),
42
+ )
43
+ },
41
44
  size: () =>
42
45
  Summary.findOne({ datasetId: datasetId, id: snapshot.hexsha })
43
46
  .exec()
@@ -45,7 +48,6 @@ export const snapshot = (obj, { datasetId, tag }, context) => {
45
48
  deprecated: () => deprecated({ datasetId, tag }),
46
49
  related: () => related(datasetId),
47
50
  onBrainlife: () => onBrainlife(snapshot),
48
- downloadFiles: () => downloadFiles(datasetId, tag),
49
51
  }))
50
52
  },
51
53
  )
@@ -140,17 +142,7 @@ export const undoDeprecateSnapshot = async (
140
142
 
141
143
  /** Query used to run a search for NIH datasets */
142
144
  const brainInitiativeQuery = {
143
- "bool": {
144
- "filter": [
145
- {
146
- "match": {
147
- "brainInitiative": {
148
- "query": "true",
149
- },
150
- },
151
- },
152
- ],
153
- },
145
+ brainInitiative: true,
154
146
  }
155
147
 
156
148
  export const participantCount = (obj, { modality }) => {
@@ -178,7 +170,6 @@ export const participantCount = (obj, { modality }) => {
178
170
  query: brainInitiativeQuery,
179
171
  datasetType: "All Public",
180
172
  datasetStatus: "",
181
- sortBy: "",
182
173
  after,
183
174
  first: 100,
184
175
  }, { user: null, userInfo: {} })
@@ -1,3 +1,4 @@
1
+ import { getDraftRevision } from "../../datalad/draft"
1
2
  import Summary from "../../models/summary"
2
3
  import type { SummaryDocument } from "../../models/summary"
3
4
 
@@ -51,3 +52,19 @@ export const updateSummary = (obj, args) => {
51
52
  .exec()
52
53
  .then(() => args.summary)
53
54
  }
55
+
56
+ /**
57
+ * Get the primary modality for a dataset from the validator summary.
58
+ * Returns undefined if no summary is available.
59
+ */
60
+ export async function getPrimaryModality(
61
+ datasetId: string,
62
+ ): Promise<string | undefined> {
63
+ try {
64
+ const revision = await getDraftRevision(datasetId)
65
+ const result = await summary({ id: datasetId, revision })
66
+ return result?.primaryModality || undefined
67
+ } catch {
68
+ return undefined
69
+ }
70
+ }
@@ -215,8 +215,8 @@ export const typeDefs = `
215
215
  datasetId: ID!
216
216
  targetUserId: ID!
217
217
  requestId: ID!
218
- resolutionStatus: String!
219
- reason: String
218
+ resolutionStatus: ResponseStatusType!
219
+ reason: String
220
220
  ): DatasetEvent
221
221
  # Create or update a fileCheck document
222
222
  updateFileCheck(
@@ -239,8 +239,10 @@ export const typeDefs = `
239
239
  ): DatasetEvent
240
240
  processContributorCitation(
241
241
  eventId: ID!
242
- status: String!
242
+ status: ResponseStatusType!
243
243
  ): DatasetEvent
244
+ # Hold or release automated deletion for a dataset
245
+ holdDeletion(datasetId: ID!, hold: Boolean!): Boolean
244
246
  # Update worker task queue status
245
247
  updateWorkerTask(
246
248
  id: ID!,
@@ -267,6 +269,45 @@ export const typeDefs = `
267
269
  url: String!
268
270
  }
269
271
 
272
+ # Sort options for advanced dataset search
273
+ enum SearchSortOption {
274
+ relevance
275
+ newest
276
+ oldest
277
+ activity
278
+ name_asc
279
+ name_desc
280
+ last_updated
281
+ }
282
+
283
+ # Search input for advanced dataset search
284
+ input DatasetSearchInput {
285
+ keywords: [String]
286
+ modality: String
287
+ ageRange: [Int]
288
+ subjectCountRange: [Int]
289
+ diagnosis: String
290
+ tasks: [String]
291
+ authors: [String]
292
+ sex: String
293
+ dateRange: String
294
+ species: String
295
+ studyStructure: String
296
+ studyDomains: [String]
297
+ bidsDatasetType: String
298
+ brainInitiative: Boolean
299
+ bodyParts: [String]
300
+ scannerManufacturers: [String]
301
+ scannerManufacturersModelNames: [String]
302
+ tracerNames: [String]
303
+ tracerRadionuclides: [String]
304
+ sortBy: SearchSortOption
305
+ "Filter datasets by a specific user's permissions"
306
+ userId: String
307
+ "Filter to only public datasets"
308
+ publicOnly: Boolean
309
+ }
310
+
270
311
  input DeleteFile {
271
312
  path: String!
272
313
  filename: String
@@ -323,7 +364,7 @@ export const typeDefs = `
323
364
 
324
365
  input SubjectMetadataInput {
325
366
  participantId: String!
326
- age: Int
367
+ age: Float
327
368
  sex: String
328
369
  group: String
329
370
  }
@@ -355,7 +396,7 @@ export const typeDefs = `
355
396
  openneuroPaperDOI: String
356
397
  seniorAuthor: String
357
398
  adminUsers: [String]
358
- ages: [Int]
399
+ ages: [Float]
359
400
  modalities: [String]
360
401
  grantFunderName: String
361
402
  grantIdentifier: String
@@ -488,6 +529,8 @@ export const typeDefs = `
488
529
  brainInitiative: Boolean
489
530
  # Log of events associated with this dataset
490
531
  events: [DatasetEvent]
532
+ # Hold automated deletion for this dataset
533
+ holdDeletion: Boolean
491
534
  }
492
535
 
493
536
  type DatasetDerivatives {
@@ -581,7 +624,7 @@ export const typeDefs = `
581
624
  # Validator issues (schema validator)
582
625
  validation: DatasetValidation
583
626
  # Committed files in the working tree
584
- files(tree: String): [DatasetFile]
627
+ files(tree: String, recursive: Boolean): [DatasetFile]
585
628
  # dataset_description.json fields
586
629
  description: Description
587
630
  # Dataset README
@@ -616,7 +659,7 @@ export const typeDefs = `
616
659
  # Validator issues (schema validator)
617
660
  validation: DatasetValidation
618
661
  # Snapshot files
619
- files(tree: String): [DatasetFile]
662
+ files(tree: String, recursive: Boolean): [DatasetFile]
620
663
  # dataset_description.json fields
621
664
  description: Description
622
665
  # Snapshot usage and download statistics
@@ -633,8 +676,6 @@ export const typeDefs = `
633
676
  onBrainlife: Boolean @cacheControl(maxAge: 10080, scope: PUBLIC)
634
677
  # Total size in bytes of this snapshot
635
678
  size: BigInt
636
- # Single list of files to download this snapshot (only available on snapshots)
637
- downloadFiles: [DatasetFile]
638
679
  # Contributors list from datacite.yml
639
680
  contributors: [Contributor]
640
681
  }
@@ -784,7 +825,7 @@ export const typeDefs = `
784
825
 
785
826
  type SubjectMetadata {
786
827
  participantId: String!
787
- age: Int
828
+ age: Float
788
829
  sex: String
789
830
  group: String
790
831
  }
@@ -871,7 +912,6 @@ export const typeDefs = `
871
912
  # File metadata and link to contents
872
913
  type DatasetFile {
873
914
  id: ID!
874
- key: String
875
915
  filename: String!
876
916
  size: BigInt
877
917
  annexed: Boolean
@@ -934,7 +974,7 @@ export const typeDefs = `
934
974
  openneuroPaperDOI: String
935
975
  seniorAuthor: String
936
976
  adminUsers: [String]
937
- ages: [Int]
977
+ ages: [Float]
938
978
  modalities: [String]
939
979
  grantFunderName: String
940
980
  grantIdentifier: String
@@ -975,7 +1015,7 @@ export const typeDefs = `
975
1015
  requestId: ID
976
1016
  reason: String
977
1017
  datasetId: ID
978
- resolutionStatus: String
1018
+ resolutionStatus: ResponseStatusType
979
1019
  contributorData: Contributor
980
1020
  }
981
1021
 
@@ -997,7 +1037,7 @@ export const typeDefs = `
997
1037
  datasetId: ID
998
1038
  # User's notification status event
999
1039
  notificationStatus: UserNotificationStatus
1000
- responseStatus: String
1040
+ responseStatus: ResponseStatusType
1001
1041
  hasBeenRespondedTo: Boolean
1002
1042
  }
1003
1043
 
@@ -86,7 +86,11 @@ export const getFile = async (req, res) => {
86
86
  */
87
87
  export const getObject = (req, res) => {
88
88
  const { datasetId, key } = req.params
89
+ const { filename } = req.query
89
90
  const worker = getDatasetWorker(datasetId)
91
+ if (req.query?.filename) {
92
+ res.set("Content-Disposition", `attachment; filename=${filename}`)
93
+ }
90
94
  // Backend depends on git object or git-annex key
91
95
  if (key.length === 40) {
92
96
  const uri = `${worker}/datasets/${datasetId}/objects/${key}`
@@ -1,44 +1,43 @@
1
1
  import config from "../config"
2
- import doi from "../libs/doi"
2
+ import { createDraftDoi } from "../libs/doi"
3
+ import { assembleMetadata } from "../libs/doi/metadata"
3
4
  import Doi from "../models/doi"
4
5
  import Snapshot from "../models/snapshot"
5
6
 
6
7
  export async function createSnapshotDoi(req, res) {
7
- let doiRes = null
8
8
  if (!config.doi.username || !config.doi.password) {
9
- return res.send({ doiRes: null })
9
+ return res.send({ doi: null })
10
10
  }
11
11
  const datasetId = req.params.datasetId
12
12
  const snapshotId = req.params.snapshotId
13
- const oldDesc = req.body
14
- const doiExists = await Doi.findOne({
15
- datasetId: datasetId,
16
- snapshotId: snapshotId,
17
- })
13
+
14
+ // Return existing DOI if already registered
15
+ const doiExists = await Doi.findOne({ datasetId, snapshotId })
18
16
  if (doiExists) {
19
- doiRes = doiExists.doi
20
- return res.send({ doi: doiRes })
21
- } else {
22
- const snapExists = Snapshot.findOne({
23
- datasetId: datasetId,
24
- tag: snapshotId,
25
- }).exec()
26
- if (!snapExists) {
27
- return
28
- }
29
- await doi
30
- .registerSnapshotDoi(datasetId, snapshotId, oldDesc)
31
- .then((doiRes) => {
32
- if (doiRes) {
33
- Doi.updateOne(
34
- { datasetId: datasetId, snapshotId: snapshotId },
35
- { $set: { doi: doiRes } },
36
- { upsert: true },
37
- )
38
- return res.send({ doi: doiRes })
39
- }
40
- return res.send({ doiRes: null })
41
- })
17
+ return res.send({ doi: doiExists.doi })
18
+ }
19
+
20
+ const snapExists = await Snapshot.findOne({
21
+ datasetId,
22
+ tag: snapshotId,
23
+ }).exec()
24
+ if (!snapExists) {
25
+ return res.status(404).send({ error: "Snapshot not found" })
26
+ }
27
+
28
+ try {
29
+ const attributes = await assembleMetadata(datasetId, snapshotId)
30
+ const doi = await createDraftDoi(attributes)
31
+
32
+ await Doi.updateOne(
33
+ { datasetId, snapshotId },
34
+ { $set: { doi, state: "draft" } },
35
+ { upsert: true },
36
+ )
37
+
38
+ return res.send({ doi })
39
+ } catch (err) {
40
+ return res.status(500).send({ error: err.message })
42
41
  }
43
42
  }
44
43
 
@@ -47,11 +46,8 @@ export async function getDoi(req, res) {
47
46
  const datasetId = req.params.datasetId
48
47
  const snapshotId = req.params.snapshotId
49
48
  const doi = await Doi.findOne(
50
- {
51
- datasetId: datasetId,
52
- snapshotId: snapshotId,
53
- },
54
- "doi",
49
+ { datasetId, snapshotId },
50
+ "doi state",
55
51
  ).exec()
56
52
  return res.send(doi)
57
53
  }