@openneuro/server 4.28.3 → 4.29.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/package.json +9 -9
  2. package/src/app.ts +0 -2
  3. package/src/cache/item.ts +4 -4
  4. package/src/datalad/__tests__/files.spec.ts +0 -25
  5. package/src/datalad/analytics.ts +0 -1
  6. package/src/datalad/dataset.ts +17 -7
  7. package/src/datalad/description.ts +11 -11
  8. package/src/datalad/draft.ts +1 -1
  9. package/src/datalad/pagination.ts +3 -3
  10. package/src/datalad/snapshots.ts +6 -2
  11. package/src/elasticsearch/reindex-dataset.ts +2 -6
  12. package/src/graphql/permissions.ts +1 -1
  13. package/src/graphql/resolvers/__tests__/brainlife.spec.ts +1 -1
  14. package/src/graphql/resolvers/brainlife.ts +3 -5
  15. package/src/graphql/resolvers/cache.ts +1 -1
  16. package/src/graphql/resolvers/dataset-search.ts +4 -4
  17. package/src/graphql/resolvers/dataset.ts +14 -10
  18. package/src/graphql/resolvers/derivatives.ts +5 -7
  19. package/src/graphql/resolvers/draft.ts +4 -15
  20. package/src/graphql/resolvers/importRemoteDataset.ts +1 -0
  21. package/src/graphql/resolvers/metadata.ts +5 -4
  22. package/src/graphql/resolvers/mutation.ts +6 -3
  23. package/src/graphql/resolvers/permissions.ts +52 -33
  24. package/src/graphql/resolvers/reset.ts +1 -1
  25. package/src/graphql/resolvers/reviewer.ts +1 -0
  26. package/src/graphql/resolvers/snapshots.ts +4 -2
  27. package/src/graphql/resolvers/summary.ts +4 -4
  28. package/src/graphql/resolvers/validation.ts +37 -2
  29. package/src/graphql/schema.ts +59 -50
  30. package/src/handlers/comments.ts +7 -6
  31. package/src/handlers/datalad.ts +2 -3
  32. package/src/libs/__tests__/dataset.spec.ts +1 -1
  33. package/src/libs/email/index.ts +3 -2
  34. package/src/models/analytics.ts +2 -1
  35. package/src/models/badAnnexObject.ts +3 -2
  36. package/src/models/comment.ts +2 -1
  37. package/src/models/counter.ts +2 -1
  38. package/src/models/dataset.ts +3 -2
  39. package/src/models/deletion.ts +2 -1
  40. package/src/models/deprecatedSnapshot.ts +2 -1
  41. package/src/models/doi.ts +2 -1
  42. package/src/models/file.ts +3 -2
  43. package/src/models/ingestDataset.ts +2 -1
  44. package/src/models/issue.ts +2 -1
  45. package/src/models/key.ts +2 -1
  46. package/src/models/mailgunIdentifier.ts +2 -1
  47. package/src/models/metadata.ts +2 -1
  48. package/src/models/newsletter.ts +2 -1
  49. package/src/models/notification.ts +2 -1
  50. package/src/models/permission.ts +2 -1
  51. package/src/models/reviewer.ts +2 -1
  52. package/src/models/snapshot.ts +2 -1
  53. package/src/models/stars.ts +2 -1
  54. package/src/models/subscription.ts +2 -1
  55. package/src/models/summary.ts +9 -2
  56. package/src/models/upload.ts +2 -1
  57. package/src/models/user.ts +2 -1
  58. package/src/models/validation.ts +58 -0
  59. package/src/routes.ts +1 -1
  60. package/src/utils/validateUrl.ts +1 -1
  61. package/tsconfig.json +3 -1
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openneuro/server",
-  "version": "4.28.3",
+  "version": "4.29.0-alpha.1",
   "description": "Core service for the OpenNeuro platform.",
   "license": "MIT",
   "main": "src/server.js",
@@ -15,13 +15,13 @@
   },
   "author": "Squishymedia",
   "dependencies": {
-    "@apollo/client": "3.7.2",
+    "@apollo/client": "3.11.8",
     "@apollo/server": "4.9.3",
     "@apollo/utils.keyvadapter": "3.0.0",
     "@elastic/elasticsearch": "8.13.1",
     "@graphql-tools/schema": "^10.0.0",
     "@keyv/redis": "^2.7.0",
-    "@openneuro/search": "^4.28.3",
+    "@openneuro/search": "^4.29.0-alpha.1",
     "@sentry/node": "^8.25.0",
     "@sentry/profiling-node": "^8.25.0",
     "base64url": "^3.0.0",
@@ -30,7 +30,7 @@
     "date-fns": "^2.16.1",
     "draft-js": "^0.11.7",
     "draft-js-export-html": "^1.4.1",
-    "express": "4.19.2",
+    "express": "4.20.0",
     "graphql": "16.8.1",
     "graphql-bigint": "^1.0.0",
     "graphql-compose": "9.0.10",
@@ -59,8 +59,8 @@
     "semver": "^5.5.0",
     "sitemap": "^2.1.0",
     "superagent": "^3.8.2",
-    "ts-node": "9.1.1",
-    "typescript": "5.1.6",
+    "ts-node": "10.9.2",
+    "typescript": "5.6.3",
     "underscore": "^1.8.3",
     "uuid": "10.0.0",
     "xmldoc": "^1.1.0"
@@ -79,11 +79,11 @@
     "nodemon": "3.1.0",
     "ts-node-dev": "1.1.6",
     "tsc-watch": "^4.2.9",
-    "vitest": "1.5.0",
-    "vitest-fetch-mock": "0.2.2"
+    "vitest": "2.1.2",
+    "vitest-fetch-mock": "0.3.0"
   },
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "54302c961cba3420d7bac9dcf67010b78d430651"
+  "gitHead": "6c4cb2bde08a8a0cbead95d505a7d002290fdb5b"
 }
package/src/app.ts CHANGED
@@ -1,6 +1,4 @@
 /*eslint no-console: ["error", { allow: ["log"] }] */
-/* eslint-disable no-unused-vars*/
-
 /**
  * Express app setup
  */
package/src/cache/item.ts CHANGED
@@ -1,7 +1,7 @@
-import { Redis } from "ioredis"
+import type { Redis } from "ioredis"
 import * as zlib from "zlib"
 import { promisify } from "util"
-import { CacheType } from "./types"
+import type { CacheType } from "./types"
 export { CacheType } from "./types"

 const compress = promisify(zlib.gzip)
@@ -13,7 +13,7 @@ const decompress = promisify(zlib.gunzip)
  */
 export function cacheKey(
   type: CacheType,
-  compositeKeys: Array<string>,
+  compositeKeys: string[],
 ): string {
   return `${type.toString()}:${compositeKeys.join(":")}`
 }
@@ -35,7 +35,7 @@ class CacheItem {
   constructor(
     redis: Redis,
     type: CacheType,
-    compositeKeys?: Array<string>,
+    compositeKeys?: string[],
     expiration?: number,
   ) {
     this.redis = redis
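A pattern repeated throughout this release (here, and in datalad/dataset.ts, snapshots.ts, and metadata.ts below): imports used only in type positions become `import type`, which the compiler erases entirely, so no runtime module load is emitted for them. A minimal sketch of the distinction, assuming ioredis is available:

import type { Redis } from "ioredis" // erased at compile time, type positions only
import IORedis from "ioredis" // kept at runtime, needed to construct a client

// The type-only import is enough for annotations:
async function keyCount(client: Redis): Promise<number> {
  return client.dbsize()
}

// Constructing a value requires the runtime import:
const client = new IORedis()
client.on("error", (err) => console.error(err))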
package/src/datalad/__tests__/files.spec.ts CHANGED
@@ -11,31 +11,6 @@ vi.mock("../../config.ts")

 const filename = "sub-01/anat/sub-01_T1w.nii.gz"

-const mockRootFiles = [
-  { filename: "README" },
-  { filename: "dataset_description.json" },
-]
-const mockSub01 = [
-  { filename: "sub-01/anat/sub-01_T1w.nii.gz" },
-  { filename: "sub-01/func/sub-01_task-onebacktask_run-01_bold.nii.gz" },
-]
-const mockSub02 = [
-  { filename: "sub-02/anat/sub-02_T1w.nii.gz" },
-  { filename: "sub-02/func/sub-02_task-onebacktask_run-01_bold.nii.gz" },
-]
-const mockSub03 = [
-  { filename: "sub-03/anat/sub-03_T1w.nii.gz" },
-  { filename: "sub-03/func/sub-03_task-onebacktask_run-01_bold.nii.gz" },
-]
-const mockDerivatives = [{ filename: "derivatives/groundbreaking_output.html" }]
-const mockFiles = [
-  ...mockRootFiles,
-  ...mockSub01,
-  ...mockSub02,
-  ...mockSub03,
-  ...mockDerivatives,
-]
-
 describe("datalad files", () => {
   describe("encodeFilePath()", () => {
     it("should encode a nested path", () => {
package/src/datalad/analytics.ts CHANGED
@@ -1,4 +1,3 @@
-import Analytics from "../models/analytics"
 import Dataset from "../models/dataset"

 /**
package/src/datalad/dataset.ts CHANGED
@@ -7,7 +7,7 @@ import request from "superagent"
 import requestNode from "request"
 import objectHash from "object-hash"
 import { Readable } from "stream"
-import * as Mongoose from "mongoose"
+import type * as Mongoose from "mongoose"
 import config from "../config"
 import * as subscriptions from "../handlers/subscriptions"
 import { generateDataladCookie } from "../libs/authentication/jwt"
@@ -69,15 +69,26 @@ export const createDataset = async (
   }
 }

+interface WorkerDraftFields {
+  // Commit id hash
+  ref: string
+  // Commit tree ref
+  tree: string
+  // Commit message
+  message: string
+  // Commit author time
+  modified: string
+}
+
 /**
  * Return the latest commit
  * @param {string} id Dataset accession number
  */
-export const getDraftHead = async (id) => {
+export const getDraftHead = async (id): Promise<WorkerDraftFields> => {
   const draftRes = await request
     .get(`${getDatasetWorker(id)}/datasets/${id}/draft`)
     .set("Accept", "application/json")
-  return draftRes.body.hexsha
+  return draftRes.body
 }

 /**
@@ -87,7 +98,7 @@ export const getDataset = async (id) => {
   const dataset = await Dataset.findOne({ id }).lean()
   return {
     ...dataset,
-    revision: await getDraftHead(id),
+    revision: (await getDraftHead(id)).ref,
   }
 }

@@ -378,7 +389,6 @@ export const addFileString = (datasetId, filename, mimetype, content) =>
   // Mock a stream so we can reuse addFile
   createReadStream: () => {
     const stream = new Readable()
-    // eslint-disable-next-line @typescript-eslint/unbound-method
     stream._read = () => {
       // Content is available already, _read does nothing
     }
@@ -406,7 +416,7 @@ export const commitFiles = (datasetId, user) => {
     .set("Accept", "application/json")
     .then((res) => {
       gitRef = res.body.ref
-      return updateDatasetRevision(datasetId, gitRef).then(() => gitRef)
+      return updateDatasetRevision(datasetId).then(() => gitRef)
     })
 }

@@ -495,7 +505,7 @@ export const updatePublic = (datasetId, publicFlag) =>
     { upsert: true },
   ).exec()

-export const getDatasetAnalytics = (datasetId, tag) => {
+export const getDatasetAnalytics = (datasetId, _tag) => {
   return Dataset.findOne({ id: datasetId }).then((ds) => ({
     datasetId,
     views: ds.views || 0,
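`getDraftHead` previously resolved to the bare `hexsha` string; it now returns the worker's whole draft response typed as `WorkerDraftFields`, and callers select the field they need: `.ref` for the revision here, `.modified` in the `Dataset.draft` resolver further down. A minimal sketch of the new call-site shape, with the HTTP call to the worker stubbed out:

interface WorkerDraftFields {
  ref: string // commit id hash
  tree: string // commit tree ref
  message: string // commit message
  modified: string // commit author time
}

// Stub standing in for the superagent request to the datalad worker.
async function getDraftHead(id: string): Promise<WorkerDraftFields> {
  return {
    ref: "0123456789abcdef0123456789abcdef01234567",
    tree: "89abcdef0123456789abcdef0123456789abcdef",
    message: `update ${id}`,
    modified: new Date().toISOString(),
  }
}

// Callers now pick fields rather than receiving a bare string:
const draftHead = await getDraftHead("ds000001")
const draft = {
  id: "ds000001",
  revision: draftHead.ref,
  modified: draftHead.modified,
}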
package/src/datalad/description.ts CHANGED
@@ -38,58 +38,58 @@ export const repairDescriptionTypes = (description) => {
   const newDescription = { ...description }
   // Array types
   if (
-    description.hasOwnProperty("Authors") &&
+    Object.hasOwn(description, "Authors") &&
     !Array.isArray(description.Authors)
   ) {
     newDescription.Authors = [description.Authors]
   }
   if (
-    description.hasOwnProperty("ReferencesAndLinks") &&
+    Object.hasOwn(description, "ReferencesAndLinks") &&
     !Array.isArray(description.ReferencesAndLinks)
   ) {
     newDescription.ReferencesAndLinks = [description.ReferencesAndLinks]
   }
   if (
-    description.hasOwnProperty("Funding") &&
+    Object.hasOwn(description, "Funding") &&
     !Array.isArray(description.Funding)
   ) {
     newDescription.Funding = [description.Funding]
   }
   if (
-    description.hasOwnProperty("EthicsApprovals") &&
+    Object.hasOwn(description, "EthicsApprovals") &&
     !Array.isArray(description.EthicsApprovals)
   ) {
     newDescription.EthicsApprovals = [description.EthicsApprovals]
   }
   // String types
   if (
-    description.hasOwnProperty("Name") &&
+    Object.hasOwn(description, "Name") &&
     typeof description.Name !== "string"
   ) {
     newDescription.Name = JSON.stringify(description.Name) || ""
   }
   if (
-    description.hasOwnProperty("DatasetDOI") &&
+    Object.hasOwn(description, "DatasetDOI") &&
     typeof description.DatasetDOI !== "string"
   ) {
     newDescription.DatasetDOI = JSON.stringify(description.DatasetDOI) || ""
   }
   if (
-    description.hasOwnProperty("Acknowledgements") &&
+    Object.hasOwn(description, "Acknowledgements") &&
     typeof description.Acknowledgements !== "string"
   ) {
     newDescription.Acknowledgements =
       JSON.stringify(description.Acknowledgements) || ""
   }
   if (
-    description.hasOwnProperty("HowToAcknowledge") &&
+    Object.hasOwn(description, "HowToAcknowledge") &&
     typeof description.HowToAcknowledge !== "string"
   ) {
     newDescription.HowToAcknowledge =
       JSON.stringify(description.HowToAcknowledge) || ""
   }
   if (
-    description.hasOwnProperty("DatasetType") &&
+    Object.hasOwn(description, "DatasetType") &&
     typeof description.DatasetType !== "string"
   ) {
     newDescription.DatasetType = "raw"
@@ -104,7 +104,7 @@ export const appendSeniorAuthor = (description) => {
   try {
     const SeniorAuthor = description?.Authors[description.Authors.length - 1]
     return { ...description, SeniorAuthor }
-  } catch (err) {
+  } catch (_err) {
     return description
   }
 }
@@ -132,7 +132,7 @@ export const description = async (obj) => {
       )
     })
     return appendSeniorAuthor(repairDescriptionTypes(datasetDescription))
-  } catch (err) {
+  } catch (_err) {
     return defaultDescription
   }
 }
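`Object.hasOwn(obj, key)` (ES2022) replaces `obj.hasOwnProperty(key)` here and in pagination.ts below. For ordinary objects the two agree, but the static form also works when the object has no prototype or shadows `hasOwnProperty`, both possible for objects parsed from a user-supplied dataset_description.json. A small illustration:

// Ordinary parsed-JSON object: both forms agree.
const description = { Authors: "A Single Author" }
console.log(Object.hasOwn(description, "Authors")) // true

// Null-prototype object: no hasOwnProperty method exists at all.
const bare: Record<string, unknown> = Object.create(null)
bare.Name = "My dataset"
console.log(Object.hasOwn(bare, "Name")) // true
// (bare as { hasOwnProperty?: unknown }).hasOwnProperty is undefined here,
// so the method-call form would throw a TypeError.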
package/src/datalad/draft.ts CHANGED
@@ -21,7 +21,7 @@ export const getDraftRevision = async (datasetId) => {
   })
 }

-export const updateDatasetRevision = (datasetId, gitRef) => {
+export const updateDatasetRevision = (datasetId) => {
   /**
    * Update the revision modified time in a draft on changes
    */
package/src/datalad/pagination.ts CHANGED
@@ -43,10 +43,10 @@ export const maxLimit = (limit) => Math.max(Math.min(limit, 100), 1)

 // Decode cursor from options object
 export const getOffsetFromCursor = (options) => {
-  if (options.hasOwnProperty("after") && options.after) {
+  if (Object.hasOwn(options, "after") && options.after) {
     return decodeCursor(options.after).offset
   }
-  if (options.hasOwnProperty("before") && options.before) {
+  if (Object.hasOwn(options, "before") && options.before) {
     return (
       decodeCursor(options.before).offset - Math.max(maxLimit(options.first), 0)
     )
@@ -63,7 +63,7 @@ export const getOffsetFromCursor = (options) => {
 export const sortAggregate = (options) => {
   const sortingStages = []
   const finalSort = {}
-  if (options.hasOwnProperty("orderBy")) {
+  if (Object.hasOwn(options, "orderBy")) {
     if ("created" in options.orderBy && options.orderBy.created) {
       finalSort["_id"] = sortEnumToInt(options.orderBy.created)
     }
package/src/datalad/snapshots.ts CHANGED
@@ -1,6 +1,7 @@
 /**
  * Get snapshots from datalad-service tags
  */
+import * as Sentry from "@sentry/node"
 import request from "superagent"
 import { reindexDataset } from "../elasticsearch/reindex-dataset"
 import { redis, redlock } from "../libs/redis"
@@ -16,7 +17,8 @@ import { getFiles } from "./files"
 import { generateDataladCookie } from "../libs/authentication/jwt"
 import notifications from "../libs/notifications"
 import Dataset from "../models/dataset"
-import Snapshot, { SnapshotDocument } from "../models/snapshot"
+import Snapshot from "../models/snapshot"
+import type { SnapshotDocument } from "../models/snapshot"
 import { updateDatasetRevision } from "./draft"
 import { getDatasetWorker } from "../libs/datalad-service"
 import { join } from "path"
@@ -62,6 +64,8 @@ const createIfNotExistsDoi = async (
       descriptionFieldUpdates["DatasetDOI"] = `doi:${snapshotDoi}`
     }
   } catch (err) {
+    Sentry.captureException(err)
+    // eslint-disable-next-line no-console
     console.error(err)
     throw new Error("DOI minting failed.")
   }
@@ -154,7 +158,7 @@ export const createSnapshot = async (

   await Promise.all([
     // Update the draft status in datasets collection in case any changes were made (DOI, License)
-    updateDatasetRevision(datasetId, snapshot.hexsha),
+    updateDatasetRevision(datasetId),

     // Update metadata in snapshots collection
     createSnapshotMetadata(datasetId, tag, snapshot.hexsha, snapshot.created),
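DOI minting failures are now reported to Sentry before being logged and rethrown, so the original exception is recorded with its stack while callers still see the stable "DOI minting failed." message. A minimal sketch of the capture-then-rethrow pattern, assuming `Sentry.init` has run at startup (the `mintDoi` wrapper is hypothetical):

import * as Sentry from "@sentry/node"

async function mintDoi(createDoi: () => Promise<string>): Promise<string> {
  try {
    return await createDoi()
  } catch (err) {
    // Record the original failure with its stack trace in Sentry,
    // then surface a stable message to the caller.
    Sentry.captureException(err)
    console.error(err)
    throw new Error("DOI minting failed.")
  }
}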
package/src/elasticsearch/reindex-dataset.ts CHANGED
@@ -1,12 +1,8 @@
 import config from "../config"
 import { indexDataset, indexingToken, queryForIndex } from "@openneuro/search"
 import { elasticClient } from "./elastic-client"
-import {
-  ApolloClient,
-  from,
-  InMemoryCache,
-  NormalizedCacheObject,
-} from "@apollo/client"
+import { ApolloClient, from, InMemoryCache } from "@apollo/client"
+import type { NormalizedCacheObject } from "@apollo/client"
 import { setContext } from "@apollo/client/link/context"
 import { HttpLink } from "@apollo/client/link/http"

package/src/graphql/permissions.ts CHANGED
@@ -67,7 +67,7 @@ export class DeletedDatasetError extends GraphQLError {
       // Only return a relative path to avoid cross site risks
       extensions = { code: "DELETED_DATASET", redirect: url.pathname }
     }
-  } catch (err) {
+  } catch (_err) {
     // Do nothing
   }
 }
package/src/graphql/resolvers/__tests__/brainlife.spec.ts CHANGED
@@ -1,5 +1,5 @@
 import { vi } from "vitest"
-import { HasId } from "../../../utils/datasetOrSnapshot"
+import type { HasId } from "../../../utils/datasetOrSnapshot"
 import { brainlifeQuery } from "../brainlife"

 vi.mock("ioredis")
package/src/graphql/resolvers/brainlife.ts CHANGED
@@ -1,7 +1,5 @@
-import {
-  DatasetOrSnapshot,
-  getDatasetFromSnapshotId,
-} from "../../utils/datasetOrSnapshot"
+import { getDatasetFromSnapshotId } from "../../utils/datasetOrSnapshot"
+import type { DatasetOrSnapshot } from "../../utils/datasetOrSnapshot"

 interface BrainlifeFindQuery {
   removed: boolean
@@ -52,7 +50,7 @@ export const onBrainlife = async (
     } else {
       return false
     }
-  } catch (err) {
+  } catch (_err) {
     return false
   }
 }
package/src/graphql/resolvers/cache.ts CHANGED
@@ -19,7 +19,7 @@ export async function cacheClear(
     try {
       await downloadCache.drop()
       return true
-    } catch (err) {
+    } catch (_err) {
       return false
     }
   } else {
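Resolvers across this diff rename unused catch bindings from `err` to `_err`. The underscore is a lint convention, not a language feature: it silences `@typescript-eslint/no-unused-vars` only if the config ignores names matching `^_`, which is an assumption about this project's ESLint setup. A sketch of the shape these resolvers share:

// Assumed lint configuration:
// "@typescript-eslint/no-unused-vars": ["error", { "caughtErrorsIgnorePattern": "^_" }]
async function cacheClear(cache: { drop(): Promise<void> }): Promise<boolean> {
  try {
    await cache.drop()
    return true
  } catch (_err) {
    // The error value is deliberately unused; the boolean result is the contract.
    return false
  }
}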
package/src/graphql/resolvers/dataset-search.ts CHANGED
@@ -1,10 +1,10 @@
+import * as Sentry from "@sentry/node"
 import { elasticClient } from "../../elasticsearch/elastic-client"
 import { dataset } from "./dataset"
 import Star from "../../models/stars"
 import Subscription from "../../models/subscription"
 import Permission from "../../models/permission"
 import { hashObject } from "../../libs/authentication/crypto"
-import util from "util"

 const elasticIndex = "datasets"

@@ -70,7 +70,7 @@ export const elasticRelayConnection = (
     },
   }
 } catch (err) {
-  console.error(err)
+  Sentry.captureException(err)
 }

@@ -95,11 +95,11 @@ export const datasetSearchConnection = async (
   if (after) {
     try {
       requestBody.search_after = decodeCursor(after)
-    } catch (err) {
+    } catch (_err) {
       // Don't include search_after if parsing fails
     }
   }
-  const result = await elasticClient.search({
+  await elasticClient.search({
     index: elasticIndex,
     size: first,
     q: `${q} AND public:true`,
package/src/graphql/resolvers/dataset.ts CHANGED
@@ -125,6 +125,7 @@ export const deleteDataset = async (
     await removeDatasetSearchDocument(id)
   } catch (err) {
     // This likely means this dataset had not yet been indexed
+    /* eslint-disable-next-line no-console */
     console.error(err)
   }
   await new Deletion({
@@ -146,9 +147,9 @@ export const deleteFiles = async (
 ) => {
   try {
     await checkDatasetWrite(datasetId, user, userInfo)
-    const deletedFiles = await datalad.deleteFiles(datasetId, files, userInfo)
+    await datalad.deleteFiles(datasetId, files, userInfo)
     return true
-  } catch (err) {
+  } catch (_err) {
     return false
   }
 }
@@ -169,7 +170,7 @@ export const removeAnnexObject = async (
       userInfo,
     )
    return true
-  } catch (err) {
+  } catch (_err) {
    return false
   }
 }
@@ -189,7 +190,7 @@ export const flagAnnexObject = async (
       userInfo,
     )
    return true
-  } catch (err) {
+  } catch (_err) {
    return false
   }
 }
@@ -228,7 +229,7 @@ export const trackAnalytics = (obj, { datasetId, tag, type }) => {
   try {
     dataladAnalytics.trackAnalytics(datasetId, tag, type)
     return true
-  } catch (err) {
+  } catch (_err) {
     return false
   }
 }
@@ -276,11 +277,14 @@ const worker = (obj) => getDatasetWorker(obj.id)
  */
 const Dataset = {
   uploader: (ds) => user(ds, { id: ds.uploader }),
-  draft: async (obj) => ({
-    id: obj.id,
-    revision: await datalad.getDraftHead(obj.id),
-    modified: obj.modified,
-  }),
+  draft: async (obj) => {
+    const draftHead = await datalad.getDraftHead(obj.id)
+    return {
+      id: obj.id,
+      revision: draftHead.ref,
+      modified: draftHead.modified,
+    }
+  },
   snapshots,
   latestSnapshot,
   analytics,
package/src/graphql/resolvers/derivatives.ts CHANGED
@@ -1,8 +1,6 @@
-import {
-  DatasetOrSnapshot,
-  getDatasetFromSnapshotId,
-} from "../../utils/datasetOrSnapshot"
+import { getDatasetFromSnapshotId } from "../../utils/datasetOrSnapshot"
 import config from "../../config"
+import type { DatasetOrSnapshot } from "../../utils/datasetOrSnapshot"

 const S3_BUCKET = "openneuro-derivatives"
 const GITHUB_ORGANIZATION = "OpenNeuroDerivatives"
@@ -44,7 +42,7 @@ export const githubDerivative = async (
       }
     }
     return false
-  } catch (err) {
+  } catch (_err) {
     return false
   }
 }
@@ -78,14 +76,14 @@ export const derivativeObject = (
  */
 export const derivatives = async (
   dataset: DatasetOrSnapshot,
-): Promise<Array<DatasetDerivatives>> => {
+): Promise<DatasetDerivatives[]> => {
   let datasetId
   if ("tag" in dataset) {
     datasetId = getDatasetFromSnapshotId(dataset.id)
   } else {
     datasetId = dataset.id
   }
-  const available: Array<DatasetDerivatives> = []
+  const available: DatasetDerivatives[] = []
   if (await githubDerivative(datasetId, "mriqc")) {
     available.push(derivativeObject(datasetId, "mriqc"))
   }
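The `Array<DatasetDerivatives>` to `DatasetDerivatives[]` change (and `Array<string>` to `string[]` in cache/item.ts above) is purely notational: both annotations name the same type, and the bracket form is the spelling that rules such as `@typescript-eslint/array-type` standardize on. For example (the element field here is illustrative, not the real `DatasetDerivatives` shape):

interface DatasetDerivatives {
  name: string // illustrative field only
}

// Identical types under two spellings; assignment works both ways.
const generic: Array<DatasetDerivatives> = [{ name: "mriqc" }]
const bracket: DatasetDerivatives[] = generic
console.log(bracket.length) // 1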
package/src/graphql/resolvers/draft.ts CHANGED
@@ -3,10 +3,11 @@ import { summary } from "./summary"
 import { issues } from "./issues.js"
 import { description } from "./description.js"
 import { readme } from "./readme.js"
-import { getDraftRevision, updateDatasetRevision } from "../../datalad/draft.js"
+import { getDraftRevision } from "../../datalad/draft.js"
 import { checkDatasetWrite } from "../permissions.js"
 import { getFiles } from "../../datalad/files"
 import { filterRemovedAnnexObjects } from "../utils/file.js"
+import { validation } from "./validation"

 // A draft must have a dataset parent
 const draftFiles = async (dataset, args, { userInfo }) => {
@@ -15,6 +16,7 @@ const draftFiles = async (dataset, args, { userInfo }) => {
   return filterRemovedAnnexObjects(dataset.id, userInfo)(files)
 }

+/* eslint-disable-next-line @typescript-eslint/no-unused-vars */
 const draftSize = async (dataset, args, { userInfo }) => {
   const hexsha = await getDraftRevision(dataset.id)
   return Summary.findOne({ datasetId: dataset.id, id: hexsha })
@@ -22,20 +24,6 @@ const draftSize = async (dataset, args, { userInfo }) => {
     .then((res) => res?.toObject()?.size)
 }

-/**
- * Deprecated mutation to move the draft HEAD reference forward or backward
- *
- * Exists to support existing usage where this would result in the initial snapshot
- */
-export const updateRef = async (
-  obj,
-  { datasetId, ref },
-  { user, userInfo },
-) => {
-  await checkDatasetWrite(datasetId, user, userInfo)
-  await updateDatasetRevision(datasetId, ref)
-}
-
 /**
  * Mutation to move the draft HEAD reference forward or backward
  */
@@ -49,6 +37,7 @@ const draft = {
   size: draftSize,
   summary,
   issues,
+  validation,
   modified: (obj) => obj.modified,
   description,
   readme,
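The draft resolver gains a `validation` field, wired to the new resolver in resolvers/validation.ts and backed by the new `models/validation.ts` listed above. In this resolver-map style, adding the field to the map is all the wiring needed once the schema declares it; a minimal sketch with hypothetical types (the real result shape lives in validation.ts):

// Hypothetical result shape for illustration only.
interface ValidationResult {
  errors: number
  warnings: number
}

// A field resolver receives its parent object (the draft's dataset).
const validation = async (dataset: { id: string }): Promise<ValidationResult> => {
  // Look up stored validator output for this dataset's draft here.
  return { errors: 0, warnings: 2 }
}

const draft = {
  // ...existing fields: files, size, summary, issues...
  validation,
  modified: (obj: { modified: string }) => obj.modified,
}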
package/src/graphql/resolvers/importRemoteDataset.ts CHANGED
@@ -65,6 +65,7 @@ export async function importRemoteDataset(
 export async function finishImportRemoteDataset(
   _: Record<string, unknown>,
   { id, success, message }: { id: string; success: boolean; message: string },
+  /* eslint-disable-next-line @typescript-eslint/no-unused-vars */
   { user, userInfo }: { user: string; userInfo: Record<string, unknown> },
 ): Promise<boolean> {
   const ingest = await IngestDataset.findById(id)
package/src/graphql/resolvers/metadata.ts CHANGED
@@ -1,7 +1,8 @@
 import Snapshot from "../../models/snapshot"
-import { LeanDocument } from "mongoose"
+import type { LeanDocument } from "mongoose"
 import DatasetModel from "../../models/dataset"
-import MetadataModel, { MetadataDocument } from "../../models/metadata"
+import MetadataModel from "../../models/metadata"
+import type { MetadataDocument } from "../../models/metadata"
 import { latestSnapshot } from "./snapshots"
 import { permissions } from "./permissions"

@@ -71,8 +72,8 @@ export const addMetadata = async (obj, { datasetId, metadata }) => {
  * Resolve all public datasets and return metadata
  */
 export async function publicMetadata(
-  obj,
-): Promise<Array<LeanDocument<MetadataDocument>>> {
+  _obj,
+): Promise<LeanDocument<MetadataDocument>[]> {
   const datasets = await DatasetModel.find({
     public: true,
   }).lean()