@openneuro/server 4.20.5 → 4.20.6-alpha.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (185)
  1. package/package.json +4 -6
  2. package/src/__mocks__/{config.js → config.ts} +5 -5
  3. package/src/app.ts +32 -31
  4. package/src/cache/item.ts +6 -7
  5. package/src/cache/types.ts +8 -8
  6. package/src/{config.js → config.ts} +6 -6
  7. package/src/datalad/__tests__/changelog.spec.ts +83 -0
  8. package/src/datalad/__tests__/dataset.spec.ts +109 -0
  9. package/src/datalad/__tests__/description.spec.ts +141 -0
  10. package/src/datalad/__tests__/files.spec.ts +77 -0
  11. package/src/datalad/__tests__/pagination.spec.ts +136 -0
  12. package/src/datalad/__tests__/{snapshots.spec.js → snapshots.spec.ts} +17 -17
  13. package/src/datalad/{analytics.js → analytics.ts} +4 -4
  14. package/src/datalad/{changelog.js → changelog.ts} +17 -14
  15. package/src/datalad/{dataset.js → dataset.ts} +95 -93
  16. package/src/datalad/{description.js → description.ts} +37 -37
  17. package/src/datalad/draft.ts +38 -0
  18. package/src/datalad/files.ts +26 -20
  19. package/src/datalad/{pagination.js → pagination.ts} +47 -47
  20. package/src/datalad/{readme.js → readme.ts} +13 -11
  21. package/src/datalad/{reexporter.js → reexporter.ts} +4 -4
  22. package/src/datalad/{snapshots.js → snapshots.ts} +56 -62
  23. package/src/datalad/{upload.js → upload.ts} +7 -5
  24. package/src/elasticsearch/elastic-client.ts +11 -0
  25. package/src/elasticsearch/reindex-dataset.ts +7 -7
  26. package/src/graphql/__tests__/__snapshots__/permissions.spec.ts.snap +5 -0
  27. package/src/graphql/__tests__/{comment.spec.js → comment.spec.ts} +17 -17
  28. package/src/graphql/__tests__/permissions.spec.ts +113 -0
  29. package/src/graphql/{permissions.js → permissions.ts} +14 -14
  30. package/src/graphql/resolvers/__tests__/brainlife.spec.ts +11 -11
  31. package/src/graphql/resolvers/__tests__/{dataset-search.spec.js → dataset-search.spec.ts} +25 -23
  32. package/src/graphql/resolvers/__tests__/dataset.spec.ts +175 -0
  33. package/src/graphql/resolvers/__tests__/derivatives.spec.ts +19 -19
  34. package/src/graphql/resolvers/__tests__/importRemoteDataset.spec.ts +20 -20
  35. package/src/graphql/resolvers/__tests__/permssions.spec.ts +35 -0
  36. package/src/graphql/resolvers/__tests__/snapshots.spec.ts +59 -0
  37. package/src/graphql/resolvers/__tests__/user.spec.ts +18 -0
  38. package/src/graphql/resolvers/brainlife.ts +4 -4
  39. package/src/graphql/resolvers/cache.ts +4 -4
  40. package/src/graphql/resolvers/{comment.js → comment.ts} +16 -16
  41. package/src/graphql/resolvers/{dataset-search.js → dataset-search.ts} +45 -43
  42. package/src/graphql/resolvers/{dataset.js → dataset.ts} +38 -52
  43. package/src/graphql/resolvers/datasetType.ts +3 -3
  44. package/src/graphql/resolvers/derivatives.ts +11 -11
  45. package/src/graphql/resolvers/description.ts +18 -0
  46. package/src/graphql/resolvers/{draft.js → draft.ts} +13 -13
  47. package/src/graphql/resolvers/{flaggedFiles.js → flaggedFiles.ts} +4 -4
  48. package/src/graphql/resolvers/{follow.js → follow.ts} +1 -1
  49. package/src/graphql/resolvers/git.ts +3 -3
  50. package/src/graphql/resolvers/history.ts +13 -0
  51. package/src/graphql/resolvers/importRemoteDataset.ts +12 -11
  52. package/src/graphql/resolvers/index.ts +25 -0
  53. package/src/graphql/resolvers/{issues.js → issues.ts} +9 -9
  54. package/src/graphql/resolvers/metadata.ts +8 -8
  55. package/src/graphql/resolvers/{mutation.js → mutation.ts} +26 -26
  56. package/src/graphql/resolvers/{newsletter.js → newsletter.ts} +2 -2
  57. package/src/graphql/resolvers/permissions.ts +15 -21
  58. package/src/graphql/resolvers/publish.ts +17 -0
  59. package/src/graphql/resolvers/query.ts +21 -0
  60. package/src/graphql/resolvers/{readme.js → readme.ts} +3 -3
  61. package/src/graphql/resolvers/{reexporter.js → reexporter.ts} +2 -2
  62. package/src/graphql/resolvers/relation.ts +5 -5
  63. package/src/graphql/resolvers/{reset.js → reset.ts} +2 -2
  64. package/src/graphql/resolvers/reviewer.ts +4 -4
  65. package/src/graphql/resolvers/{snapshots.js → snapshots.ts} +49 -49
  66. package/src/graphql/resolvers/{stars.js → stars.ts} +1 -1
  67. package/src/graphql/resolvers/summary.ts +3 -3
  68. package/src/graphql/resolvers/{upload.js → upload.ts} +5 -5
  69. package/src/graphql/resolvers/{user.js → user.ts} +16 -18
  70. package/src/graphql/resolvers/{validation.js → validation.ts} +12 -14
  71. package/src/graphql/{schema.js → schema.ts} +4 -6
  72. package/src/graphql/utils/{file.js → file.ts} +2 -2
  73. package/src/handlers/{comments.js → comments.ts} +11 -11
  74. package/src/handlers/{config.js → config.ts} +1 -1
  75. package/src/handlers/{datalad.js → datalad.ts} +22 -22
  76. package/src/handlers/{doi.js → doi.ts} +6 -6
  77. package/src/handlers/reviewer.ts +6 -6
  78. package/src/handlers/{sitemap.js → sitemap.ts} +19 -19
  79. package/src/handlers/stars.ts +11 -10
  80. package/src/handlers/{subscriptions.js → subscriptions.ts} +17 -16
  81. package/src/handlers/{users.js → users.ts} +3 -3
  82. package/src/libs/__tests__/apikey.spec.ts +25 -0
  83. package/src/libs/__tests__/datalad-service.spec.ts +27 -0
  84. package/src/libs/__tests__/{dataset.spec.js → dataset.spec.ts} +9 -9
  85. package/src/libs/{apikey.js → apikey.ts} +5 -5
  86. package/src/libs/authentication/__tests__/jwt.spec.ts +59 -0
  87. package/src/libs/authentication/{crypto.js → crypto.ts} +16 -16
  88. package/src/libs/authentication/google.ts +18 -0
  89. package/src/libs/authentication/jwt.ts +40 -33
  90. package/src/libs/authentication/{orcid.js → orcid.ts} +11 -11
  91. package/src/libs/authentication/{passport.js → passport.ts} +45 -30
  92. package/src/libs/authentication/{states.js → states.ts} +17 -20
  93. package/src/libs/{counter.js → counter.ts} +1 -1
  94. package/src/libs/{datalad-service.js → datalad-service.ts} +4 -4
  95. package/src/libs/dataset.ts +9 -0
  96. package/src/libs/doi/__tests__/__snapshots__/doi.spec.ts.snap +17 -0
  97. package/src/libs/doi/__tests__/doi.spec.ts +25 -0
  98. package/src/libs/doi/__tests__/normalize.spec.ts +19 -19
  99. package/src/libs/doi/{index.js → index.ts} +27 -21
  100. package/src/libs/doi/normalize.ts +2 -2
  101. package/src/libs/email/__tests__/index.spec.ts +14 -14
  102. package/src/libs/email/index.ts +4 -4
  103. package/src/libs/email/templates/__tests__/comment-created.spec.ts +12 -12
  104. package/src/libs/email/templates/__tests__/dataset-deleted.spec.ts +6 -6
  105. package/src/libs/email/templates/__tests__/owner-unsubscribed.spec.ts +6 -6
  106. package/src/libs/email/templates/__tests__/snapshot-created.spec.ts +9 -9
  107. package/src/libs/email/templates/__tests__/snapshot-reminder.spec.ts +7 -7
  108. package/src/libs/email/templates/comment-created.ts +2 -1
  109. package/src/libs/email/templates/dataset-deleted.ts +2 -1
  110. package/src/libs/email/templates/dataset-import-failed.ts +2 -1
  111. package/src/libs/email/templates/dataset-imported.ts +2 -1
  112. package/src/libs/email/templates/owner-unsubscribed.ts +2 -1
  113. package/src/libs/email/templates/snapshot-created.ts +2 -1
  114. package/src/libs/email/templates/snapshot-reminder.ts +2 -1
  115. package/src/libs/{notifications.js → notifications.ts} +100 -113
  116. package/src/libs/{orcid.js → orcid.ts} +20 -20
  117. package/src/libs/{redis.js → redis.ts} +6 -6
  118. package/src/models/__tests__/ingestDataset.spec.ts +15 -15
  119. package/src/models/analytics.ts +2 -2
  120. package/src/models/badAnnexObject.ts +6 -6
  121. package/src/models/comment.ts +10 -10
  122. package/src/models/counter.ts +2 -2
  123. package/src/models/dataset.ts +16 -16
  124. package/src/models/deletion.ts +3 -3
  125. package/src/models/deprecatedSnapshot.ts +2 -2
  126. package/src/models/doi.ts +2 -2
  127. package/src/models/file.ts +2 -2
  128. package/src/models/ingestDataset.ts +4 -4
  129. package/src/models/issue.ts +2 -2
  130. package/src/models/key.ts +2 -2
  131. package/src/models/mailgunIdentifier.ts +2 -2
  132. package/src/models/metadata.ts +3 -3
  133. package/src/models/newsletter.ts +3 -3
  134. package/src/models/notification.ts +2 -2
  135. package/src/models/permission.ts +4 -4
  136. package/src/models/reviewer.ts +7 -7
  137. package/src/models/snapshot.ts +2 -2
  138. package/src/models/stars.ts +6 -6
  139. package/src/models/subscription.ts +2 -2
  140. package/src/models/summary.ts +2 -2
  141. package/src/models/upload.ts +3 -3
  142. package/src/models/user.ts +4 -4
  143. package/src/{routes.js → routes.ts} +62 -62
  144. package/src/server.ts +9 -9
  145. package/src/utils/__tests__/datasetOrSnapshot.spec.ts +25 -25
  146. package/src/utils/__tests__/validateUrl.spec.ts +10 -10
  147. package/src/utils/datasetOrSnapshot.ts +2 -2
  148. package/src/utils/validateUrl.ts +1 -1
  149. package/src/datalad/__tests__/changelog.spec.js +0 -82
  150. package/src/datalad/__tests__/dataset.spec.js +0 -109
  151. package/src/datalad/__tests__/description.spec.js +0 -137
  152. package/src/datalad/__tests__/files.spec.js +0 -75
  153. package/src/datalad/__tests__/pagination.spec.js +0 -136
  154. package/src/datalad/draft.js +0 -37
  155. package/src/elasticsearch/elastic-client.js +0 -11
  156. package/src/graphql/__tests__/permissions.spec.js +0 -107
  157. package/src/graphql/pubsub.js +0 -5
  158. package/src/graphql/resolvers/__tests__/dataset.spec.js +0 -175
  159. package/src/graphql/resolvers/__tests__/permssions.spec.js +0 -34
  160. package/src/graphql/resolvers/__tests__/snapshots.spec.js +0 -58
  161. package/src/graphql/resolvers/__tests__/user.spec.js +0 -17
  162. package/src/graphql/resolvers/description.js +0 -29
  163. package/src/graphql/resolvers/history.js +0 -11
  164. package/src/graphql/resolvers/index.js +0 -25
  165. package/src/graphql/resolvers/publish.js +0 -17
  166. package/src/graphql/resolvers/query.js +0 -21
  167. package/src/graphql/resolvers/subscriptions.js +0 -81
  168. package/src/graphql/utils/publish-draft-update.js +0 -13
  169. package/src/libs/__tests__/apikey.spec.js +0 -24
  170. package/src/libs/__tests__/datalad-service.spec.js +0 -26
  171. package/src/libs/authentication/__tests__/jwt.spec.js +0 -23
  172. package/src/libs/authentication/globus.js +0 -11
  173. package/src/libs/authentication/google.js +0 -19
  174. package/src/libs/bidsId.js +0 -68
  175. package/src/libs/dataset.js +0 -9
  176. package/src/libs/doi/__tests__/doi.spec.js +0 -24
  177. package/src/libs/redis-pubsub.js +0 -5
  178. package/src/libs/request.js +0 -155
  179. package/src/libs/scitran.js +0 -25
  180. package/src/libs/subscription-server.js +0 -20
  181. package/src/libs/testing-utils.js +0 -17
  182. package/src/persistent/datasets/.gitignore +0 -3
  183. package/src/persistent/temp/.gitignore +0 -3
  184. /package/src/libs/__mocks__/{notifications.js → notifications.ts} +0 -0
  185. /package/src/libs/authentication/{verifyUser.js → verifyUser.ts} +0 -0
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openneuro/server",
-  "version": "4.20.5",
+  "version": "4.20.6-alpha.3",
   "description": "Core service for the OpenNeuro platform.",
   "license": "MIT",
   "main": "src/server.js",
@@ -21,7 +21,7 @@
   "@elastic/elasticsearch": "7.15.0",
   "@graphql-tools/schema": "^10.0.0",
   "@keyv/redis": "^2.7.0",
-  "@openneuro/search": "^4.20.5",
+  "@openneuro/search": "^4.20.6-alpha.3",
   "@passport-next/passport-google-oauth2": "^1.0.0",
   "@sentry/node": "^4.5.3",
   "base64url": "^3.0.0",
@@ -32,12 +32,10 @@
   "draft-js-export-html": "^1.4.1",
   "elastic-apm-node": "3.49.1",
   "express": "4.18.2",
-  "graphql": "16.6.0",
+  "graphql": "16.8.1",
   "graphql-bigint": "^1.0.0",
   "graphql-compose": "9.0.10",
   "graphql-iso-date": "^3.6.1",
-  "graphql-redis-subscriptions": "2.1.0",
-  "graphql-subscriptions": "^1.1.0",
   "graphql-tools": "9.0.0",
   "immutable": "^3.8.2",
   "ioredis": "4.17.3",
@@ -87,5 +85,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "d397dba94af3bf8482b8a082b8f8d28a79232488"
+  "gitHead": "74d0a6d75494f382655482c563a52d6ed82477ec"
 }
package/src/__mocks__/{config.js → config.ts} CHANGED
@@ -1,24 +1,24 @@
 const config = {
   auth: {
     jwt: {
-      secret: '123456',
+      secret: "123456",
     },
   },
   datalad: {
-    uri: 'datalad',
+    uri: "datalad",
     workers: 4,
   },
   mongo: {
-    url: 'mongodb://',
+    url: "mongodb://",
   },
   notifications: {
     email: {
-      from: 'notifications@example.com',
+      from: "notifications@example.com",
     },
   },
   elasticsearch: {},
   doi: {
-    username: '',
+    username: "",
   },
 }
 
package/src/app.ts CHANGED
@@ -4,29 +4,29 @@
 /**
  * Express app setup
  */
-import { createServer } from 'http'
-import cors from 'cors'
-import express, { urlencoded, json } from 'express'
-import passport from 'passport'
-import config from './config'
-import routes from './routes'
-import morgan from 'morgan'
-import schema from './graphql/schema'
-import { ApolloServer } from '@apollo/server'
-import { ApolloServerPluginLandingPageLocalDefault } from '@apollo/server/plugin/landingPage/default'
-import { expressMiddleware } from '@apollo/server/express4'
-import { ApolloServerPluginDrainHttpServer } from '@apollo/server/plugin/drainHttpServer'
-import { KeyvAdapter } from '@apollo/utils.keyvadapter'
-import Keyv from 'keyv'
-import KeyvRedis from '@keyv/redis'
-import cookieParser from 'cookie-parser'
-import * as jwt from './libs/authentication/jwt'
-import * as auth from './libs/authentication/states.js'
-import { sitemapHandler } from './handlers/sitemap.js'
-import { setupPassportAuth } from './libs/authentication/passport.js'
-import { redis } from './libs/redis'
-import { version } from './lerna.json'
-export { Express } from 'express-serve-static-core'
+import { createServer } from "http"
+import cors from "cors"
+import express, { json, urlencoded } from "express"
+import passport from "passport"
+import config from "./config"
+import routes from "./routes"
+import morgan from "morgan"
+import schema from "./graphql/schema"
+import { ApolloServer } from "@apollo/server"
+import { ApolloServerPluginLandingPageLocalDefault } from "@apollo/server/plugin/landingPage/default"
+import { expressMiddleware } from "@apollo/server/express4"
+import { ApolloServerPluginDrainHttpServer } from "@apollo/server/plugin/drainHttpServer"
+import { KeyvAdapter } from "@apollo/utils.keyvadapter"
+import Keyv from "keyv"
+import KeyvRedis from "@keyv/redis"
+import cookieParser from "cookie-parser"
+import * as jwt from "./libs/authentication/jwt"
+import * as auth from "./libs/authentication/states"
+import { sitemapHandler } from "./handlers/sitemap"
+import { setupPassportAuth } from "./libs/authentication/passport"
+import { redis } from "./libs/redis"
+import { version } from "./lerna.json"
+export { Express } from "express-serve-static-core"
 
 interface OpenNeuroRequestContext {
   user: string
@@ -47,17 +47,18 @@ export async function expressApolloSetup() {
 
   app.use((req, res, next) => {
     res.set(config.headers)
-    res.type('application/json')
+    res.type("application/json")
     next()
   })
-  app.use(morgan('short'))
+  app.use(morgan("short"))
   app.use(cookieParser())
-  app.use(urlencoded({ extended: false, limit: '50mb' }))
-  app.use(json({ limit: '50mb' }))
+  app.use(urlencoded({ extended: false, limit: "50mb" }))
+  app.use(json({ limit: "50mb" }))
 
   // routing ---------------------------------------------------------
-  app.use('/sitemap.xml', sitemapHandler)
+  app.use("/sitemap.xml", sitemapHandler)
   app.use(config.apiPrefix, routes)
+  app.use("/api/", routes)
 
   const httpServer = createServer(app)
 
@@ -78,8 +79,8 @@ export async function expressApolloSetup() {
         async willSendResponse(requestContext) {
           const { response } = requestContext
           if (
-            response.body.kind === 'single' &&
-            'data' in response.body.singleResult
+            response.body.kind === "single" &&
+            "data" in response.body.singleResult
           ) {
             response.body.singleResult.extensions = {
               ...response.body.singleResult.extensions,
@@ -98,7 +99,7 @@ export async function expressApolloSetup() {
 
   // Setup GraphQL middleware
   app.use(
-    ['/graphql', '/crn/graphql'],
+    ["/graphql", "/crn/graphql", "/api/graphql"],
     cors<cors.CorsRequest>(),
     jwt.authenticate,
     auth.optional,
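
Note on the route changes above: app.ts now mounts the REST routes and the GraphQL middleware under an /api/ prefix in addition to the legacy /crn/ paths. The sketch below is only an illustration of exercising the new mount point and is not part of the published diff; the localhost host is an assumption, the port comes from the src/config.ts values further down, and the query is a generic introspection field rather than anything specific to the OpenNeuro schema.

// Hypothetical smoke check of the new /api/graphql mount (illustration only, not in the package).
// Assumes Node 18+ (global fetch) in an ES module, with the server listening on localhost:8111.
const response = await fetch("http://localhost:8111/api/graphql", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ query: "{ __typename }" }),
})
// A healthy mount responds with the standard GraphQL envelope, e.g. { data: { __typename: "Query" } }
console.log(await response.json())
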
package/src/cache/item.ts CHANGED
@@ -1,8 +1,8 @@
-import { Redis } from 'ioredis'
-import * as zlib from 'zlib'
-import { promisify } from 'util'
-import { CacheType } from './types'
-export { CacheType } from './types'
+import { Redis } from "ioredis"
+import * as zlib from "zlib"
+import { promisify } from "util"
+import { CacheType } from "./types"
+export { CacheType } from "./types"
 
 const compress = promisify(zlib.gzip)
 const decompress = promisify(zlib.gunzip)
@@ -15,7 +15,7 @@ export function cacheKey(
   type: CacheType,
   compositeKeys: Array<string>,
 ): string {
-  return `${type.toString()}:${compositeKeys.join(':')}`
+  return `${type.toString()}:${compositeKeys.join(":")}`
 }
 
 /**
@@ -27,7 +27,6 @@ class CacheItem {
   expiration = 0
   private redis: Redis
   /**
-   *
    * @param redis ioredis client
    * @param type A CacheType value
    * @param compositeKeys Values identifying this cache key
package/src/cache/types.ts CHANGED
@@ -4,12 +4,12 @@
  * Use short names here, particularly for keys with many values
  */
 export enum CacheType {
-  datasetsConnection = 'connection',
-  datasetDescription = 'description',
-  commitFiles = 'files',
-  readme = 'readme',
-  snapshot = 'snapshot',
-  snapshotIndex = 'snapshotIndex',
-  participantCount = 'participantCount',
-  snapshotDownload = 'download',
+  datasetsConnection = "connection",
+  datasetDescription = "description",
+  commitFiles = "files",
+  readme = "readme",
+  snapshot = "snapshot",
+  snapshotIndex = "snapshotIndex",
+  participantCount = "participantCount",
+  snapshotDownload = "download",
 }
package/src/{config.js → config.ts} CHANGED
@@ -1,12 +1,12 @@
 const config = {
   url: process.env.CRN_SERVER_URL,
   port: 8111,
-  apiPrefix: '/crn/',
-  location: '/srv',
+  apiPrefix: "/crn/",
+  location: "/srv",
   headers: {
-    'Access-Control-Allow-Origin': '*',
-    'Access-Control-Allow-Methods': 'GET, POST, OPTIONS, PUT, PATCH, DELETE',
-    'Access-Control-Allow-Headers': 'content-type, Authorization',
+    "Access-Control-Allow-Origin": "*",
+    "Access-Control-Allow-Methods": "GET, POST, OPTIONS, PUT, PATCH, DELETE",
+    "Access-Control-Allow-Headers": "content-type, Authorization",
   },
   analysis: {
     enabled: process.env.ANALYSIS_ENABLED,
@@ -33,7 +33,7 @@ const config = {
   },
   mongo: {
     url: process.env.MONGO_URL,
-    dbName: 'crn',
+    dbName: "crn",
     connectTimeoutMS: 1000,
   },
   redis: {
package/src/datalad/__tests__/changelog.spec.ts CHANGED
@@ -0,0 +1,83 @@
+import { vi } from "vitest"
+import * as changelog from "../changelog"
+
+vi.mock("ioredis")
+vi.mock("../../config.ts")
+
+describe("changelog editing tools", () => {
+  describe("findVersion()", () => {
+    it("finds the version bounds for a single entry", () => {
+      const newChanges = ["1.0.0 2019-01-01", " - Initial version"]
+      expect(changelog.findVersion(newChanges, "1.0.0")).toEqual([0, 2])
+    })
+    it("returns an empty array when no matching version is found", () => {
+      const newChanges = ["1.0.0 2019-01-01", " - Initial version"]
+      expect(changelog.findVersion(newChanges, "1.0.1")).toEqual([])
+    })
+    it("returns the correct offset for a change in the middle of others", () => {
+      const newChanges = [
+        "2.0.0 2019-02-02",
+        " - New derivatives",
+        "1.1.0 2019-02-01",
+        " - Added subjects",
+        " - Fixed metadata",
+        "1.0.0 2019-01-01",
+        " - Initial version",
+      ]
+      expect(changelog.findVersion(newChanges, "1.1.0")).toEqual([2, 3])
+    })
+    it("works with fuzzy data", () => {
+      const newChanges = ["I", "am", "a", "banana"]
+      expect(changelog.findVersion(newChanges, "1.0.0")).toEqual([])
+    })
+  })
+  describe("spliceChangelog()", () => {
+    it("splices in changes to an existing changelog", () => {
+      const original = "1.0.0 2019-01-01\n - Initial version\n"
+      const changes = ["Some new change", "Another new change"]
+      const tag = "1.0.0"
+      expect(
+        changelog.spliceChangelog(original, tag, "2019-02-01", changes),
+      ).toEqual(
+        "1.0.0 2019-02-01\n - Some new change\n - Another new change\n",
+      )
+    })
+    it("splices correctly when no matching version is found", () => {
+      const original = "1.0.0 2019-01-01\n - Initial version\n"
+      const changes = ["Some new change"]
+      const tag = "1.0.1"
+      expect(
+        changelog.spliceChangelog(original, tag, "2019-02-01", changes),
+      ).toEqual(
+        "1.0.1 2019-02-01\n - Some new change\n1.0.0 2019-01-01\n - Initial version\n",
+      )
+    })
+    it("handles fuzzy data", () => {
+      const original = "abc 123 not a changelog\nnope"
+      const changes = ["Fixed up data"]
+      const tag = "1.0.0"
+      expect(
+        changelog.spliceChangelog(original, tag, "2019-02-01", changes),
+      ).toEqual(
+        "1.0.0 2019-02-01\n - Fixed up data\nabc 123 not a changelog\nnope\n",
+      )
    })
+    it("works with legacy versions", () => {
+      const original = "00001 1999-12-31\n - Partying"
+      const changes = ["Bringing dataset into the present day"]
+      const tag = "1.0.0"
+      expect(
+        changelog.spliceChangelog(original, tag, "2019-02-01", changes),
+      ).toEqual(
+        "1.0.0 2019-02-01\n - Bringing dataset into the present day\n00001 1999-12-31\n - Partying\n",
+      )
+    })
+    it("works if no CHANGES are provided", () => {
+      expect(
+        changelog.spliceChangelog("", "1.0.0", "2019-03-04", [
+          "Initial snapshot",
+        ]),
+      ).toEqual("1.0.0 2019-03-04\n - Initial snapshot\n")
+    })
+  })
+})
package/src/datalad/__tests__/dataset.spec.ts CHANGED
@@ -0,0 +1,109 @@
+import { vi } from "vitest"
+import request from "superagent"
+import { createDataset, datasetsFilter, testBlacklist } from "../dataset"
+import { getDatasetWorker } from "../../libs/datalad-service"
+import { connect } from "mongoose"
+
+// Mock requests to Datalad service
+vi.mock("superagent")
+vi.mock("ioredis")
+vi.mock("../../libs/redis")
+vi.mock("../../config.ts")
+vi.mock("../../libs/notifications")
+
+describe("dataset model operations", () => {
+  describe("createDataset()", () => {
+    beforeAll(() => {
+      // Setup MongoDB with mongodb-memory-server
+      connect(globalThis.__MONGO_URI__)
+    })
+    it("resolves to dataset id string", async () => {
+      const user = { id: "1234" }
+      const { id: dsId } = await createDataset(user.id, user, {
+        affirmedDefaced: true,
+        affirmedConsent: true,
+      })
+      expect(dsId).toHaveLength(8)
+      expect(dsId.slice(0, 2)).toBe("ds")
+    })
+    it("posts to the DataLad /datasets/{dsId} endpoint", async () => {
+      const user = { id: "1234" }
+      // Reset call count for request.post
+      request.post.mockClear()
+      const { id: dsId } = await createDataset(user.id, user, {
+        affirmedDefaced: true,
+        affirmedConsent: true,
+      })
+      expect(request.post).toHaveBeenCalledTimes(1)
+      expect(request.post).toHaveBeenCalledWith(
+        expect.stringContaining(`${getDatasetWorker(dsId)}/datasets/`),
+      )
+    })
+  })
+  describe("datasetsFilter()", () => {
+    describe("filterBy: {all: true} ", () => {
+      it("returns the specified match for regular users", () => {
+        const testMatch = { test: "match" }
+        expect(
+          datasetsFilter({
+            userId: "1234",
+            admin: false,
+            filterBy: { all: true },
+          })(testMatch)[0].$match,
+        ).toBe(testMatch)
+      })
+      it("excludes match argument for admins", () => {
+        const testMatch = { test: "match" }
+        expect(
+          datasetsFilter({
+            userId: "5678",
+            admin: true,
+            filterBy: { all: true },
+          })(testMatch),
+        ).not.toBe(testMatch)
+      })
+    })
+    describe("filterBy: {invalid: true}", () => {
+      it("returns the correct number of stages", () => {
+        expect(
+          datasetsFilter({ filterBy: { invalid: true } })({}),
+        ).toHaveLength(4)
+      })
+    })
+    describe("filterBy: {invalid: true, public: true}", () => {
+      it("returns the same number of stages as invalid: true", () => {
+        expect(
+          datasetsFilter({ filterBy: { invalid: true, public: true } })({}),
+        ).toHaveLength(4)
+      })
+      it("returns one less stage for admins with all", () => {
+        expect(
+          datasetsFilter({
+            admin: true,
+            filterBy: { invalid: true, public: true, all: true },
+          })({}),
+        ).toHaveLength(3)
+      })
+    })
+    describe("testBlacklist", () => {
+      it("returns false for .bidsignore", () => {
+        expect(testBlacklist("", ".bidsignore")).toBe(false)
+      })
+      it("returns true for .git paths", () => {
+        expect(testBlacklist(".git", "HEAD")).toBe(true)
+      })
+      it("returns true for root level .DS_Store files", () => {
+        expect(testBlacklist("", ".DS_Store")).toBe(true)
+      })
+      it("returns true for nested .DS_Store files", () => {
+        expect(testBlacklist("sub-01/anat/", ".DS_Store")).toBe(true)
+      })
+      // https://github.com/OpenNeuroOrg/openneuro/issues/2519
+      it("skips ._ prefixed files created by macOS", () => {
+        expect(testBlacklist("", "._.DS_Store")).toBe(true)
+        expect(testBlacklist("stimuli/", "._1002.png")).toBe(true)
+        expect(testBlacklist("stimuli/", "test._1002.png")).toBe(false)
+      })
+    })
+  })
+})
package/src/datalad/__tests__/description.spec.ts CHANGED
@@ -0,0 +1,141 @@
+import { vi } from "vitest"
+import {
+  appendSeniorAuthor,
+  getDescriptionObject,
+  repairDescriptionTypes,
+} from "../description"
+
+// Mock requests to Datalad service
+vi.mock("ioredis")
+vi.mock("../../config.ts")
+
+describe("datalad dataset descriptions", () => {
+  describe("appendSeniorAuthor", () => {
+    it("returns author out of several", () => {
+      expect(
+        appendSeniorAuthor({
+          Authors: ["A. Bee", "C. Dee", "E. Eff"],
+          Name: "test dataset",
+        }),
+      ).toHaveProperty("SeniorAuthor", "E. Eff")
+    })
+    it("returns a description when no Authors array is provided", () => {
+      expect(
+        appendSeniorAuthor({ Authors: null, Name: "test dataset" }),
+      ).toHaveProperty("Name", "test dataset")
+    })
+    it("returns a description when no Authors array is empty", () => {
+      expect(
+        appendSeniorAuthor({ Authors: [], Name: "test dataset" }),
+      ).toHaveProperty("Name", "test dataset")
+    })
+  })
+  describe("repairDescriptionTypes", () => {
+    it("converts strings to one element arrays for array fields", () => {
+      const description = {
+        Authors: "Not, An Array",
+        BIDSVersion: "1.2.0",
+        ReferencesAndLinks: "https://openneuro.org",
+        Funding: ["This one", "is correct"],
+        EthicsApprovals: "Also, Not, Array",
+      }
+      const repaired = repairDescriptionTypes(description)
+      // Check for discarded fields
+      expect(repaired.BIDSVersion).toBe(description.BIDSVersion)
+      // Check for extra fields
+      expect(repaired.DatasetDOI).toBe(undefined)
+      // Test each repaired field for type correct value
+      expect(Array.isArray(repaired.Authors)).toBe(true)
+      expect(Array.isArray(repaired.ReferencesAndLinks)).toBe(true)
+      expect(Array.isArray(repaired.Funding)).toBe(true)
+      expect(Array.isArray(repaired.EthicsApprovals)).toBe(true)
+    })
+    it("converts any invalid value to string values for string fields", () => {
+      const description = {
+        BIDSVersion: "1.2.0",
+        Name: 1.5,
+        DatasetDOI: ["Should", "not", "be", "an", "array"],
+        Acknowledgements: ["Should not be an array"],
+        HowToAcknowledge: Symbol(), // This can't serialize but just in case
+      }
+      const repaired = repairDescriptionTypes(description)
+      // Check for discarded fields
+      expect(repaired.BIDSVersion).toBe(description.BIDSVersion)
+      // Check for extra fields
+      expect(repaired.Authors).toBe(undefined)
+      // Check converted types
+      expect(typeof repaired.Name).toBe("string")
+      expect(typeof repaired.DatasetDOI).toBe("string")
+      expect(typeof repaired.Acknowledgements).toBe("string")
+      expect(typeof repaired.HowToAcknowledge).toBe("string")
+    })
+    it("returns correct types for empty strings", () => {
+      const description = {
+        Name: "Classification learning",
+        License:
+          "This dataset is made available under the Public Domain Dedication and License \nv1.0, whose full text can be found at \nhttp://www.opendatacommons.org/licenses/pddl/1.0/. \nWe hope that all users will follow the ODC Attribution/Share-Alike \nCommunity Norms (http://www.opendatacommons.org/norms/odc-by-sa/); \nin particular, while not legally required, we hope that all users \nof the data will acknowledge the OpenfMRI project and NSF Grant \nOCI-1131441 (R. Poldrack, PI) in any publications.",
+        Authors: "",
+        Acknowledgements: "",
+        HowToAcknowledge: "",
+        Funding: "",
+        ReferencesAndLinks: "",
+        DatasetDOI: "",
+        BIDSVersion: "1.0.0",
+      }
+      const repaired = repairDescriptionTypes(description)
+      expect(Array.isArray(repaired.Authors)).toBe(true)
+      expect(Array.isArray(repaired.ReferencesAndLinks)).toBe(true)
+      expect(Array.isArray(repaired.Funding)).toBe(true)
+    })
+  })
+  describe("getDescriptionObject()", () => {
+    beforeAll(() => {
+      global.fetch = vi.fn()
+    })
+    it("returns the parsed dataset_description.json object", async () => {
+      // @ts-expect-error Fetch mock includes mockResolvedValue
+      fetch.mockResolvedValue({
+        json: () =>
+          Promise.resolve({ Name: "Balloon Analog Risk-taking Task" }),
+        headers: {
+          get: () => "application/json",
+        },
+        status: 200,
+      })
+      const description = await getDescriptionObject("ds000001", "1.0.0")
+      expect(description).toEqual({ Name: "Balloon Analog Risk-taking Task" })
+    })
+    it("handles a corrupted response", async () => {
+      global.fetch = vi.fn()
+      // @ts-expect-error Fetch mock includes mockResolvedValue
+      fetch.mockResolvedValue({
+        json: () => Promise.reject("JSON could not be parsed"),
+        headers: {
+          get: () => "application/json",
+        },
+        status: 400,
+      })
+      await expect(getDescriptionObject("ds000001", "1.0.0")).rejects.toEqual(
+        Error(
+          "Backend request failed, dataset_description.json may not exist or may be non-JSON (type: application/json, status: 400)",
+        ),
+      )
+    })
+    it("throws an error when nothing is returned", async () => {
+      global.fetch = vi.fn()
+      // @ts-expect-error Fetch mock includes mockResolvedValue
+      fetch.mockResolvedValue({
+        json: () => Promise.reject("JSON could not be parsed"),
+        headers: {
+          get: () => "application/json",
+        },
+        status: 404,
+      })
+      await expect(getDescriptionObject("ds000001", "1.0.0")).rejects.toEqual(
+        Error(
+          "Backend request failed, dataset_description.json may not exist or may be non-JSON (type: application/json, status: 404)",
+        ),
+      )
+    })
+  })
+})
package/src/datalad/__tests__/files.spec.ts CHANGED
@@ -0,0 +1,77 @@
+import { vi } from "vitest"
+import {
+  computeTotalSize,
+  decodeFilePath,
+  encodeFilePath,
+  fileUrl,
+} from "../files"
+
+vi.mock("ioredis")
+vi.mock("../../config.ts")
+
+const filename = "sub-01/anat/sub-01_T1w.nii.gz"
+
+const mockRootFiles = [
+  { filename: "README" },
+  { filename: "dataset_description.json" },
+]
+const mockSub01 = [
+  { filename: "sub-01/anat/sub-01_T1w.nii.gz" },
+  { filename: "sub-01/func/sub-01_task-onebacktask_run-01_bold.nii.gz" },
+]
+const mockSub02 = [
+  { filename: "sub-02/anat/sub-02_T1w.nii.gz" },
+  { filename: "sub-02/func/sub-02_task-onebacktask_run-01_bold.nii.gz" },
+]
+const mockSub03 = [
+  { filename: "sub-03/anat/sub-03_T1w.nii.gz" },
+  { filename: "sub-03/func/sub-03_task-onebacktask_run-01_bold.nii.gz" },
+]
+const mockDerivatives = [{ filename: "derivatives/groundbreaking_output.html" }]
+const mockFiles = [
+  ...mockRootFiles,
+  ...mockSub01,
+  ...mockSub02,
+  ...mockSub03,
+  ...mockDerivatives,
+]
+
+describe("datalad files", () => {
+  describe("encodeFilePath()", () => {
+    it("should encode a nested path", () => {
+      expect(encodeFilePath(filename)).toBe("sub-01:anat:sub-01_T1w.nii.gz")
+    })
+  })
+  describe("decodeFilePath()", () => {
+    it("decodes a file path", () => {
+      expect(decodeFilePath("sub-01:anat:sub-01_T1w.nii.gz")).toBe(filename)
+    })
+  })
+  describe("fileUrl()", () => {
+    it("returns a working URL", () => {
+      expect(fileUrl("ds000001", "", filename)).toBe(
+        "http://datalad-0/datasets/ds000001/files/sub-01:anat:sub-01_T1w.nii.gz",
+      )
+    })
+    it("handles path nesting", () => {
+      expect(fileUrl("ds000001", "sub-01/anat", "sub-01_T1w.nii.gz")).toBe(
+        "http://datalad-0/datasets/ds000001/files/sub-01:anat:sub-01_T1w.nii.gz",
+      )
+    })
+  })
+  describe("computeTotalSize()", () => {
+    it("computes the size correctly", () => {
+      const mockFileSizes = [
+        { filename: "README", size: 234 },
+        { filename: "dataset_description.json", size: 432 },
+        { filename: "sub-01/anat/sub-01_T1w.nii.gz", size: 10858 },
+        {
+          filename: "sub-01/func/sub-01_task-onebacktask_run-01_bold.nii.gz",
+          size: 1945682,
+        },
+      ]
+      // @ts-expect-error Test is mocking this
+      expect(computeTotalSize(mockFileSizes)).toBe(1957206)
+    })
+  })
+})