@openneuro/server 4.30.2 → 4.31.0

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.
@@ -0,0 +1,65 @@
1
+ import { redis } from "../../libs/redis"
2
+ import type { DatasetOrSnapshot } from "../../utils/datasetOrSnapshot"
3
+ import { latestSnapshot } from "./snapshots"
4
+ import { description } from "../../datalad/description"
5
+ import Metadata from "../../models/metadata"
6
+ import CacheItem, { CacheType } from "../../cache/item"
7
+ import * as Sentry from "@sentry/node"
8
+ import fundedAwards from "../../data/funded_awards.json"
9
+
10
+ const brainInitiativeMatch = new RegExp("brain.initiative", "i")
11
+
12
+ const brainInitiativeGrants = fundedAwards.map((award) =>
13
+ award.field_project_number.replace(/[^a-zA-Z0-9]/g, "")
14
+ )
15
+
16
+ /**
17
+ * Check for any Brain Initiative metadata
18
+ */
19
+ export const brainInitiative = async (
20
+ dataset: DatasetOrSnapshot,
21
+ _,
22
+ context,
23
+ ): Promise<boolean> => {
24
+ const cache = new CacheItem(
25
+ redis,
26
+ CacheType.brainInitiative,
27
+ [dataset.id],
28
+ 86400,
29
+ )
30
+ return await cache.get(async () => {
31
+ try {
32
+ const metadata = await Metadata.findOne({ datasetId: dataset.id })
33
+ if (metadata.grantFunderName.match(brainInitiativeMatch)) {
34
+ return true
35
+ } else {
36
+ // Fetch snapshot if metadata didn't match
37
+ const snapshot = await latestSnapshot(dataset, null, context)
38
+ const snapshotDescription = await description(snapshot)
39
+ for (const funding of snapshotDescription.Funding) {
40
+ if (funding.match(brainInitiativeMatch)) {
41
+ return true
42
+ }
43
+ }
44
+ // Check for grant ids too - filter to only alphanumeric to improve matching across format differences
45
+ const identifier = metadata.grantIdentifier.replace(/[^a-zA-Z0-9]/g, "")
46
+ for (const grant of brainInitiativeGrants) {
47
+ if (
48
+ identifier.includes(grant)
49
+ ) {
50
+ return true
51
+ }
52
+ for (const funding of snapshotDescription.Funding) {
53
+ if (funding.replace(/[^a-zA-Z0-9]/g, "").includes(grant)) {
54
+ return true
55
+ }
56
+ }
57
+ }
58
+ }
59
+ return false
60
+ } catch (_err) {
61
+ Sentry.captureException(_err)
62
+ return false
63
+ }
64
+ })
65
+ }
@@ -19,6 +19,7 @@ import { reviewers } from "./reviewer"
19
19
  import { getDatasetWorker } from "../../libs/datalad-service"
20
20
  import { getFileName } from "../../datalad/files"
21
21
  import { onBrainlife } from "./brainlife"
22
+ import { brainInitiative } from "./brainInitiative"
22
23
  import { derivatives } from "./derivatives"
23
24
  import { promiseTimeout } from "../../utils/promiseTimeout"
24
25
  import semver from "semver"
@@ -301,6 +302,7 @@ const Dataset = {
301
302
  history,
302
303
  worker,
303
304
  reviewers,
305
+ brainInitiative,
304
306
  }
305
307
 
306
308
  export default Dataset
@@ -1,5 +1,5 @@
1
1
  import Snapshot from "../../models/snapshot"
2
- import type { LeanDocument } from "mongoose"
2
+ import type { FlattenMaps } from "mongoose"
3
3
  import DatasetModel from "../../models/dataset"
4
4
  import MetadataModel from "../../models/metadata"
5
5
  import type { MetadataDocument } from "../../models/metadata"
@@ -15,7 +15,7 @@ export const metadata = async (
15
15
  dataset,
16
16
  _,
17
17
  context,
18
- ): Promise<LeanDocument<MetadataDocument>> => {
18
+ ): Promise<FlattenMaps<MetadataDocument>> => {
19
19
  const record = await MetadataModel.findOne({
20
20
  datasetId: dataset.id,
21
21
  }).lean()
@@ -73,11 +73,11 @@ export const addMetadata = async (obj, { datasetId, metadata }) => {
73
73
  */
74
74
  export async function publicMetadata(
75
75
  _obj,
76
- ): Promise<LeanDocument<MetadataDocument>[]> {
76
+ ): Promise<FlattenMaps<MetadataDocument>[]> {
77
77
  const datasets = await DatasetModel.find({
78
78
  public: true,
79
79
  }).lean()
80
- const dsMetadata: LeanDocument<MetadataDocument>[] = []
80
+ const dsMetadata: FlattenMaps<MetadataDocument>[] = []
81
81
  for (const ds of datasets) {
82
82
  dsMetadata.push(await metadata(ds, null, {}))
83
83
  }
@@ -18,7 +18,7 @@ export async function permissions(ds): Promise<DatasetPermission> {
18
18
  (userPermission) => ({
19
19
  ...userPermission.toJSON(),
20
20
  user: user(ds, { id: userPermission.userId }),
21
- } as PermissionDocument & { user: Promise<UserDocument> }),
21
+ } as unknown as PermissionDocument & { user: Promise<UserDocument> }),
22
22
  ),
23
23
  }
24
24
  }
@@ -29,7 +29,7 @@ export const users = (obj, args, { userInfo }) => {
29
29
 
30
30
  export const removeUser = (obj, { id }, { userInfo }) => {
31
31
  if (userInfo.admin) {
32
- return User.findByIdAndRemove(id).exec()
32
+ return User.findByIdAndDelete(id).exec()
33
33
  } else {
34
34
  return Promise.reject(new Error("You must be a site admin to remove users"))
35
35
  }
@@ -22,7 +22,15 @@ export const validation = async (dataset, _, { userInfo }) => {
22
22
  { userInfo },
23
23
  )
24
24
  }
25
- return data
25
+ // Return with errors and warning counts appended
26
+ return {
27
+ ...data.toObject(),
28
+ errors: data.issues.filter((issue) =>
29
+ issue.severity === "error"
30
+ ).length,
31
+ warnings:
32
+ data.issues.filter((issue) => issue.severity === "warning").length,
33
+ }
26
34
  })
27
35
  }
28
36
 
@@ -31,11 +39,18 @@ export const validation = async (dataset, _, { userInfo }) => {
31
39
  */
32
40
  export const snapshotValidation = async (snapshot) => {
33
41
  const datasetId = snapshot.id.split(":")[0]
34
- return Validation.findOne({
42
+ const validation = await Validation.findOne({
35
43
  id: snapshot.hexsha,
36
44
  datasetId,
37
- })
38
- .exec()
45
+ }).exec()
46
+ // Return with errors and warning counts appended
47
+ return {
48
+ ...validation.toObject(),
49
+ errors:
50
+ validation.issues.filter((issue) => issue.severity === "error").length,
51
+ warnings:
52
+ validation.issues.filter((issue) => issue.severity === "warning").length,
53
+ }
39
54
  }
40
55
 
41
56
  export function validationSeveritySort(a, b) {
@@ -410,6 +410,8 @@ export const typeDefs = `
410
410
  worker: String
411
411
  # Anonymous reviewers for this dataset
412
412
  reviewers: [DatasetReviewer]
413
+ # Dataset belongs to Brain Initiative
414
+ brainInitiative: Boolean
413
415
  }
414
416
 
415
417
  type DatasetDerivatives {
@@ -475,8 +477,13 @@ export const typeDefs = `
475
477
  # Hash of the data validated
476
478
  id: String
477
479
  datasetId: String
480
+ # Issue objects returned by BIDS validator
478
481
  issues: [ValidatorIssue]
479
482
  codeMessages: [ValidatorCodeMessage]
483
+ # Count of errors
484
+ errors: Int
485
+ # Count of warnings
486
+ warnings: Int
480
487
  }
481
488
 
482
489
  # Ephemeral draft or working tree for a dataset
@@ -0,0 +1,22 @@
1
+ /**
2
+ * Migration of accounts from previous providers (Google) to ORCID as primary authentication
3
+ *
4
+ * Runs on successful oauth linking these accounts
5
+ *
6
+ * Either Google -> ORCID (creates / updates ORCID account with the Google account and merges any data)
7
+ * or ORCID + Google (merges Google account data into existing account)
8
+ */
9
+
10
+ /**
11
+ *
12
+ */
13
+ function migrationOrcidToGoogle() {
14
+
15
+ }
16
+
17
+ /**
18
+ *
19
+ */
20
+ function migrationGoogleToOrcid() {
21
+
22
+ }
@@ -3,6 +3,7 @@ import type { Document } from "mongoose"
3
3
  const { Schema, model } = mongoose
4
4
 
5
5
  export interface SubscriptionDocument extends Document {
6
+ _id: string
6
7
  datasetId: string
7
8
  userId: string
8
9
  }
@@ -4,6 +4,7 @@ import type { Document } from "mongoose"
4
4
  const { Schema, model } = mongoose
5
5
 
6
6
  export interface UserDocument extends Document {
7
+ _id: string
7
8
  id: string
8
9
  email: string
9
10
  name: string
package/tsconfig.json CHANGED
@@ -9,9 +9,8 @@
9
9
  "module": "CommonJS"
10
10
  },
11
11
  "include": ["./src"],
12
- "files": ["./src/lerna.json"],
12
+ "files": ["./src/lerna.json", "./src/data/funded_awards.json"],
13
13
  "references": [
14
- { "path": "../openneuro-client" },
15
14
  { "path": "../openneuro-search" }
16
15
  ]
17
16
  }