@openneuro/server 4.20.5 → 4.20.6-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +4 -6
- package/src/__mocks__/{config.js → config.ts} +5 -5
- package/src/app.ts +32 -31
- package/src/cache/item.ts +6 -7
- package/src/cache/types.ts +8 -8
- package/src/{config.js → config.ts} +6 -6
- package/src/datalad/__tests__/changelog.spec.ts +83 -0
- package/src/datalad/__tests__/dataset.spec.ts +109 -0
- package/src/datalad/__tests__/description.spec.ts +141 -0
- package/src/datalad/__tests__/files.spec.ts +77 -0
- package/src/datalad/__tests__/pagination.spec.ts +136 -0
- package/src/datalad/__tests__/{snapshots.spec.js → snapshots.spec.ts} +17 -17
- package/src/datalad/{analytics.js → analytics.ts} +4 -4
- package/src/datalad/{changelog.js → changelog.ts} +17 -14
- package/src/datalad/{dataset.js → dataset.ts} +95 -93
- package/src/datalad/{description.js → description.ts} +37 -37
- package/src/datalad/draft.ts +38 -0
- package/src/datalad/files.ts +26 -20
- package/src/datalad/{pagination.js → pagination.ts} +47 -47
- package/src/datalad/{readme.js → readme.ts} +13 -11
- package/src/datalad/{reexporter.js → reexporter.ts} +4 -4
- package/src/datalad/{snapshots.js → snapshots.ts} +56 -62
- package/src/datalad/{upload.js → upload.ts} +7 -5
- package/src/elasticsearch/elastic-client.ts +11 -0
- package/src/elasticsearch/reindex-dataset.ts +7 -7
- package/src/graphql/__tests__/__snapshots__/permissions.spec.ts.snap +5 -0
- package/src/graphql/__tests__/{comment.spec.js → comment.spec.ts} +17 -17
- package/src/graphql/__tests__/permissions.spec.ts +113 -0
- package/src/graphql/{permissions.js → permissions.ts} +14 -14
- package/src/graphql/resolvers/__tests__/brainlife.spec.ts +11 -11
- package/src/graphql/resolvers/__tests__/{dataset-search.spec.js → dataset-search.spec.ts} +25 -23
- package/src/graphql/resolvers/__tests__/dataset.spec.ts +175 -0
- package/src/graphql/resolvers/__tests__/derivatives.spec.ts +19 -19
- package/src/graphql/resolvers/__tests__/importRemoteDataset.spec.ts +20 -20
- package/src/graphql/resolvers/__tests__/permssions.spec.ts +35 -0
- package/src/graphql/resolvers/__tests__/snapshots.spec.ts +59 -0
- package/src/graphql/resolvers/__tests__/user.spec.ts +18 -0
- package/src/graphql/resolvers/brainlife.ts +4 -4
- package/src/graphql/resolvers/cache.ts +4 -4
- package/src/graphql/resolvers/{comment.js → comment.ts} +16 -16
- package/src/graphql/resolvers/{dataset-search.js → dataset-search.ts} +45 -43
- package/src/graphql/resolvers/{dataset.js → dataset.ts} +38 -52
- package/src/graphql/resolvers/datasetType.ts +3 -3
- package/src/graphql/resolvers/derivatives.ts +11 -11
- package/src/graphql/resolvers/description.ts +18 -0
- package/src/graphql/resolvers/{draft.js → draft.ts} +13 -13
- package/src/graphql/resolvers/{flaggedFiles.js → flaggedFiles.ts} +4 -4
- package/src/graphql/resolvers/{follow.js → follow.ts} +1 -1
- package/src/graphql/resolvers/git.ts +3 -3
- package/src/graphql/resolvers/history.ts +13 -0
- package/src/graphql/resolvers/importRemoteDataset.ts +12 -11
- package/src/graphql/resolvers/index.ts +25 -0
- package/src/graphql/resolvers/{issues.js → issues.ts} +9 -9
- package/src/graphql/resolvers/metadata.ts +8 -8
- package/src/graphql/resolvers/{mutation.js → mutation.ts} +26 -26
- package/src/graphql/resolvers/{newsletter.js → newsletter.ts} +2 -2
- package/src/graphql/resolvers/permissions.ts +15 -21
- package/src/graphql/resolvers/publish.ts +17 -0
- package/src/graphql/resolvers/query.ts +21 -0
- package/src/graphql/resolvers/{readme.js → readme.ts} +3 -3
- package/src/graphql/resolvers/{reexporter.js → reexporter.ts} +2 -2
- package/src/graphql/resolvers/relation.ts +5 -5
- package/src/graphql/resolvers/{reset.js → reset.ts} +2 -2
- package/src/graphql/resolvers/reviewer.ts +4 -4
- package/src/graphql/resolvers/{snapshots.js → snapshots.ts} +49 -49
- package/src/graphql/resolvers/{stars.js → stars.ts} +1 -1
- package/src/graphql/resolvers/summary.ts +3 -3
- package/src/graphql/resolvers/{upload.js → upload.ts} +5 -5
- package/src/graphql/resolvers/{user.js → user.ts} +16 -18
- package/src/graphql/resolvers/{validation.js → validation.ts} +12 -14
- package/src/graphql/{schema.js → schema.ts} +4 -6
- package/src/graphql/utils/{file.js → file.ts} +2 -2
- package/src/handlers/{comments.js → comments.ts} +11 -11
- package/src/handlers/{config.js → config.ts} +1 -1
- package/src/handlers/{datalad.js → datalad.ts} +22 -22
- package/src/handlers/{doi.js → doi.ts} +6 -6
- package/src/handlers/reviewer.ts +6 -6
- package/src/handlers/{sitemap.js → sitemap.ts} +19 -19
- package/src/handlers/stars.ts +11 -10
- package/src/handlers/{subscriptions.js → subscriptions.ts} +17 -16
- package/src/handlers/{users.js → users.ts} +3 -3
- package/src/libs/__tests__/apikey.spec.ts +25 -0
- package/src/libs/__tests__/datalad-service.spec.ts +27 -0
- package/src/libs/__tests__/{dataset.spec.js → dataset.spec.ts} +9 -9
- package/src/libs/{apikey.js → apikey.ts} +5 -5
- package/src/libs/authentication/__tests__/jwt.spec.ts +59 -0
- package/src/libs/authentication/{crypto.js → crypto.ts} +16 -16
- package/src/libs/authentication/google.ts +18 -0
- package/src/libs/authentication/jwt.ts +40 -33
- package/src/libs/authentication/{orcid.js → orcid.ts} +11 -11
- package/src/libs/authentication/{passport.js → passport.ts} +45 -30
- package/src/libs/authentication/{states.js → states.ts} +17 -20
- package/src/libs/{counter.js → counter.ts} +1 -1
- package/src/libs/{datalad-service.js → datalad-service.ts} +4 -4
- package/src/libs/dataset.ts +9 -0
- package/src/libs/doi/__tests__/__snapshots__/doi.spec.ts.snap +17 -0
- package/src/libs/doi/__tests__/doi.spec.ts +25 -0
- package/src/libs/doi/__tests__/normalize.spec.ts +19 -19
- package/src/libs/doi/{index.js → index.ts} +27 -21
- package/src/libs/doi/normalize.ts +2 -2
- package/src/libs/email/__tests__/index.spec.ts +14 -14
- package/src/libs/email/index.ts +4 -4
- package/src/libs/email/templates/__tests__/comment-created.spec.ts +12 -12
- package/src/libs/email/templates/__tests__/dataset-deleted.spec.ts +6 -6
- package/src/libs/email/templates/__tests__/owner-unsubscribed.spec.ts +6 -6
- package/src/libs/email/templates/__tests__/snapshot-created.spec.ts +9 -9
- package/src/libs/email/templates/__tests__/snapshot-reminder.spec.ts +7 -7
- package/src/libs/email/templates/comment-created.ts +2 -1
- package/src/libs/email/templates/dataset-deleted.ts +2 -1
- package/src/libs/email/templates/dataset-import-failed.ts +2 -1
- package/src/libs/email/templates/dataset-imported.ts +2 -1
- package/src/libs/email/templates/owner-unsubscribed.ts +2 -1
- package/src/libs/email/templates/snapshot-created.ts +2 -1
- package/src/libs/email/templates/snapshot-reminder.ts +2 -1
- package/src/libs/{notifications.js → notifications.ts} +100 -113
- package/src/libs/{orcid.js → orcid.ts} +20 -20
- package/src/libs/{redis.js → redis.ts} +6 -6
- package/src/models/__tests__/ingestDataset.spec.ts +15 -15
- package/src/models/analytics.ts +2 -2
- package/src/models/badAnnexObject.ts +6 -6
- package/src/models/comment.ts +10 -10
- package/src/models/counter.ts +2 -2
- package/src/models/dataset.ts +16 -16
- package/src/models/deletion.ts +3 -3
- package/src/models/deprecatedSnapshot.ts +2 -2
- package/src/models/doi.ts +2 -2
- package/src/models/file.ts +2 -2
- package/src/models/ingestDataset.ts +4 -4
- package/src/models/issue.ts +2 -2
- package/src/models/key.ts +2 -2
- package/src/models/mailgunIdentifier.ts +2 -2
- package/src/models/metadata.ts +3 -3
- package/src/models/newsletter.ts +3 -3
- package/src/models/notification.ts +2 -2
- package/src/models/permission.ts +4 -4
- package/src/models/reviewer.ts +7 -7
- package/src/models/snapshot.ts +2 -2
- package/src/models/stars.ts +6 -6
- package/src/models/subscription.ts +2 -2
- package/src/models/summary.ts +2 -2
- package/src/models/upload.ts +3 -3
- package/src/models/user.ts +4 -4
- package/src/{routes.js → routes.ts} +62 -62
- package/src/server.ts +9 -9
- package/src/utils/__tests__/datasetOrSnapshot.spec.ts +25 -25
- package/src/utils/__tests__/validateUrl.spec.ts +10 -10
- package/src/utils/datasetOrSnapshot.ts +2 -2
- package/src/utils/validateUrl.ts +1 -1
- package/src/datalad/__tests__/changelog.spec.js +0 -82
- package/src/datalad/__tests__/dataset.spec.js +0 -109
- package/src/datalad/__tests__/description.spec.js +0 -137
- package/src/datalad/__tests__/files.spec.js +0 -75
- package/src/datalad/__tests__/pagination.spec.js +0 -136
- package/src/datalad/draft.js +0 -37
- package/src/elasticsearch/elastic-client.js +0 -11
- package/src/graphql/__tests__/permissions.spec.js +0 -107
- package/src/graphql/pubsub.js +0 -5
- package/src/graphql/resolvers/__tests__/dataset.spec.js +0 -175
- package/src/graphql/resolvers/__tests__/permssions.spec.js +0 -34
- package/src/graphql/resolvers/__tests__/snapshots.spec.js +0 -58
- package/src/graphql/resolvers/__tests__/user.spec.js +0 -17
- package/src/graphql/resolvers/description.js +0 -29
- package/src/graphql/resolvers/history.js +0 -11
- package/src/graphql/resolvers/index.js +0 -25
- package/src/graphql/resolvers/publish.js +0 -17
- package/src/graphql/resolvers/query.js +0 -21
- package/src/graphql/resolvers/subscriptions.js +0 -81
- package/src/graphql/utils/publish-draft-update.js +0 -13
- package/src/libs/__tests__/apikey.spec.js +0 -24
- package/src/libs/__tests__/datalad-service.spec.js +0 -26
- package/src/libs/authentication/__tests__/jwt.spec.js +0 -23
- package/src/libs/authentication/globus.js +0 -11
- package/src/libs/authentication/google.js +0 -19
- package/src/libs/bidsId.js +0 -68
- package/src/libs/dataset.js +0 -9
- package/src/libs/doi/__tests__/doi.spec.js +0 -24
- package/src/libs/redis-pubsub.js +0 -5
- package/src/libs/request.js +0 -155
- package/src/libs/scitran.js +0 -25
- package/src/libs/subscription-server.js +0 -20
- package/src/libs/testing-utils.js +0 -17
- package/src/persistent/datasets/.gitignore +0 -3
- package/src/persistent/temp/.gitignore +0 -3
- /package/src/libs/__mocks__/{notifications.js → notifications.ts} +0 -0
- /package/src/libs/authentication/{verifyUser.js → verifyUser.ts} +0 -0
package/src/datalad/files.ts
CHANGED

@@ -1,14 +1,14 @@
-import request from 'superagent'
-import { redis } from '../libs/redis'
-import CacheItem, { CacheType } from '../cache/item'
-import { getDatasetWorker } from '../libs/datalad-service'
+import request from "superagent"
+import { redis } from "../libs/redis"
+import CacheItem, { CacheType } from "../cache/item"
+import { getDatasetWorker } from "../libs/datalad-service"
 
 /**
  * Convert to URL compatible path
  * @param {String} path
  */
 export const encodeFilePath = (path: string): string => {
-  return path.replace(new RegExp('/', 'g'), ':')
+  return path.replace(new RegExp("/", "g"), ":")
 }
 
 /**
@@ -16,7 +16,7 @@ export const encodeFilePath = (path: string): string => {
  * @param {String} path
  */
 export const decodeFilePath = (path: string): string => {
-  return path.replace(new RegExp(':', 'g'), '/')
+  return path.replace(new RegExp(":", "g"), "/")
 }
 
 /**
@@ -25,7 +25,7 @@ export const decodeFilePath = (path: string): string => {
  * @param {String} filename
  */
 export const getFileName = (path: string, filename: string): string => {
-  const filePath = path ? [path, filename].join('/') : filename
+  const filePath = path ? [path, filename].join("/") : filename
   return filename ? encodeFilePath(filePath) : encodeFilePath(path)
 }
 
@@ -44,13 +44,17 @@ export const fileUrl = (
 ): string => {
   const fileName = getFileName(path, filename)
   if (revision) {
-    return `http://${getDatasetWorker(
-      datasetId,
-    )}/datasets/${datasetId}/snapshots/${revision}/files/${fileName}`
+    return `http://${
+      getDatasetWorker(
+        datasetId,
+      )
+    }/datasets/${datasetId}/snapshots/${revision}/files/${fileName}`
   } else {
-    return `http://${getDatasetWorker(
-      datasetId,
-    )}/datasets/${datasetId}/files/${fileName}`
+    return `http://${
+      getDatasetWorker(
+        datasetId,
+      )
+    }/datasets/${datasetId}/files/${fileName}`
   }
 }
 
@@ -88,22 +92,24 @@ export const getFiles = (datasetId, treeish): Promise<[DatasetFile]> => {
     treeish.substring(0, 7),
   ])
   return cache.get(
-    doNotCache =>
+    (doNotCache) =>
      request
        .get(
-          `${getDatasetWorker(
-            datasetId,
-          )}/datasets/${datasetId}/tree/${treeish}`,
+          `${
+            getDatasetWorker(
+              datasetId,
+            )
+          }/datasets/${datasetId}/tree/${treeish}`,
        )
-        .set('Accept', 'application/json')
-        .then(response => {
+        .set("Accept", "application/json")
+        .then((response) => {
          if (response.status === 200) {
            const {
              body: { files },
            } = response
            for (const f of files) {
              // Skip caching this tree if it doesn't contain S3 URLs - likely still exporting
-              if (!f.directory && !f.urls[0].includes('s3.amazonaws.com')) {
+              if (!f.directory && !f.urls[0].includes("s3.amazonaws.com")) {
                doNotCache(true)
                break
              }
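
Note: encodeFilePath and decodeFilePath above are inverses. A path is flattened into a single URL segment by swapping "/" for ":", and expanded back the same way. A minimal round-trip sketch using the exports from this file (the BIDS-style path is example data):

    import { decodeFilePath, encodeFilePath, getFileName } from "./files"

    const encoded = encodeFilePath("sub-01/anat/T1w.nii.gz") // "sub-01:anat:T1w.nii.gz"
    const decoded = decodeFilePath(encoded) // "sub-01/anat/T1w.nii.gz" again
    // getFileName joins an optional directory and a filename before encoding
    getFileName("sub-01/anat", "T1w.nii.gz") // "sub-01:anat:T1w.nii.gz"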

package/src/datalad/pagination.ts
CHANGED

@@ -1,14 +1,14 @@
 // Helpers for pagination
-import Dataset from '../models/dataset'
+import Dataset from "../models/dataset"
 
-const sortEnumToInt = val => (val === 'ascending' ? 1 : -1)
+const sortEnumToInt = (val) => (val === "ascending" ? 1 : -1)
 
 /**
  * Takes an API sort request and converts it to MongoDB
  * @param {object} sortOptions {created: 'ascending'}
  * @returns {object} Mongo suitable sort arguments {created: 1}
  */
-export const enumToMongoSort = sortOptions =>
+export const enumToMongoSort = (sortOptions) =>
   Object.keys(sortOptions).reduce((mongoSort, val) => {
     mongoSort[val] = sortEnumToInt(sortOptions[val])
     return mongoSort
@@ -18,12 +18,12 @@ export const enumToMongoSort = sortOptions =>
  * Encode a cursor offset in a mongodb collection
  * @param {object} value cursor fields
  */
-export const apiCursor = value => {
-  return Buffer.from(JSON.stringify(value)).toString('base64')
+export const apiCursor = (value) => {
+  return Buffer.from(JSON.stringify(value)).toString("base64")
 }
 
-export const decodeCursor = cursor => {
-  return JSON.parse(Buffer.from(cursor, 'base64').toString())
+export const decodeCursor = (cursor) => {
+  return JSON.parse(Buffer.from(cursor, "base64").toString())
 }
 
 /**
@@ -39,14 +39,14 @@ export const applyCursorToEdges = (edges, offset) => {
 }
 
 // Limit to options.first in range 1 <= limit <= 100
-export const maxLimit = limit => Math.max(Math.min(limit, 100), 1)
+export const maxLimit = (limit) => Math.max(Math.min(limit, 100), 1)
 
 // Decode cursor from options object
-export const getOffsetFromCursor = options => {
-  if (options.hasOwnProperty('after') && options.after) {
+export const getOffsetFromCursor = (options) => {
+  if (options.hasOwnProperty("after") && options.after) {
     return decodeCursor(options.after).offset
   }
-  if (options.hasOwnProperty('before') && options.before) {
+  if (options.hasOwnProperty("before") && options.before) {
     return (
       decodeCursor(options.before).offset - Math.max(maxLimit(options.first), 0)
     )
@@ -60,72 +60,72 @@ export const getOffsetFromCursor = options => {
  * @param {object} options Query parameters
  * @returns {array} Steps required to sort any specified fields
  */
-export const sortAggregate = options => {
+export const sortAggregate = (options) => {
   const sortingStages = []
   const finalSort = {}
-  if (options.hasOwnProperty('orderBy')) {
-    if ('created' in options.orderBy && options.orderBy.created) {
-      finalSort['_id'] = sortEnumToInt(options.orderBy.created)
+  if (options.hasOwnProperty("orderBy")) {
+    if ("created" in options.orderBy && options.orderBy.created) {
+      finalSort["_id"] = sortEnumToInt(options.orderBy.created)
     }
-    if ('name' in options.orderBy && options.orderBy.name) {
-      finalSort['name'] = sortEnumToInt(options.orderBy.name)
+    if ("name" in options.orderBy && options.orderBy.name) {
+      finalSort["name"] = sortEnumToInt(options.orderBy.name)
     }
-    if ('uploader' in options.orderBy && options.orderBy.uploader) {
+    if ("uploader" in options.orderBy && options.orderBy.uploader) {
       sortingStages.push({
         $lookup: {
-          from: 'users',
-          localField: 'uploader',
-          foreignField: 'id',
-          as: 'uploadUser',
+          from: "users",
+          localField: "uploader",
+          foreignField: "id",
+          as: "uploadUser",
         },
       })
-      finalSort['uploadUser.name'] = sortEnumToInt(options.orderBy.uploader)
+      finalSort["uploadUser.name"] = sortEnumToInt(options.orderBy.uploader)
     }
-    if ('stars' in options.orderBy && options.orderBy.stars) {
+    if ("stars" in options.orderBy && options.orderBy.stars) {
       // Lookup related collection values
       sortingStages.push({
         $lookup: {
-          from: 'stars',
-          localField: 'id',
-          foreignField: 'datasetId',
-          as: 'stars',
+          from: "stars",
+          localField: "id",
+          foreignField: "datasetId",
+          as: "stars",
        },
      })
      // Count stars
      sortingStages.push({
        $addFields: {
-          starsCount: { $size: '$stars' },
+          starsCount: { $size: "$stars" },
        },
      })
-      finalSort['starsCount'] = sortEnumToInt(options.orderBy.stars)
+      finalSort["starsCount"] = sortEnumToInt(options.orderBy.stars)
     }
-    if ('downloads' in options.orderBy && options.orderBy.downloads) {
-      finalSort['downloads'] = sortEnumToInt(options.orderBy.downloads)
+    if ("downloads" in options.orderBy && options.orderBy.downloads) {
+      finalSort["downloads"] = sortEnumToInt(options.orderBy.downloads)
    }
-    if ('views' in options.orderBy && options.orderBy.views) {
-      finalSort['views'] = sortEnumToInt(options.orderBy.views)
+    if ("views" in options.orderBy && options.orderBy.views) {
+      finalSort["views"] = sortEnumToInt(options.orderBy.views)
    }
-    if ('subscriptions' in options.orderBy && options.orderBy.subscriptions) {
+    if ("subscriptions" in options.orderBy && options.orderBy.subscriptions) {
      sortingStages.push({
        $lookup: {
-          from: 'subscriptions',
-          localField: 'id',
-          foreignField: 'datasetId',
-          as: 'subscriptions',
+          from: "subscriptions",
+          localField: "id",
+          foreignField: "datasetId",
+          as: "subscriptions",
        },
      })
      // Count stars
      sortingStages.push({
        $addFields: {
-          subscriptionsCount: { $size: '$subscriptions' },
+          subscriptionsCount: { $size: "$subscriptions" },
        },
      })
-      finalSort['subscriptionsCount'] = sortEnumToInt(
+      finalSort["subscriptionsCount"] = sortEnumToInt(
        options.orderBy.subscriptions,
      )
    }
-    if ('publishDate' in options.orderBy && options.orderBy.publishDate) {
-      finalSort['publishDate'] = sortEnumToInt(options.orderBy.publishDate)
+    if ("publishDate" in options.orderBy && options.orderBy.publishDate) {
+      finalSort["publishDate"] = sortEnumToInt(options.orderBy.publishDate)
    }
    sortingStages.push({ $sort: finalSort })
  }
@@ -137,7 +137,7 @@ export const sortAggregate = options => {
  * @param {object} options Query options such as {limit: 5, orderBy: {creation: 'descending'}}
  * @returns {(presortAggregate: array) => object} presortAggregate Any presorting / pagination constraints
  */
-export const datasetsConnection = options => presortAggregate => {
+export const datasetsConnection = (options) => (presortAggregate) => {
   const offset = getOffsetFromCursor(options)
   const realLimit = maxLimit(options.first)
   // One query for match -> count -> sort -> skip -> limit
@@ -145,18 +145,18 @@ export const datasetsConnection = options => presortAggregate => {
     ...presortAggregate,
     ...sortAggregate(options),
     {
-      $group: { _id: null, count: { $sum: 1 }, datasets: { $push: '$$ROOT' } },
+      $group: { _id: null, count: { $sum: 1 }, datasets: { $push: "$$ROOT" } },
     },
     {
       $project: {
         count: 1,
-        datasets: { $slice: ['$datasets', offset, realLimit] },
+        datasets: { $slice: ["$datasets", offset, realLimit] },
       },
     },
   ]
   return Dataset.aggregate(pipeline)
     .exec()
-    .then(results => {
+    .then((results) => {
       const result = results.pop()
       if (result) {
         const { datasets, count } = result
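
Note: the cursor helpers above implement opaque offset cursors. An offset object is JSON-serialized and base64-encoded on the way out, decoded on the way back in, and maxLimit clamps page sizes to the 1..100 range. A minimal sketch using the exports from this file:

    import { apiCursor, decodeCursor, maxLimit } from "./pagination"

    const cursor = apiCursor({ offset: 25 }) // "eyJvZmZzZXQiOjI1fQ=="
    decodeCursor(cursor) // { offset: 25 }
    maxLimit(500) // 100, values above the cap are clamped
    maxLimit(-3) // 1, values below the floor are raised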

package/src/datalad/readme.ts
CHANGED

@@ -1,16 +1,18 @@
-import { addFileString, commitFiles } from './dataset'
-import { redis } from '../libs/redis'
-import CacheItem, { CacheType } from '../cache/item'
-import { getDatasetWorker } from '../libs/datalad-service'
-import { datasetOrSnapshot } from '../utils/datasetOrSnapshot'
+import { addFileString, commitFiles } from "./dataset"
+import { redis } from "../libs/redis"
+import CacheItem, { CacheType } from "../cache/item"
+import { getDatasetWorker } from "../libs/datalad-service"
+import { datasetOrSnapshot } from "../utils/datasetOrSnapshot"
 
 export const readmeUrl = (datasetId, revision) => {
-  return `http://${getDatasetWorker(
-    datasetId,
-  )}/datasets/${datasetId}/snapshots/${revision}/files/README`
+  return `http://${
+    getDatasetWorker(
+      datasetId,
+    )
+  }/datasets/${datasetId}/snapshots/${revision}/files/README`
 }
 
-export const readme = obj => {
+export const readme = (obj) => {
   const { datasetId, revision } = datasetOrSnapshot(obj)
   const cache = new CacheItem(redis, CacheType.readme, [
     datasetId,
@@ -33,7 +35,7 @@ export const readme = obj => {
 }
 
 export const setReadme = (datasetId, readme, user) => {
-  return addFileString(datasetId, 'README', 'text/plain', readme).then(() =>
-    commitFiles(datasetId, user),
+  return addFileString(datasetId, "README", "text/plain", readme).then(() =>
+    commitFiles(datasetId, user)
   )
 }

package/src/datalad/reexporter.ts
CHANGED

@@ -1,13 +1,13 @@
-import request from 'superagent'
-import { getDatasetWorker } from '../libs/datalad-service'
+import request from "superagent"
+import { getDatasetWorker } from "../libs/datalad-service"
 
 /**
  * Run remote reexporter.
  */
-export const runReexporter = datasetId => {
+export const runReexporter = (datasetId) => {
   const worker = getDatasetWorker(datasetId)
   const uri = `${worker}/datasets/${datasetId}/reexport-remotes`
   return request.post(uri)
 }
 
-export const CHECK = 'hi'
+export const CHECK = "hi"

package/src/datalad/snapshots.ts
CHANGED

@@ -1,26 +1,25 @@
 /**
  * Get snapshots from datalad-service tags
  */
-import request from 'superagent'
-import { reindexDataset } from '../elasticsearch/reindex-dataset'
-import { redis, redlock } from '../libs/redis'
-import CacheItem, { CacheType } from '../cache/item'
-import config from '../config'
-import pubsub from '../graphql/pubsub.js'
+import request from "superagent"
+import { reindexDataset } from "../elasticsearch/reindex-dataset"
+import { redis, redlock } from "../libs/redis"
+import CacheItem, { CacheType } from "../cache/item"
+import config from "../config"
 import {
-  updateDatasetName,
   snapshotCreationComparison,
-} from '../graphql/resolvers/dataset'
-import { description } from '../graphql/resolvers/description'
-import doiLib from '../libs/doi/index'
-import { getFiles } from './files'
-import { generateDataladCookie } from '../libs/authentication/jwt'
-import notifications from '../libs/notifications'
-import Dataset from '../models/dataset'
-import Snapshot from '../models/snapshot'
-import { updateDatasetRevision } from './draft'
-import { getDatasetWorker } from '../libs/datalad-service'
-import { join } from 'path'
+  updateDatasetName,
+} from "../graphql/resolvers/dataset"
+import { description } from "../graphql/resolvers/description"
+import doiLib from "../libs/doi/index"
+import { getFiles } from "./files"
+import { generateDataladCookie } from "../libs/authentication/jwt"
+import notifications from "../libs/notifications"
+import Dataset from "../models/dataset"
+import Snapshot, { SnapshotDocument } from "../models/snapshot"
+import { updateDatasetRevision } from "./draft"
+import { getDatasetWorker } from "../libs/datalad-service"
+import { join } from "path"
 
 const lockSnapshot = (datasetId, tag) => {
   return redlock.lock(
@@ -53,17 +52,18 @@ const createIfNotExistsDoi = async (
   // Mint a DOI
   // Get the newest description
   try {
-    const oldDesc = await description({ id: datasetId, revision: 'HEAD' })
+    const oldDesc = await description({ id: datasetId, revision: "HEAD" })
     const snapshotDoi = await doiLib.registerSnapshotDoi(
       datasetId,
       tag,
       oldDesc,
     )
-    if (snapshotDoi)
-      descriptionFieldUpdates['DatasetDOI'] = `doi:${snapshotDoi}`
+    if (snapshotDoi) {
+      descriptionFieldUpdates["DatasetDOI"] = `doi:${snapshotDoi}`
+    }
   } catch (err) {
     console.error(err)
-    throw new Error('DOI minting failed.')
+    throw new Error("DOI minting failed.")
   }
 }
@@ -81,8 +81,8 @@ const postSnapshot = async (
     description_fields: descriptionFieldUpdates,
     snapshot_changes: snapshotChanges,
   })
-    .set('Accept', 'application/json')
-    .set('Cookie', generateDataladCookie(config)(user))
+    .set("Accept", "application/json")
+    .set("Cookie", generateDataladCookie(config)(user))
 
   return response.body
 }
@@ -95,11 +95,11 @@ const postSnapshot = async (
  * @param {string} datasetId Dataset accession number
  * @returns {Promise<import('../models/snapshot').SnapshotDocument[]>}
  */
-export const getSnapshots = datasetId => {
+export const getSnapshots = (datasetId): Promise<SnapshotDocument[]> => {
   const url = `${getDatasetWorker(datasetId)}/datasets/${datasetId}/snapshots`
   return request
     .get(url)
-    .set('Accept', 'application/json')
+    .set("Accept", "application/json")
     .then(({ body: { snapshots } }) => {
       return snapshots.sort(snapshotCreationComparison)
     })
@@ -109,14 +109,6 @@ const announceNewSnapshot = async (snapshot, datasetId, user) => {
   if (snapshot.files) {
     notifications.snapshotCreated(datasetId, snapshot, user) // send snapshot notification to subscribers
   }
-  pubsub.publish('snapshotsUpdated', {
-    datasetId,
-    snapshotsUpdated: {
-      id: datasetId,
-      snapshots: await getSnapshots(datasetId),
-      latestSnapshot: snapshot,
-    },
-  })
 }
 
 /**
@@ -146,9 +138,11 @@ export const createSnapshot = async (
   try {
     await createIfNotExistsDoi(datasetId, tag, descriptionFieldUpdates)
 
-    const createSnapshotUrl = `${getDatasetWorker(
-      datasetId,
-    )}/datasets/${datasetId}/snapshots/${tag}`
+    const createSnapshotUrl = `${
+      getDatasetWorker(
+        datasetId,
+      )
+    }/datasets/${datasetId}/snapshots/${tag}`
     const snapshot = await postSnapshot(
       user,
       createSnapshotUrl,
@@ -184,22 +178,17 @@ export const createSnapshot = async (
 }
 
 export const deleteSnapshot = (datasetId, tag) => {
-  const url = `${getDatasetWorker(
-    datasetId,
-  )}/datasets/${datasetId}/snapshots/${tag}`
+  const url = `${
+    getDatasetWorker(
+      datasetId,
+    )
+  }/datasets/${datasetId}/snapshots/${tag}`
   return request.del(url).then(async ({ body }) => {
     const snapshotCache = new CacheItem(redis, CacheType.snapshot, [
       datasetId,
       tag,
    ])
    await snapshotCache.drop()
-    pubsub.publish('snapshotsUpdated', {
-      datasetId,
-      snapshotsUpdated: {
-        id: datasetId,
-        snapshots: await getSnapshots(datasetId),
-      },
-    })
    return body
  })
 }
@@ -210,16 +199,21 @@ export const deleteSnapshot = (datasetId, tag) => {
  * @param {string} commitRef Tag name to retrieve
  * @returns {Promise<import('../models/snapshot').SnapshotDocument>}
  */
-export const getSnapshot = (datasetId, commitRef) => {
-  const url = `${getDatasetWorker(
-    datasetId,
-  )}/datasets/${datasetId}/snapshots/${commitRef}`
+export const getSnapshot = (
+  datasetId,
+  commitRef,
+): Promise<SnapshotDocument> => {
+  const url = `${
+    getDatasetWorker(
+      datasetId,
+    )
+  }/datasets/${datasetId}/snapshots/${commitRef}`
   const cache = new CacheItem(redis, CacheType.snapshot, [datasetId, commitRef])
   return cache.get(() =>
     request
       .get(url)
-      .set('Accept', 'application/json')
-      .then(({ body }) => body),
+      .set("Accept", "application/json")
+      .then(({ body }) => body)
   )
 }
@@ -234,7 +228,7 @@ export const getSnapshot = (datasetId, commitRef) => {
 export const getSnapshotHexsha = (datasetId, tag) => {
   return Snapshot.findOne({ datasetId, tag }, { hexsha: true })
     .exec()
-    .then(result => (result ? result.hexsha : null))
+    .then((result) => (result ? result.hexsha : null))
 }
 
 /**
@@ -244,22 +238,22 @@ export const getSnapshotHexsha = (datasetId, tag) => {
  */
 export const getPublicSnapshots = () => {
   // query all publicly available dataset
-  return Dataset.find({ public: true }, 'id')
+  return Dataset.find({ public: true }, "id")
     .exec()
-    .then(datasets => {
-      const datasetIds = datasets.map(dataset => dataset.id)
+    .then((datasets) => {
+      const datasetIds = datasets.map((dataset) => dataset.id)
       return Snapshot.aggregate([
         { $match: { datasetId: { $in: datasetIds } } },
         { $sort: { created: -1 } },
         {
           $group: {
-            _id: '$datasetId',
-            snapshots: { $push: '$$ROOT' },
+            _id: "$datasetId",
+            snapshots: { $push: "$$ROOT" },
           },
         },
        {
          $replaceRoot: {
-            newRoot: { $arrayElemAt: ['$snapshots', 0] },
+            newRoot: { $arrayElemAt: ["$snapshots", 0] },
          },
        },
      ]).exec()
@@ -277,13 +271,13 @@ export const downloadFiles = (datasetId, tag) => {
   // Return an existing cache object if we have one
   return downloadCache.get(async () => {
     // If not, fetch all trees sequentially and cache the result (hopefully some or all trees are cached)
-    const files = await getFilesRecursive(datasetId, tag, '')
+    const files = await getFilesRecursive(datasetId, tag, "")
     files.sort()
     return files
   })
 }
 
-export async function getFilesRecursive(datasetId, tree, path = '') {
+export async function getFilesRecursive(datasetId, tree, path = "") {
   const files = []
   // Fetch files
   const fileTree = await getFiles(datasetId, tree)
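
Note: getSnapshot and downloadFiles above share a read-through cache pattern: a CacheItem keyed on a CacheType plus id parts returns the cached Redis value when present, and otherwise runs the supplied fetch function and stores its result. A minimal sketch of that pattern, assuming the CacheItem API used in this diff (fetchSnapshotFromWorker is a hypothetical stand-in for the superagent call above):

    import { redis } from "../libs/redis"
    import CacheItem, { CacheType } from "../cache/item"

    const cache = new CacheItem(redis, CacheType.snapshot, ["ds000001", "1.0.0"])
    // Cached value if present, otherwise fetch, cache, and return
    const snapshot = await cache.get(() =>
      fetchSnapshotFromWorker("ds000001", "1.0.0") // hypothetical fetch
    )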

package/src/datalad/upload.ts
CHANGED

@@ -1,9 +1,11 @@
-import { getDatasetWorker } from '../libs/datalad-service'
+import { getDatasetWorker } from "../libs/datalad-service"
 
 export const uploadUrl = (datasetId, uploadId) => {
-  return `http://${getDatasetWorker(
-    datasetId,
-  )}/datasets/${datasetId}/upload/${uploadId}`
+  return `http://${
+    getDatasetWorker(
+      datasetId,
+    )
+  }/datasets/${datasetId}/upload/${uploadId}`
 }
 
 /**
@@ -19,7 +21,7 @@ export const finishUploadRequest = async (
   forwardToken,
 ) => {
   const response = await fetch(uploadUrl(datasetId, uploadId), {
-    method: 'POST',
+    method: "POST",
     body: JSON.stringify({}),
     headers: {
       cookie: `accessToken=${forwardToken}`,

package/src/elasticsearch/elastic-client.ts
ADDED

@@ -0,0 +1,11 @@
+import config from "../config"
+import { Client } from "@elastic/elasticsearch"
+
+const elasticConfig = {
+  node: config.elasticsearch.connection || "http://mock-client",
+  maxRetries: 3,
+}
+
+export const elasticClient = new Client(elasticConfig)
+
+export default elasticClient
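
Note: the new module constructs a single shared client at import time, so callers import the instance rather than opening their own connections. A usage sketch, assuming the standard @elastic/elasticsearch client API:

    import elasticClient from "./elastic-client"

    // ping() resolves when the configured Elasticsearch node is reachable
    await elasticClient.ping()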

package/src/elasticsearch/reindex-dataset.ts
CHANGED

@@ -1,14 +1,14 @@
-import config from '../config'
-import { indexDataset, indexingToken, queryForIndex } from '@openneuro/search'
-import { elasticClient } from './elastic-client'
+import config from "../config"
+import { indexDataset, indexingToken, queryForIndex } from "@openneuro/search"
+import { elasticClient } from "./elastic-client"
 import {
-  from,
   ApolloClient,
+  from,
   InMemoryCache,
   NormalizedCacheObject,
-} from '@apollo/client'
-import { setContext } from '@apollo/client/link/context'
-import { HttpLink } from '@apollo/client/link/http'
+} from "@apollo/client"
+import { setContext } from "@apollo/client/link/context"
+import { HttpLink } from "@apollo/client/link/http"
 
 /**
  * Setup SchemaLink based client for querying

package/src/graphql/__tests__/__snapshots__/permissions.spec.ts.snap
ADDED

@@ -0,0 +1,5 @@
+// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
+
+exports[`resolver permissions helpers > checkDatasetAdmin() > resolves to false for anonymous users 1`] = `"You do not have admin access to this dataset."`;
+
+exports[`resolver permissions helpers > checkDatasetWrite() > resolves to false for anonymous users 1`] = `"You do not have access to modify this dataset."`;