@openneuro/server 4.36.2 → 4.37.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +8 -6
- package/src/__mocks__/config.ts +4 -0
- package/src/app.ts +1 -0
- package/src/cache/types.ts +2 -1
- package/src/datalad/snapshots.ts +2 -2
- package/src/graphql/resolvers/validation.ts +41 -29
- package/src/libs/authentication/orcid.ts +38 -33
- package/src/libs/authentication/passport.ts +2 -3
- package/src/libs/authentication/user-migration.ts +8 -0
- package/src/libs/orcid.ts +100 -90
- package/src/libs/redis.ts +3 -24
- package/src/queues/consumer.ts +18 -0
- package/src/queues/producer-methods.ts +18 -0
- package/src/queues/queues.ts +34 -0
- package/src/queues/setup.ts +29 -0
- package/src/server.ts +8 -13
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openneuro/server",
-  "version": "4.36.2",
+  "version": "4.37.0-alpha.0",
   "description": "Core service for the OpenNeuro platform.",
   "license": "MIT",
   "main": "src/server.js",
@@ -20,8 +20,8 @@
     "@apollo/utils.keyvadapter": "3.0.0",
     "@elastic/elasticsearch": "8.13.1",
     "@graphql-tools/schema": "^10.0.0",
-    "@keyv/redis": "^
-    "@openneuro/search": "^4.
+    "@keyv/redis": "^4.5.0",
+    "@openneuro/search": "^4.37.0-alpha.0",
     "@sentry/node": "^8.25.0",
     "@sentry/profiling-node": "^8.25.0",
     "base64url": "^3.0.0",
@@ -37,10 +37,10 @@
     "graphql-iso-date": "^3.6.1",
     "graphql-tools": "9.0.0",
     "immutable": "^3.8.2",
-    "ioredis": "
+    "ioredis": "^5.6.1",
    "jsdom": "24.0.0",
     "jsonwebtoken": "^9.0.0",
-    "keyv": "^
+    "keyv": "^5.3.4",
    "mime-types": "^2.1.19",
     "mongodb-memory-server": "^9.2.0",
     "mongoose": "^8.9.5",
@@ -55,6 +55,8 @@
     "passport-orcid": "0.0.4",
     "react": "^18.2.0",
     "react-dom": "^18.2.0",
+    "redis-smq": "^8.3.1",
+    "redis-smq-common": "^8.3.1",
    "redlock": "^4.0.0",
     "request": "^2.83.0",
     "semver": "^5.5.0",
@@ -86,5 +88,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "01cd2b234a23c76a234dfb2c2839857041da777b"
 }

package/src/__mocks__/config.ts
CHANGED
package/src/app.ts
CHANGED
@@ -65,6 +65,7 @@ export async function expressApolloSetup() {
     schema,
     // Always allow introspection - our schema is public
     introspection: true,
+    // @ts-expect-error Type mismatch for keyv and ioredis recent releases
     cache: new KeyvAdapter(new Keyv({ store: new KeyvRedis(redis) })),
     plugins: [
       ApolloServerPluginLandingPageLocalDefault(),
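
The @ts-expect-error added above suppresses a typings conflict between recent keyv and ioredis releases; the runtime wiring is unchanged. A minimal isolated sketch of that cache chain, assuming the same imports app.ts uses (the localhost connection is a placeholder; the real app reuses the shared client from libs/redis):

    import Keyv from "keyv"
    import KeyvRedis from "@keyv/redis"
    import Redis from "ioredis"
    import { KeyvAdapter } from "@apollo/utils.keyvadapter"

    // Placeholder connection for this sketch only
    const redis = new Redis({ host: "localhost", port: 6379 })

    // KeyvRedis wraps the client, Keyv provides the store interface, and
    // KeyvAdapter exposes it as an Apollo Server cache backend.
    // @ts-expect-error Type mismatch for keyv and ioredis recent releases
    const cache = new KeyvAdapter(new Keyv({ store: new KeyvRedis(redis) }))
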
package/src/cache/types.ts
CHANGED
package/src/datalad/snapshots.ts
CHANGED
@@ -3,7 +3,6 @@
  */
 import * as Sentry from "@sentry/node"
 import request from "superagent"
-import { reindexDataset } from "../elasticsearch/reindex-dataset"
 import { redis, redlock } from "../libs/redis"
 import CacheItem, { CacheType } from "../cache/item"
 import config from "../config"
@@ -23,6 +22,7 @@ import { updateDatasetRevision } from "./draft"
 import { getDatasetWorker } from "../libs/datalad-service"
 import { join } from "path"
 import { createEvent, updateEvent } from "../libs/events"
+import { queueIndexDataset } from "../queues/producer-methods"
 
 const lockSnapshot = (datasetId, tag) => {
   return redlock.lock(
@@ -177,7 +177,7 @@ export const createSnapshot = async (
   await updateEvent(event)
 
   // Immediate indexing for new snapshots
-
+  queueIndexDataset(datasetId)
 
   announceNewSnapshot(snapshot, datasetId, user)
   return snapshot

package/src/graphql/resolvers/validation.ts
CHANGED

@@ -2,41 +2,53 @@ import config from "../../config"
 import { generateDataladCookie } from "../../libs/authentication/jwt"
 import { getDatasetWorker } from "../../libs/datalad-service"
 import Validation from "../../models/validation"
-import { redlock } from "../../libs/redis"
+import { redis, redlock } from "../../libs/redis"
+import CacheItem from "../../cache/item"
+import { CacheType } from "../../cache/types"
 
 /**
  * Issues resolver for schema validator
  */
 export const validation = async (dataset, _, { userInfo }) => {
-
-
-
-
-
-
-
-
-
-
-
-      { userInfo },
-    )
-  }
-  if (data) {
-    // Return with errors and warning counts appended
-    return {
-      ...data.toObject(),
-      errors: data.issues.filter((issue) =>
-        issue.severity === "error"
-      ).length,
-      warnings: data.issues.filter((issue) =>
-        issue.severity === "warning"
-      ).length,
-    }
-  } else {
-    return null
-  }
+  const cache = new CacheItem(
+    redis,
+    CacheType.validation,
+    [dataset.id, dataset.revision],
+    // This cache is valid forever but may be large, drop inaccessed values weekly
+    604800,
+  )
+  return cache.get((doNotCache) => {
+    return Validation.findOne({
+      id: dataset.revision,
+      datasetId: dataset.id,
     })
+      .exec()
+      .then((data) => {
+        if (!data && userInfo) {
+          // If no results were found, acquire a lock and run validation
+          revalidate(
+            null,
+            { datasetId: dataset.id, ref: dataset.revision },
+            { userInfo },
+          )
+        }
+        if (data) {
+          // Return with errors and warning counts appended
+          return {
+            ...data.toObject(),
+            errors: data.issues.filter((issue) =>
+              issue.severity === "error"
+            ).length,
+            warnings: data.issues.filter((issue) =>
+              issue.severity === "warning"
+            ).length,
+          }
+        } else {
+          doNotCache(true)
+          return null
+        }
+      })
+  })
 }
 
 /**

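The resolver above delegates to the CacheItem helper (package/src/cache/item.ts, with a new member added to CacheType in package/src/cache/types.ts), neither of which is shown in full in this diff. The sketch below is a hypothetical reconstruction inferred only from the call sites above, not the actual implementation: a get() that returns a cached value when present and otherwise runs the fetch callback, passing a doNotCache function so callers can skip persisting empty results.

    import type Redis from "ioredis"

    // Hypothetical shape for illustration; the real class lives in
    // package/src/cache/item.ts and is not part of this diff.
    class CacheItem<T> {
      constructor(
        private redis: Redis,
        private type: string,
        private keys: (string | number)[],
        private expireSeconds = 604800,
      ) {}

      private key(): string {
        return [this.type, ...this.keys].join(":")
      }

      async get(fetch: (doNotCache: (skip: boolean) => void) => Promise<T>): Promise<T> {
        const hit = await this.redis.get(this.key())
        if (hit) return JSON.parse(hit) as T
        let skip = false
        const value = await fetch((s) => {
          skip = s
        })
        if (!skip) {
          // "EX" sets the time-to-live in seconds
          await this.redis.set(this.key(), JSON.stringify(value), "EX", this.expireSeconds)
        }
        return value
      }
    }
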
package/src/libs/authentication/orcid.ts
CHANGED

@@ -4,9 +4,11 @@ import * as Sentry from "@sentry/node"
 import { userMigration } from "./user-migration"
 import User from "../../models/user"
 
-export const requestAuth =
-
-
+export const requestAuth = (req, res, next) =>
+  passport.authenticate("orcid", {
+    session: false,
+    state: req.query.redirectPath || null,
+  })(req, res, next)
 
 /**
  * Complete a successful login
@@ -30,41 +32,44 @@ export function completeRequestLogin(req, res, next, user) {
 }
 
 export const authCallback = (req, res, next) =>
-  passport.authenticate(
-
-
-    if (err
-
-
-
+  passport.authenticate(
+    "orcid",
+    async (err, user) => {
+      if (err) {
+        Sentry.captureException(err)
+        if (err.type) {
+          return res.redirect(`/error/orcid/${err.type}`)
+        } else {
+          return res.redirect("/error/orcid/unknown")
+        }
+      }
+      if (!user) {
+        return res.redirect("/")
       }
-    }
-    if (!user) {
-      return res.redirect("/")
-    }
 
-
-
-
-
-
-
-
-
+      try {
+        // adds new date for login/lastSeen
+        await User.findByIdAndUpdate(user._id, { lastSeen: new Date() })
+      } catch (error: unknown) {
+        if (error instanceof Error) {
+          Sentry.captureException(error)
+        } else {
+          Sentry.captureException(new Error(String(error)))
+        }
+        // Don't block the login flow
       }
-    // Don't block the login flow
-    }
 
-
-
-
-
+      // Google user
+      const existingAuth = parsedJwtFromRequest(req)
+      if (
+        existingAuth && existingAuth.provider === "google" &&
+        existingAuth.exp * 1000 > Date.now()
+      ) {
        return userMigration(user.providerId, existingAuth.sub).then(() => {
          return completeRequestLogin(req, res, next, user)
        })
+      } else {
+        return completeRequestLogin(req, res, next, user)
       }
-    }
-
-    }
-  })(req, res, next)
+    },
+  )(req, res, next)

package/src/libs/authentication/passport.ts
CHANGED

@@ -95,9 +95,8 @@ export const verifyORCIDUser = (
   params,
   done,
 ) => {
-  const token = `${profile.orcid}:${profile.access_token}`
   orcid
-    .getProfile(
+    .getProfile(profile.orcid, profile.access_token)
     .then((info) => {
       profile.info = info
       profile.provider = PROVIDERS.ORCID
@@ -166,7 +165,7 @@ export const setupPassportAuth = () => {
       config.auth.orcid.apiURI.includes("sandbox"),
       clientID: config.auth.orcid.clientID,
       clientSecret: config.auth.orcid.clientSecret,
-      scope: "/activities/update",
+      scope: ["/activities/update", "/read-limited"],
      callbackURL: `${config.url + config.apiPrefix}auth/orcid/callback`,
     },
     verifyORCIDUser,

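The scope change pairs with the getProfile rewrite in libs/orcid.ts below: /activities/update authorizes writes, while /read-limited is the ORCID scope that permits reading limited-visibility record fields (such as a non-public email) through the /record endpoint. A hedged sketch of the strategy options this hunk implies, assuming passport-orcid's Strategy constructor and the sandbox option name (both assumptions, not shown in the diff; config and verifyORCIDUser come from the surrounding file):

    import { Strategy as OrcidStrategy } from "passport-orcid"

    const strategy = new OrcidStrategy(
      {
        sandbox: config.auth.orcid.apiURI.includes("sandbox"),
        clientID: config.auth.orcid.clientID,
        clientSecret: config.auth.orcid.clientSecret,
        // Request activity write access plus limited-visibility read access
        scope: ["/activities/update", "/read-limited"],
        callbackURL: `${config.url + config.apiPrefix}auth/orcid/callback`,
      },
      verifyORCIDUser,
    )
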
package/src/libs/authentication/user-migration.ts
CHANGED

@@ -5,6 +5,7 @@ import Dataset from "../../models/dataset"
 import Permission from "../../models/permission"
 import Comment from "../../models/comment"
 import Deletion from "../../models/deletion"
+import { queueIndexDataset } from "../../queues/producer-methods"
 import * as Sentry from "@sentry/node"
 
 /**
@@ -20,6 +21,7 @@ import * as Sentry from "@sentry/node"
 export async function userMigration(orcid: string, userId: string) {
   const session = await mongoose.startSession()
   try {
+    const updateDatasets: Record<string, boolean> = {}
     await session.withTransaction(async () => {
       try {
         // Load both original records
@@ -55,6 +57,7 @@ export async function userMigration(orcid: string, userId: string) {
         // Record this dataset uploader as migrated
         migration.datasets.push(dataset.id)
         await dataset.save({ session })
+        updateDatasets[dataset.id] = true
       }
 
       // Migrate dataset permissions
@@ -70,6 +73,7 @@ export async function userMigration(orcid: string, userId: string) {
         // Record this permission as migrated
         migration.permissions.push(permission.toObject())
         await permission.save({ session })
+        updateDatasets[permission.datasetId] = true
       }
 
       // Migrate dataset deletions
@@ -110,6 +114,10 @@ export async function userMigration(orcid: string, userId: string) {
       // Save success
       migration.success = true
      await migration.save({ session })
+      // Request reindexing
+      for (const updateDataset of Object.keys(updateDatasets)) {
+        queueIndexDataset(updateDataset)
+      }
     } catch (err) {
       Sentry.captureException(err)
       throw err

package/src/libs/orcid.ts
CHANGED
@@ -1,110 +1,120 @@
 // Camel case rule is disabled since ORCID API uses snake case variables
-import request from "request"
 import xmldoc from "xmldoc"
 import config from "../config"
 import * as Sentry from "@sentry/node"
 
 export default {
-  getProfile(
-
-    const
-    if (data.length != 2) {
-      reject("Invalid token")
-    }
-    const orcid = data[0]
-    const accessToken = data[1]
-
-    request.get(
+  async getProfile(orcid, accessToken) {
+    try {
+      const response = await fetch(
        `${config.auth.orcid.apiURI}/v2.0/${orcid}/record`,
        {
-        headers: {
+          headers: {
+            Authorization: `Bearer ${accessToken}`,
+          },
        },
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    const lastname = doc.valueWithPath(
-      "person:person.person:name.personal-details:family-name",
-    )
-    const email = doc.valueWithPath(
-      "person:person.email:emails.email:email.email:email",
-    )
+      )
+      const text = await response.text()
+      const doc = new xmldoc.XmlDocument(text)
+      let name = doc.valueWithPath(
+        "person:person.person:name.personal-details:credit-name",
+      )
+      const firstname = doc.valueWithPath(
+        "person:person.person:name.personal-details:given-names",
+      )
+      const lastname = doc.valueWithPath(
+        "person:person.person:name.personal-details:family-name",
+      )
+      const email = doc.valueWithPath(
+        "person:person.email:emails.email:email.email:email",
+      )
 
-
-
-
-
-
-
-
+      if (!name && firstname && lastname) {
+        if (firstname && lastname) {
+          name = `${firstname} ${lastname}`
+        } else {
+          name = lastname || firstname
+        }
+      }
 
-
-
-
-
+      return {
+        name,
+        email,
+      }
+    } catch (err) {
+      Sentry.captureException(err, {
+        extra: {
+          orcid,
        },
-    )
-  }
+      })
+    }
   },
 
-  refreshToken(refreshToken, callback) {
-
-
-
-
-
-
-
-
-
+  async refreshToken(refreshToken, callback) {
+    try {
+      const form = new URLSearchParams({
+        client_id: config.auth.orcid.clientID,
+        client_secret: config.auth.orcid.clientSecret,
+        redirect_uri: config.auth.orcid.redirectURI,
+        grant_type: "refresh_token",
+        refresh_token: refreshToken,
+      })
+      const res = await fetch(`${config.auth.orcid.URI}/oauth/token`, {
+        method: "POST",
+        headers: {
+          "Content-Type": "application/x-www-form-urlencoded",
+          Accept: "application/json",
        },
-
-    }
-
-
-
-
-
-
-    }
-
+        body: form,
+      })
+      const body = await res.json()
+      if (!res.ok) {
+        callback(
+          new Error(body.error_description || `ORCID API error: ${res.status}`),
+          body,
+        )
+      } else {
+        const { orcid, access_token } = body
+        body.access_token = `${orcid}:${access_token}`
+        callback(null, body)
+      }
+    } catch (err) {
+      Sentry.captureException(err)
+      callback(err, null)
+    }
   },
 
-  validateToken(code, callback) {
-
-
-
-
-
-
-
-
-
+  async validateToken(code, callback) {
+    try {
+      const form = new URLSearchParams({
+        client_id: config.auth.orcid.clientID,
+        client_secret: config.auth.orcid.clientSecret,
+        redirect_uri: config.auth.orcid.redirectURI,
+        grant_type: "authorization_code",
+        code,
+      })
+      const res = await fetch(`${config.auth.orcid.URI}/oauth/token`, {
+        method: "POST",
+        headers: {
+          "Content-Type": "application/x-www-form-urlencoded",
+          Accept: "application/json",
        },
-
-    }
-
-
-
-
-
-
-
-    }
-
-
-    }
-  )
+        body: form,
+      })
+      const body = await res.json()
+      if (!res.ok) {
+        callback(
+          new Error(body.error_description || `ORCID API error: ${res.status}`),
+          body,
+        )
+      } else {
+        const { orcid, access_token } = body
+        body.access_token = `${orcid}:${access_token}`
+        callback(null, body)
+      }
+    } catch (err) {
+      Sentry.captureException(err)
+      callback(err, null)
+    }
   },
 }

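The net effect of the rewrite above: the deprecated request library is replaced with the built-in fetch, getProfile takes the ORCID iD and access token as separate arguments instead of splitting a combined "orcid:token" string, and OAuth errors now surface through the Node-style callbacks with the response body attached. A short hedged usage sketch (the iD is ORCID's documentation example, and the async wrapper and code string are placeholders):

    import orcid from "./libs/orcid"

    async function example(accessToken: string) {
      // getProfile now takes the iD and token separately
      const profile = await orcid.getProfile("0000-0002-1825-0097", accessToken)
      console.log(profile?.name, profile?.email)

      // validateToken keeps a Node-style callback API; on success the body's
      // access_token is rewritten to the combined "orcid:token" form
      orcid.validateToken("authorization-code-from-callback", (err, body) => {
        if (err) throw err
        console.log(body.access_token)
      })
    }
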
package/src/libs/redis.ts
CHANGED
@@ -3,28 +3,7 @@
 // dependencies --------------------------------------------------
 import Redis from "ioredis"
 import Redlock from "redlock"
+import config from "../config"
 
-
-
-
-const connect = async (config) => {
-  return new Promise((resolve) => {
-    if (!redis) {
-      console.log(
-        'Connecting to Redis "redis://%s:%d/0"',
-        config.host,
-        config.port,
-      )
-      redis = new Redis(config)
-      redlock = new Redlock([redis])
-      redis.on("connect", () => {
-        resolve(redis)
-      })
-    } else {
-      resolve(redis)
-    }
-  })
-}
-
-export default { connect }
-export { connect, redis, redlock }
+export const redis = new Redis(config.redis)
+export const redlock = new Redlock([redis])

package/src/queues/consumer.ts
ADDED

@@ -0,0 +1,18 @@
+import type { Consumer } from "redis-smq"
+import { reindexDataset } from "../elasticsearch/reindex-dataset"
+import { OpenNeuroQueues } from "./queues"
+import * as Sentry from "@sentry/node"
+
+export function startConsumer(consumer: Consumer) {
+  const reindexMessageHandler = async (msg, cb) => {
+    // Index one dataset
+    reindexDataset(msg.body.datasetId).then(cb)
+  }
+
+  consumer.consume(OpenNeuroQueues.INDEXING, reindexMessageHandler, (err) => {
+    if (err) {
+      Sentry.captureException(err)
+    }
+  })
+  return consumer
+}

package/src/queues/producer-methods.ts
ADDED

@@ -0,0 +1,18 @@
+import { ProducibleMessage } from "redis-smq"
+import { producer } from "./setup"
+import { OpenNeuroQueues } from "./queues"
+import * as Sentry from "@sentry/node"
+
+/**
+ * Queue search indexing for a dataset
+ * @param datasetId Dataset to index
+ */
+export function queueIndexDataset(datasetId: string) {
+  const msg = new ProducibleMessage()
+  msg.setQueue(OpenNeuroQueues.INDEXING).setBody({ datasetId })
+  producer.produce(msg, (err) => {
+    if (err) {
+      Sentry.captureException(err)
+    }
+  })
+}

package/src/queues/queues.ts
ADDED

@@ -0,0 +1,34 @@
+import { Queue } from "redis-smq"
+import { EQueueDeliveryModel, EQueueType, QueueRateLimit } from "redis-smq"
+import * as Sentry from "@sentry/node"
+
+export enum OpenNeuroQueues {
+  INDEXING = "elasticsearch_indexing",
+}
+
+export function setupQueues() {
+  const indexingQueue = new Queue()
+  indexingQueue.save(
+    OpenNeuroQueues.INDEXING,
+    EQueueType.FIFO_QUEUE,
+    EQueueDeliveryModel.POINT_TO_POINT,
+    (err) => {
+      // The queue may already exist, don't log that error
+      if (err !== "QueueQueueExistsError") {
+        Sentry.captureException(err)
+      }
+    },
+  )
+
+  // Limit indexing queue to 8 runs per minute to avoid stacking indexing excessively
+  const queueRateLimit = new QueueRateLimit()
+  queueRateLimit.set(
+    OpenNeuroQueues.INDEXING,
+    { limit: 8, interval: 60000 },
+    (err) => {
+      if (err) {
+        Sentry.captureException(err)
+      }
+    },
+  )
+}

package/src/queues/setup.ts
ADDED

@@ -0,0 +1,29 @@
+import { Configuration, Consumer, Producer } from "redis-smq"
+import type { IRedisSMQConfig } from "redis-smq"
+import { ERedisConfigClient } from "redis-smq-common"
+import { startConsumer } from "./consumer"
+import { setupQueues } from "./queues"
+import config from "../config"
+
+const smqConfig: IRedisSMQConfig = {
+  redis: {
+    // Using ioredis as the Redis client
+    client: ERedisConfigClient.IOREDIS,
+    // Add any other ioredis options here
+    options: {
+      host: config.redis.host,
+      port: config.redis.port,
+    },
+  },
+}
+
+Configuration.getSetConfig(smqConfig)
+
+// Producer starts automatically
+export const producer = new Producer()
+export const consumer = new Consumer()
+
+export function initQueues() {
+  setupQueues()
+  startConsumer(consumer)
+}

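Taken together, the four new modules give the server a Redis-backed work queue for search indexing: setup.ts configures redis-smq and exports a shared producer/consumer pair, queues.ts declares a rate-limited FIFO queue, producer-methods.ts enqueues work, and consumer.ts drains it by calling reindexDataset. A sketch of how they fit together at startup, mirroring what server.ts below does (the accession number is a placeholder):

    import { initQueues } from "./queues/setup"
    import { queueIndexDataset } from "./queues/producer-methods"

    // Create the queue, apply the 8-per-minute rate limit, start the consumer
    initQueues()

    // Later, anywhere in the app: enqueue one dataset for reindexing
    queueIndexDataset("ds000001")
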
package/src/server.ts
CHANGED
@@ -1,26 +1,19 @@
 import "./sentry"
+import "./libs/redis"
 import config from "./config"
 import { createServer } from "http"
 import mongoose from "mongoose"
-import { connect as redisConnect } from "./libs/redis"
 import { expressApolloSetup } from "./app"
-
-const redisConnectionSetup = async () => {
-  try {
-    await redisConnect(config.redis)
-  } catch (err) {
-    // eslint-disable-next-line no-console
-    console.error(err)
-    process.exit(1)
-  }
-}
+import { initQueues } from "./queues/setup"
 
 void mongoose.connect(config.mongo.url, {
   dbName: config.mongo.dbName,
   connectTimeoutMS: config.mongo.connectTimeoutMS,
 })
 
-
+async function init() {
+  // Start redis message queues
+  initQueues()
   const app = await expressApolloSetup()
   const server = createServer(app)
   server.listen(config.port, () => {
@@ -29,4 +22,6 @@ void redisConnectionSetup().then(async () => {
   // Setup GraphQL subscription transport
   //subscriptionServerFactory(server)
 })
-}
+}
+
+init()