@openneuro/server 4.4.5 → 4.4.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +4 -4
- package/src/datalad/__tests__/dataset.spec.js +1 -1
- package/src/datalad/__tests__/snapshots.spec.js +1 -1
- package/src/datalad/description.js +1 -1
- package/src/datalad/readme.js +1 -1
- package/src/datalad/utils.js +0 -12
- package/src/graphql/resolvers/__tests__/brainlife.spec.ts +17 -0
- package/src/graphql/resolvers/__tests__/dataset.spec.js +1 -1
- package/src/graphql/resolvers/brainlife.ts +55 -0
- package/src/graphql/resolvers/dataset.js +1 -18
- package/src/graphql/resolvers/snapshots.js +2 -0
- package/src/graphql/schema.js +2 -0
- package/src/libs/email/index.ts +6 -5
- package/src/models/__tests__/ingestDataset.spec.ts +1 -1
- package/src/models/comment.ts +1 -0
- package/src/models/snapshot.ts +1 -0
- package/src/server.js +0 -4
- package/src/{datalad/__tests__/utils.spec.js → utils/__tests__/datasetOrSnapshot.spec.ts} +9 -1
- package/src/utils/datasetOrSnapshot.ts +42 -0
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openneuro/server",
-  "version": "4.4.5",
+  "version": "4.4.9",
   "description": "Core service for the OpenNeuro platform.",
   "license": "MIT",
   "main": "src/server.js",
@@ -31,7 +31,7 @@
     "date-fns": "^2.16.1",
     "draft-js": "^0.11.7",
     "draft-js-export-html": "^1.4.1",
-    "elastic-apm-node": "3.
+    "elastic-apm-node": "^3.28.0",
     "express": "^4.17.1",
     "graphql": "14.7.0",
     "graphql-bigint": "^1.0.0",
@@ -47,7 +47,7 @@
     "jsonwebtoken": "^8.3.0",
     "mime-types": "^2.1.19",
     "moment": "^2.14.1",
-    "mongoose": "
+    "mongoose": "6.2.0",
    "morgan": "^1.6.1",
     "node-fetch": "^2.6.0",
     "node-mailjet": "^3.3.5",
@@ -104,5 +104,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "af313c7d80f6aade5786075853d00023c52db60e"
 }

package/src/datalad/__tests__/dataset.spec.js
CHANGED
@@ -16,7 +16,7 @@ describe('dataset model operations', () => {
     // Setup a default sequence value to return for each test
     mockingoose.Counter.toReturn(
       { _id: 'dataset', sequence_value: 1 },
-      '
+      'findOne',
     )
   })
   it('resolves to dataset id string', async done => {

package/src/datalad/__tests__/snapshots.spec.js
CHANGED
@@ -28,7 +28,7 @@ describe('snapshot model operations', () => {
     // Setup a default sequence value to return for each test
     mockingoose.Counter.toReturn(
       { _id: 'dataset', sequence_value: 1 },
-      '
+      'findOne',
     )
   })
   it('posts to the DataLad /datasets/{dsId}/snapshots/{snapshot} endpoint', async done => {

package/src/datalad/description.js
CHANGED
@@ -9,7 +9,7 @@ import { fileUrl, getFiles } from './files.js'
 import { generateDataladCookie } from '../libs/authentication/jwt'
 import { getDatasetWorker } from '../libs/datalad-service'
 import CacheItem, { CacheType } from '../cache/item'
-import { datasetOrSnapshot } from '
+import { datasetOrSnapshot } from '../utils/datasetOrSnapshot'

 export const defaultDescription = {
   Name: 'Unnamed Dataset',

package/src/datalad/readme.js
CHANGED
@@ -3,7 +3,7 @@ import { addFileString, commitFiles } from './dataset'
 import { redis } from '../libs/redis'
 import CacheItem, { CacheType } from '../cache/item'
 import { getDatasetWorker } from '../libs/datalad-service'
-import { datasetOrSnapshot } from '
+import { datasetOrSnapshot } from '../utils/datasetOrSnapshot'

 export const readmeUrl = (datasetId, revision) => {
   return `http://${getDatasetWorker(

package/src/datalad/utils.js
CHANGED
@@ -23,15 +23,3 @@ export const addFileUrl = (datasetId, tag) => file => {
     }
   }
 }
-
-/**
- * Helper for resolvers with dataset and snapshot parents
- * @param {object} obj A snapshot or dataset parent object
- */
-export function datasetOrSnapshot(obj) {
-  if ('tag' in obj) {
-    return { datasetId: obj.id.split(':')[0], revision: obj.hexsha || obj.tag }
-  } else {
-    return { datasetId: obj.id, revision: obj.revision }
-  }
-}

package/src/graphql/resolvers/__tests__/brainlife.spec.ts
ADDED
@@ -0,0 +1,17 @@
+import { HasId } from '../../../utils/datasetOrSnapshot'
+import { brainlifeQuery } from '../brainlife'
+
+describe('brainlife resolvers', () => {
+  it('correctly queries drafts', () => {
+    expect(brainlifeQuery({ id: 'ds000001' } as HasId).toString()).toEqual(
+      'https://brainlife.io/api/warehouse/datalad/datasets?find=%7B%22removed%22%3Afalse%2C%22path%22%3A%7B%22%24regex%22%3A%22%5EOpenNeuro%2Fds000001%22%7D%7D',
+    )
+  })
+  it('correctly queries versioned datasets', () => {
+    expect(
+      brainlifeQuery({ id: 'ds000001:1.0.0', tag: '1.0.0' }).toString(),
+    ).toEqual(
+      'https://brainlife.io/api/warehouse/datalad/datasets?find=%7B%22removed%22%3Afalse%2C%22path%22%3A%7B%22%24regex%22%3A%22%5EOpenNeuro%2Fds000001%22%7D%2C%22version%22%3A%221.0.0%22%7D',
+    )
+  })
+})

package/src/graphql/resolvers/brainlife.ts
ADDED
@@ -0,0 +1,55 @@
+import {
+  DatasetOrSnapshot,
+  getDatasetFromSnapshotId,
+} from '../../utils/datasetOrSnapshot'
+
+interface BrainlifeFindQuery {
+  removed: boolean
+  path?: {
+    $regex: string
+  }
+  version?: string
+}
+
+/**
+ * Construct a query to check if a dataset or snapshot exists on Brainlife
+ */
+export function brainlifeQuery(dataset: DatasetOrSnapshot): URL {
+  const find: BrainlifeFindQuery = {
+    removed: false,
+  }
+
+  if ('tag' in dataset) {
+    find.path = {
+      $regex: `^OpenNeuro/${getDatasetFromSnapshotId(dataset.id)}`,
+    }
+    find.version = dataset.tag
+  } else {
+    find.path = { $regex: `^OpenNeuro/${dataset.id}` }
+  }
+
+  const url = new URL('https://brainlife.io/api/warehouse/datalad/datasets')
+  url.searchParams.append('find', JSON.stringify(find))
+
+  return url
+}
+
+/**
+ * Make a request to Brainlife to check if a dataset exists
+ */
+export const onBrainlife = async (
+  dataset: DatasetOrSnapshot,
+): Promise<boolean> => {
+  try {
+    const url = brainlifeQuery(dataset)
+    const res = await fetch(url.toString())
+    const body = await res.json()
+    if (Array.isArray(body) && body.length) {
+      return true
+    } else {
+      return false
+    }
+  } catch (err) {
+    return false
+  }
+}

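Note on the expected values in brainlife.spec.ts above: each long string is simply the URL produced by brainlifeQuery, with the `find` query parameter holding URL-encoded JSON. A minimal decoding sketch, assuming it runs next to the new resolver file (the import path is illustrative only):

import { brainlifeQuery } from './brainlife'

// Build the snapshot query and decode its `find` parameter back into JSON
const url = brainlifeQuery({ id: 'ds000001:1.0.0', tag: '1.0.0' })
const find = JSON.parse(url.searchParams.get('find') ?? '{}')
// find is { removed: false, path: { $regex: '^OpenNeuro/ds000001' }, version: '1.0.0' }
console.log(find)
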
package/src/graphql/resolvers/dataset.js
CHANGED
@@ -22,6 +22,7 @@ import { UpdatedFile } from '../utils/file.js'
 import { getDatasetWorker } from '../../libs/datalad-service.js'
 import { getDraftHead } from '../../datalad/dataset.js'
 import { getFileName } from '../../datalad/files.js'
+import { onBrainlife } from './brainlife'
 import semver from 'semver'

 export const dataset = async (obj, { id }, { user, userInfo }) => {
@@ -284,24 +285,6 @@ export const starred = (obj, _, { user }) =>
     ? datalad.getUserStarred(obj.id, user).then(res => (res ? true : false))
     : null

-/**
- * Is this dataset available on brainlife?
- */
-export const onBrainlife = async dataset => {
-  try {
-    const url = `https://brainlife.io/api/warehouse/datalad/datasets?find={"path":{"$regex":"${dataset.id}$"}}`
-    const res = await fetch(url)
-    const body = await res.json()
-    if (Array.isArray(body) && body.length) {
-      return body[0].path === `OpenNeuroDatasets/${dataset.id}`
-    } else {
-      return false
-    }
-  } catch (err) {
-    return false
-  }
-}
-
 const worker = obj => getDatasetWorker(obj.id)

 /**

package/src/graphql/resolvers/snapshots.js
CHANGED
@@ -1,5 +1,6 @@
 import * as datalad from '../../datalad/snapshots.js'
 import { dataset, analytics, snapshotCreationComparison } from './dataset.js'
+import { onBrainlife } from './brainlife'
 import { checkDatasetRead, checkDatasetWrite } from '../permissions.js'
 import { readme } from './readme.js'
 import { description } from './description.js'
@@ -32,6 +33,7 @@ export const snapshot = (obj, { datasetId, tag }, context) => {
         .then(filterRemovedAnnexObjects(datasetId, context.userInfo)),
       deprecated: () => deprecated({ datasetId, tag }),
       related: () => related(datasetId),
+      onBrainlife: () => onBrainlife(snapshot),
     }))
   },
 )

package/src/graphql/schema.js
CHANGED
@@ -459,6 +459,8 @@ export const typeDefs = `
     deprecated: DeprecatedSnapshot
     # Related DOI references
     related: [RelatedObject]
+    # Is the snapshot available for analysis on Brainlife?
+    onBrainlife: Boolean @cacheControl(maxAge: 10080, scope: PUBLIC)
   }

   # RelatedObject nature of relationship

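With the schema addition above, clients can request onBrainlife alongside other snapshot fields. A hedged client-side sketch — the endpoint URL and the argument types in the query string are assumptions, not part of this diff:

// Hypothetical check of the new Snapshot.onBrainlife field from a client
async function snapshotOnBrainlife(datasetId: string, tag: string): Promise<boolean> {
  const response = await fetch('https://openneuro.org/crn/graphql', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      query: `query ($datasetId: ID!, $tag: String!) {
        snapshot(datasetId: $datasetId, tag: $tag) { onBrainlife }
      }`,
      variables: { datasetId, tag },
    }),
  })
  const { data } = await response.json()
  return Boolean(data?.snapshot?.onBrainlife)
}

// Example: snapshotOnBrainlife('ds000001', '1.0.0').then(console.log)
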
package/src/libs/email/index.ts
CHANGED
@@ -36,11 +36,12 @@ export const mailjetFormat = (email: Record<string, string>) => ({
  * @param email Nodemailer style email record
  */
 export const send = (email: Record<string, string>): Promise<Response> => {
-
-
+  if (perform_api_call) {
+    return transport
+      .post('send', { version: 'v3.1', perform_api_call })
+      .request(mailjetFormat(email))
+  } else {
+    // Mailjet is not configured, instead log emails
     console.dir(email)
   }
-  return transport
-    .post('send', { version: 'v3.1', perform_api_call })
-    .request(mailjetFormat(email))
 }

package/src/models/comment.ts
CHANGED
package/src/models/snapshot.ts
CHANGED
package/src/server.js
CHANGED
@@ -23,12 +23,8 @@ const redisConnectionSetup = async () => {
 }

 mongoose.connect(config.mongo.url, {
-  useNewUrlParser: true,
   dbName: config.mongo.dbName,
   connectTimeoutMS: config.mongo.connectTimeoutMS,
-  useFindAndModify: false,
-  useUnifiedTopology: true,
-  useCreateIndex: true,
 })

 redisConnectionSetup().then(() => {

package/src/{datalad/__tests__/utils.spec.js → utils/__tests__/datasetOrSnapshot.spec.ts}
RENAMED
@@ -1,4 +1,7 @@
-import {
+import {
+  datasetOrSnapshot,
+  getDatasetFromSnapshotId,
+} from '../datasetOrSnapshot'

 describe('datasetOrSnapshot()', () => {
   it('resolves a dataset object correctly', () => {
@@ -39,4 +42,9 @@ describe('datasetOrSnapshot()', () => {
       revision: '1.0.1',
     })
   })
+  describe('getDatasetFromSnapshotId', () => {
+    it('extracts the datasetId correctly', () => {
+      expect(getDatasetFromSnapshotId('ds000001:1.0.0')).toBe('ds000001')
+    })
+  })
 })

package/src/utils/datasetOrSnapshot.ts
ADDED
@@ -0,0 +1,42 @@
+export interface HasId {
+  id: string
+  revision: string
+}
+
+export interface HasSnapshotId {
+  id: string
+  tag: string
+  hexsha?: string
+}
+
+export interface DatasetRevisionReference {
+  datasetId: string
+  revision: string
+}
+
+export type DatasetOrSnapshot = HasId | HasSnapshotId
+
+/**
+ * Helper for resolvers with dataset and snapshot parents
+ * @param {object} obj A snapshot or dataset parent object
+ */
+export function datasetOrSnapshot(
+  obj: DatasetOrSnapshot,
+): DatasetRevisionReference {
+  if ('tag' in obj) {
+    return {
+      datasetId: getDatasetFromSnapshotId(obj.id),
+      revision: obj.hexsha || obj.tag,
+    }
+  } else {
+    return { datasetId: obj.id, revision: obj.revision }
+  }
+}
+
+/**
+ * @param snapshotId 'ds000001:1.0.0' style snapshot ID
+ * @returns {string} Dataset id portion 'ds000001'
+ */
+export function getDatasetFromSnapshotId(snapshotId: string): string {
+  return snapshotId.split(':')[0]
+}
