@openneuro/server 4.4.8 → 4.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +3 -3
- package/src/datalad/description.js +1 -1
- package/src/datalad/readme.js +1 -1
- package/src/datalad/utils.js +0 -12
- package/src/graphql/resolvers/__tests__/brainlife.spec.ts +17 -0
- package/src/graphql/resolvers/brainlife.ts +56 -0
- package/src/graphql/resolvers/dataset.js +1 -29
- package/src/graphql/resolvers/permissions.js +3 -1
- package/src/graphql/resolvers/snapshots.js +3 -7
- package/src/models/snapshot.ts +1 -0
- package/src/models/user.ts +10 -0
- package/src/{datalad/__tests__/utils.spec.js → utils/__tests__/datasetOrSnapshot.spec.ts} +9 -1
- package/src/utils/datasetOrSnapshot.ts +42 -0
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openneuro/server",
-  "version": "4.4.8",
+  "version": "4.5.0",
   "description": "Core service for the OpenNeuro platform.",
   "license": "MIT",
   "main": "src/server.js",
@@ -47,7 +47,7 @@
   "jsonwebtoken": "^8.3.0",
   "mime-types": "^2.1.19",
   "moment": "^2.14.1",
-  "mongoose": "6.2.
+  "mongoose": "^6.2.3",
   "morgan": "^1.6.1",
   "node-fetch": "^2.6.0",
   "node-mailjet": "^3.3.5",
@@ -104,5 +104,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "8771d59f40e642de9313468e8764f91fb0d811b2"
 }
package/src/datalad/description.js
CHANGED
@@ -9,7 +9,7 @@ import { fileUrl, getFiles } from './files.js'
 import { generateDataladCookie } from '../libs/authentication/jwt'
 import { getDatasetWorker } from '../libs/datalad-service'
 import CacheItem, { CacheType } from '../cache/item'
-import { datasetOrSnapshot } from '
+import { datasetOrSnapshot } from '../utils/datasetOrSnapshot'
 
 export const defaultDescription = {
   Name: 'Unnamed Dataset',
package/src/datalad/readme.js
CHANGED
@@ -3,7 +3,7 @@ import { addFileString, commitFiles } from './dataset'
 import { redis } from '../libs/redis'
 import CacheItem, { CacheType } from '../cache/item'
 import { getDatasetWorker } from '../libs/datalad-service'
-import { datasetOrSnapshot } from '
+import { datasetOrSnapshot } from '../utils/datasetOrSnapshot'
 
 export const readmeUrl = (datasetId, revision) => {
   return `http://${getDatasetWorker(
package/src/datalad/utils.js
CHANGED
@@ -23,15 +23,3 @@ export const addFileUrl = (datasetId, tag) => file => {
     }
   }
 }
-
-/**
- * Helper for resolvers with dataset and snapshot parents
- * @param {object} obj A snapshot or dataset parent object
- */
-export function datasetOrSnapshot(obj) {
-  if ('tag' in obj) {
-    return { datasetId: obj.id.split(':')[0], revision: obj.hexsha || obj.tag }
-  } else {
-    return { datasetId: obj.id, revision: obj.revision }
-  }
-}
package/src/graphql/resolvers/__tests__/brainlife.spec.ts
ADDED
@@ -0,0 +1,17 @@
+import { HasId } from '../../../utils/datasetOrSnapshot'
+import { brainlifeQuery } from '../brainlife'
+
+describe('brainlife resolvers', () => {
+  it('correctly queries drafts', () => {
+    expect(brainlifeQuery({ id: 'ds000001' } as HasId).toString()).toEqual(
+      'https://brainlife.io/api/warehouse/datalad/datasets?find=%7B%22removed%22%3Afalse%2C%22path%22%3A%7B%22%24regex%22%3A%22%5EOpenNeuro%2Fds000001%22%7D%7D',
+    )
+  })
+  it('correctly queries versioned datasets', () => {
+    expect(
+      brainlifeQuery({ id: 'ds000001:1.0.0', tag: '1.0.0' }).toString(),
+    ).toEqual(
+      'https://brainlife.io/api/warehouse/datalad/datasets?find=%7B%22removed%22%3Afalse%2C%22path%22%3A%7B%22%24regex%22%3A%22%5EOpenNeuro%2Fds000001%22%7D%2C%22version%22%3A%221.0.0%22%7D',
+    )
+  })
+})
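The expected URLs in these tests are percent-encoded, which obscures the query they carry. As a quick aid (a standalone TypeScript sketch, not part of the package), decoding the `find` parameter of the first expected URL yields the underlying Brainlife query object:

// Standalone sketch: decode the "find" parameter from the first expected URL above.
const expected = new URL(
  'https://brainlife.io/api/warehouse/datalad/datasets?find=%7B%22removed%22%3Afalse%2C%22path%22%3A%7B%22%24regex%22%3A%22%5EOpenNeuro%2Fds000001%22%7D%7D',
)
console.log(JSON.parse(expected.searchParams.get('find') ?? '{}'))
// -> { removed: false, path: { $regex: '^OpenNeuro/ds000001' } }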
package/src/graphql/resolvers/brainlife.ts
ADDED
@@ -0,0 +1,56 @@
+import fetch from 'node-fetch'
+import {
+  DatasetOrSnapshot,
+  getDatasetFromSnapshotId,
+} from '../../utils/datasetOrSnapshot'
+
+interface BrainlifeFindQuery {
+  removed: boolean
+  path?: {
+    $regex: string
+  }
+  version?: string
+}
+
+/**
+ * Construct a query to check if a dataset or snapshot exists on Brainlife
+ */
+export function brainlifeQuery(dataset: DatasetOrSnapshot): URL {
+  const find: BrainlifeFindQuery = {
+    removed: false,
+  }
+
+  if ('tag' in dataset) {
+    find.path = {
+      $regex: `^OpenNeuro/${getDatasetFromSnapshotId(dataset.id)}`,
+    }
+    find.version = dataset.tag
+  } else {
+    find.path = { $regex: `^OpenNeuro/${dataset.id}` }
+  }
+
+  const url = new URL('https://brainlife.io/api/warehouse/datalad/datasets')
+  url.searchParams.append('find', JSON.stringify(find))
+
+  return url
+}
+
+/**
+ * Make a request to Brainlife to check if a dataset exists
+ */
+export const onBrainlife = async (
+  dataset: DatasetOrSnapshot,
+): Promise<boolean> => {
+  try {
+    const url = brainlifeQuery(dataset)
+    const res = await fetch(url.toString())
+    const body = await res.json()
+    if (Array.isArray(body) && body.length) {
+      return true
+    } else {
+      return false
+    }
+  } catch (err) {
+    return false
+  }
+}
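A minimal usage sketch of the new module (the dataset IDs and the relative import path are illustrative, not taken from the diff): a draft parent produces a path-only query, a snapshot parent also pins Brainlife's `version` field to the tag, and `onBrainlife` resolves `true` only when the warehouse API returns a non-empty array.

// Usage sketch only; ids are examples and the import path is assumed.
import { brainlifeQuery, onBrainlife } from './brainlife'

// Draft dataset: path regex only
console.log(brainlifeQuery({ id: 'ds000001', revision: 'HEAD' }).toString())
// Snapshot: path regex plus version
console.log(brainlifeQuery({ id: 'ds000001:1.0.0', tag: '1.0.0' }).toString())

// Resolves true only for a non-empty array response, false on any error
onBrainlife({ id: 'ds000001', revision: 'HEAD' }).then(available =>
  console.log(available),
)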
package/src/graphql/resolvers/dataset.js
CHANGED
@@ -22,6 +22,7 @@ import { UpdatedFile } from '../utils/file.js'
 import { getDatasetWorker } from '../../libs/datalad-service.js'
 import { getDraftHead } from '../../datalad/dataset.js'
 import { getFileName } from '../../datalad/files.js'
+import { onBrainlife } from './brainlife'
 import semver from 'semver'
 
 export const dataset = async (obj, { id }, { user, userInfo }) => {
@@ -284,35 +285,6 @@ export const starred = (obj, _, { user }) =>
     ? datalad.getUserStarred(obj.id, user).then(res => (res ? true : false))
     : null
 
-/**
- * Is this dataset available on brainlife?
- */
-export const onBrainlife = async datasetOrSnapshot => {
-  try {
-    const find = {
-      removed: false,
-    }
-    if (datasetOrSnapshot.tag) {
-      find.path = { $regex: '^OpenNeuro/' + datasetOrSnapshot.id.split(':')[0] }
-      find.version = datasetOrSnapshot.tag
-    } else {
-      find.path = { $regex: '^OpenNeuro/' + datasetOrSnapshot.id }
-    }
-    const url = `https://brainlife.io/api/warehouse/datalad/datasets?find=${JSON.stringify(
-      find,
-    )}`
-    const res = await fetch(url)
-    const body = await res.json()
-    if (Array.isArray(body) && body.length) {
-      return true
-    } else {
-      return false
-    }
-  } catch (err) {
-    return false
-  }
-}
-
 const worker = obj => getDatasetWorker(obj.id)
 
 /**
package/src/graphql/resolvers/permissions.js
CHANGED
@@ -44,7 +44,9 @@ const publishPermissions = async datasetId => {
 export const updatePermissions = async (obj, args, { user, userInfo }) => {
   await checkDatasetAdmin(args.datasetId, user, userInfo)
   // get all users the the email specified by permissions arg
-  const users = await User.find({ email: args.userEmail })
+  const users = await User.find({ email: args.userEmail })
+    .collation({ locale: 'en', strength: 2 })
+    .exec()
 
   if (!users.length) {
     throw new Error('A user with that email address does not exist')
package/src/graphql/resolvers/snapshots.js
CHANGED
@@ -1,10 +1,6 @@
 import * as datalad from '../../datalad/snapshots.js'
-import {
-  dataset,
-  analytics,
-  snapshotCreationComparison,
-  onBrainlife,
-} from './dataset.js'
+import { dataset, analytics, snapshotCreationComparison } from './dataset.js'
+import { onBrainlife } from './brainlife'
 import { checkDatasetRead, checkDatasetWrite } from '../permissions.js'
 import { readme } from './readme.js'
 import { description } from './description.js'
@@ -37,7 +33,7 @@ export const snapshot = (obj, { datasetId, tag }, context) => {
       .then(filterRemovedAnnexObjects(datasetId, context.userInfo)),
     deprecated: () => deprecated({ datasetId, tag }),
     related: () => related(datasetId),
-    onBrainlife,
+    onBrainlife: () => onBrainlife(snapshot),
   }))
 },
 )
package/src/models/snapshot.ts
CHANGED
package/src/models/user.ts
CHANGED
@@ -29,6 +29,16 @@ const userSchema = new Schema({
 })
 
 userSchema.index({ id: 1, provider: 1 }, { unique: true })
+// Allow case insensitive email queries
+userSchema.index(
+  { email: 1 },
+  {
+    collation: {
+      locale: 'en',
+      strength: 2,
+    },
+  },
+)
 
 const User = model<UserDocument>('User', userSchema)
 
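This index only serves case-insensitive lookups when the query specifies the same collation, which is exactly what the `updatePermissions` change above adds (`strength: 2` compares base letters and diacritics but ignores case). A standalone sketch under assumed names (`DemoUser`, `findByEmailCaseInsensitive`), not code from the package:

// Sketch: a Mongoose model with the same case-insensitive email index and a
// query whose collation matches the index collation so the index can be used.
import { Schema, model } from 'mongoose'

interface UserDoc {
  email: string
}

const demoUserSchema = new Schema<UserDoc>({ email: String })
demoUserSchema.index({ email: 1 }, { collation: { locale: 'en', strength: 2 } })
const DemoUser = model<UserDoc>('DemoUser', demoUserSchema)

export function findByEmailCaseInsensitive(email: string) {
  return DemoUser.find({ email }).collation({ locale: 'en', strength: 2 }).exec()
}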
package/src/{datalad/__tests__/utils.spec.js → utils/__tests__/datasetOrSnapshot.spec.ts}
RENAMED
@@ -1,4 +1,7 @@
-import {
+import {
+  datasetOrSnapshot,
+  getDatasetFromSnapshotId,
+} from '../datasetOrSnapshot'
 
 describe('datasetOrSnapshot()', () => {
   it('resolves a dataset object correctly', () => {
@@ -39,4 +42,9 @@ describe('datasetOrSnapshot()', () => {
       revision: '1.0.1',
     })
   })
+  describe('getDatasetFromSnapshotId', () => {
+    it('extracts the datasetId correctly', () => {
+      expect(getDatasetFromSnapshotId('ds000001:1.0.0')).toBe('ds000001')
+    })
+  })
 })
package/src/utils/datasetOrSnapshot.ts
ADDED
@@ -0,0 +1,42 @@
+export interface HasId {
+  id: string
+  revision: string
+}
+
+export interface HasSnapshotId {
+  id: string
+  tag: string
+  hexsha?: string
+}
+
+export interface DatasetRevisionReference {
+  datasetId: string
+  revision: string
+}
+
+export type DatasetOrSnapshot = HasId | HasSnapshotId
+
+/**
+ * Helper for resolvers with dataset and snapshot parents
+ * @param {object} obj A snapshot or dataset parent object
+ */
+export function datasetOrSnapshot(
+  obj: DatasetOrSnapshot,
+): DatasetRevisionReference {
+  if ('tag' in obj) {
+    return {
+      datasetId: getDatasetFromSnapshotId(obj.id),
+      revision: obj.hexsha || obj.tag,
+    }
+  } else {
+    return { datasetId: obj.id, revision: obj.revision }
+  }
+}
+
+/**
+ * @param snapshotId 'ds000001:1.0.0' style snapshot ID
+ * @returns {string} Dataset id portion 'ds000001'
+ */
+export function getDatasetFromSnapshotId(snapshotId: string): string {
+  return snapshotId.split(':')[0]
+}
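For reference, how the helper normalizes the two parent shapes (values are made up; the expected outputs follow from the implementation above):

// Illustration only; ids, revisions, and hexsha values are examples.
import { datasetOrSnapshot } from './datasetOrSnapshot'

// Draft dataset parent: id and revision pass through unchanged
console.log(datasetOrSnapshot({ id: 'ds000001', revision: 'abc123' }))
// -> { datasetId: 'ds000001', revision: 'abc123' }

// Snapshot parent: the ':tag' suffix is stripped and the commit hexsha wins over the tag
console.log(
  datasetOrSnapshot({ id: 'ds000001:1.0.0', tag: '1.0.0', hexsha: 'def456' }),
)
// -> { datasetId: 'ds000001', revision: 'def456' }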