@openneuro/server 4.12.6 → 4.13.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Dockerfile +1 -1
- package/package.json +8 -18
- package/src/__mocks__/config.js +23 -7
- package/src/datalad/__tests__/changelog.spec.js +2 -1
- package/src/datalad/__tests__/dataset.spec.js +14 -14
- package/src/datalad/__tests__/description.spec.js +15 -20
- package/src/datalad/__tests__/files.spec.js +14 -11
- package/src/datalad/__tests__/pagination.spec.js +16 -22
- package/src/datalad/__tests__/snapshots.spec.js +13 -17
- package/src/datalad/changelog.js +0 -1
- package/src/datalad/draft.js +0 -1
- package/src/datalad/readme.js +0 -1
- package/src/datalad/upload.js +0 -1
- package/src/elasticsearch/reindex-dataset.ts +2 -3
- package/src/graphql/__tests__/__snapshots__/permissions.spec.js.snap +3 -3
- package/src/graphql/__tests__/comment.spec.js +27 -38
- package/src/graphql/__tests__/permissions.spec.js +2 -0
- package/src/graphql/resolvers/__tests__/brainlife.spec.ts +3 -0
- package/src/graphql/resolvers/__tests__/dataset-search.spec.js +5 -4
- package/src/graphql/resolvers/__tests__/dataset.spec.js +11 -17
- package/src/graphql/resolvers/__tests__/derivatives.spec.ts +3 -0
- package/src/graphql/resolvers/__tests__/importRemoteDataset.spec.ts +7 -4
- package/src/graphql/resolvers/__tests__/permssions.spec.js +4 -3
- package/src/graphql/resolvers/__tests__/snapshots.spec.js +3 -1
- package/src/graphql/resolvers/__tests__/user.spec.js +2 -0
- package/src/graphql/resolvers/brainlife.ts +0 -1
- package/src/graphql/resolvers/dataset-search.js +10 -2
- package/src/graphql/resolvers/derivatives.ts +0 -1
- package/src/graphql/resolvers/history.js +0 -1
- package/src/graphql/resolvers/importRemoteDataset.ts +0 -1
- package/src/graphql/resolvers/validation.js +0 -1
- package/src/libs/__tests__/apikey.spec.js +2 -1
- package/src/libs/__tests__/datalad-service.spec.js +2 -0
- package/src/libs/__tests__/dataset.spec.js +6 -19
- package/src/libs/authentication/__tests__/jwt.spec.js +3 -1
- package/src/libs/counter.js +13 -18
- package/src/libs/dataset.js +4 -9
- package/src/libs/doi/__tests__/__snapshots__/doi.spec.js.snap +2 -2
- package/src/libs/doi/__tests__/doi.spec.js +2 -0
- package/src/libs/doi/__tests__/normalize.spec.ts +3 -0
- package/src/libs/email/__tests__/index.spec.ts +3 -1
- package/src/libs/email/templates/__tests__/__snapshots__/comment-created.spec.ts.snap +2 -2
- package/src/libs/email/templates/__tests__/__snapshots__/dataset-deleted.spec.ts.snap +2 -2
- package/src/libs/email/templates/__tests__/__snapshots__/owner-unsubscribed.spec.ts.snap +2 -2
- package/src/libs/email/templates/__tests__/__snapshots__/snapshot-created.spec.ts.snap +2 -2
- package/src/libs/email/templates/__tests__/__snapshots__/snapshot-reminder.spec.ts.snap +2 -2
- package/src/models/__tests__/ingestDataset.spec.ts +8 -8
- package/src/utils/__tests__/datasetOrSnapshot.spec.ts +3 -0
- package/src/utils/__tests__/validateUrl.spec.ts +3 -0
- package/tsconfig.json +2 -1
- package/src/__mocks__/mongoose.js +0 -3
- package/src/__mocks__/superagent.js +0 -69
- package/src/libs/__mocks__/redis.js +0 -5
package/Dockerfile
CHANGED
package/package.json
CHANGED
@@ -1,14 +1,13 @@
 {
   "name": "@openneuro/server",
-  "version": "4.12.6",
+  "version": "4.13.0-alpha.0",
   "description": "Core service for the OpenNeuro platform.",
   "license": "MIT",
   "main": "src/server.js",
   "scripts": {
     "lint": "eslint src",
     "build": "tsc -b",
-    "start": "nodemon --watch ./dist --enable-source-maps ./dist/server.js",
-    "test": "yarn jest"
+    "start": "nodemon --watch ./dist --enable-source-maps ./dist/server.js"
   },
   "repository": {
     "type": "git",
@@ -18,7 +17,7 @@
   "dependencies": {
     "@apollo/client": "3.4.17",
     "@elastic/elasticsearch": "7.15.0",
-    "@openneuro/search": "^4.
+    "@openneuro/search": "^4.13.0-alpha.0",
     "@passport-next/passport-google-oauth2": "^1.0.0",
     "@sentry/node": "^4.5.3",
     "apollo-server": "2.25.4",
@@ -43,14 +42,12 @@
     "graphql-tools": "4.0.6",
     "immutable": "^3.8.2",
     "ioredis": "4.17.3",
-    "jest-fetch-mock": "^3.0.3",
     "jsdom": "^11.6.2",
     "jsonwebtoken": "^8.3.0",
     "mime-types": "^2.1.19",
     "moment": "^2.14.1",
     "mongoose": "^6.2.3",
     "morgan": "^1.6.1",
-    "node-fetch": "^2.6.0",
     "node-mailjet": "^3.3.5",
     "object-hash": "2.1.1",
     "passport": "^0.6.0",
@@ -82,28 +79,21 @@
     "@babel/runtime-corejs3": "^7.13.10",
     "@types/draft-js": "^0.10.43",
     "@types/ioredis": "^4.17.1",
-    "@types/jest": "^26.0.23",
     "@types/node-mailjet": "^3",
     "@types/semver": "^5",
     "apollo-link-schema": "^1.2.5",
     "babel-eslint": "^10.1.0",
     "core-js": "^3.10.1",
-    "ioredis-mock": "^
-    "jest": "27.5.1",
-    "mockingoose": "2.11.0",
+    "ioredis-mock": "^8.2.2",
     "nodemon": "^2.0.7",
     "supertest": "^3.0.0",
     "ts-node-dev": "1.1.6",
-    "tsc-watch": "^4.2.9"
-  },
-  "jest": {
-    "testEnvironment": "node",
-    "testPathIgnorePatterns": [
-      "dist"
-    ]
+    "tsc-watch": "^4.2.9",
+    "vitest": "^0.25.2",
+    "vitest-fetch-mock": "^0.2.1"
   },
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "fe4c230250d73e24a758a0bce058b23aa93d0aee"
 }
package/src/__mocks__/config.js
CHANGED
@@ -1,9 +1,25 @@
-const config =
+const config = {
+  auth: {
+    jwt: {
+      secret: '123456',
+    },
+  },
+  datalad: {
+    uri: 'datalad',
+    workers: 4,
+  },
+  mongo: {
+    url: 'mongodb://',
+  },
+  notifications: {
+    email: {
+      from: 'notifications@example.com',
+    },
+  },
+  elasticsearch: {},
+  doi: {
+    username: '',
+  },
+}
 
 export default config
package/src/datalad/__tests__/dataset.spec.js
CHANGED

@@ -1,26 +1,25 @@
-import
+import { vi } from 'vitest'
 import request from 'superagent'
 import { createDataset, datasetsFilter, testBlacklist } from '../dataset.js'
 import { getDatasetWorker } from '../../libs/datalad-service'
+import { connect } from 'mongoose'
 
 // Mock requests to Datalad service
+vi.mock('superagent')
+vi.mock('ioredis')
+vi.mock('../../libs/redis.js')
+vi.mock('../../config.js')
+vi.mock('../../libs/notifications.js')
 
 describe('dataset model operations', () => {
   describe('createDataset()', () => {
-    mockingoose.Counter.toReturn(
-      { _id: 'dataset', sequence_value: 1 },
-      'findOne',
-    )
+    beforeAll(() => {
+      // Setup MongoDB with mongodb-memory-server
+      connect(globalThis.__MONGO_URI__)
    })
    it('resolves to dataset id string', async () => {
-      const { id:
+      const user = { id: '1234' }
+      const { id: dsId } = await createDataset(user.id, user, {
        affirmedDefaced: true,
        affirmedConsent: true,
      })
@@ -28,9 +27,10 @@ describe('dataset model operations', () => {
      expect(dsId.slice(0, 2)).toBe('ds')
    })
    it('posts to the DataLad /datasets/{dsId} endpoint', async () => {
+      const user = { id: '1234' }
      // Reset call count for request.post
      request.post.mockClear()
-      const { id: dsId } = await createDataset(
+      const { id: dsId } = await createDataset(user.id, user, {
        affirmedDefaced: true,
        affirmedConsent: true,
      })
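
These migrated specs call connect(globalThis.__MONGO_URI__) and the comment above says MongoDB comes from mongodb-memory-server, but the setup file that provides that URI is not part of this diff, and mongodb-memory-server itself does not appear in the dependency changes shown here (it is presumably supplied elsewhere in the workspace). A minimal sketch of what such a Vitest setup file could look like; the file name and wiring are assumptions, not taken from this package:

// tests/setup-mongo.ts (hypothetical): referenced from test.setupFiles so it
// runs in each test file's context, starts an in-memory MongoDB, and exposes
// its URI the way the specs above expect it (globalThis.__MONGO_URI__).
import { MongoMemoryServer } from 'mongodb-memory-server'
import { afterAll } from 'vitest'

const mongod = await MongoMemoryServer.create()
;(globalThis as { __MONGO_URI__?: string }).__MONGO_URI__ = mongod.getUri()

afterAll(async () => {
  // stop the in-memory server when the test file finishes
  await mongod.stop()
})

With test.setupFiles pointing at a file like this in the Vitest config, each spec gets a working MongoDB URI before its beforeAll connect() runs.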
package/src/datalad/__tests__/description.spec.js
CHANGED

@@ -7,8 +7,9 @@ import {
 } from '../description.js'
 
 // Mock requests to Datalad service
+vi.mock('superagent')
+vi.mock('ioredis')
+vi.mock('../../config.js')
 
 describe('datalad dataset descriptions', () => {
   describe('appendSeniorAuthor', () => {
@@ -89,37 +90,31 @@ describe('datalad dataset descriptions', () => {
      expect(Array.isArray(repaired.Funding)).toBe(true)
    })
  })
-  it('returns the parsed dataset_description.json object',
+  it('returns the parsed dataset_description.json object', async () => {
    request.post.mockClear()
    request.__setMockResponse({
      body: { Name: 'Balloon Analog Risk-taking Task' },
      type: 'application/json',
    })
-    getDescriptionObject('ds000001')([
+    const description = await getDescriptionObject('ds000001')([
      { filename: 'dataset_description.json', id: '12345' },
-    ])
-      end()
-    })
+    ])
+    expect(description).toEqual({ Name: 'Balloon Analog Risk-taking Task' })
  })
-  it('handles a corrupted response',
+  it('handles a corrupted response', async () => {
    request.post.mockClear()
    request.__setMockResponse({
      body: Buffer.from('0x5f3759df', 'hex'),
    })
-    getDescriptionObject('ds000001')([
+    const description = await getDescriptionObject('ds000001')([
      { filename: 'dataset_description.json', id: '12345' },
-    ])
-      end()
-    })
+    ])
+    expect(description).toEqual(defaultDescription)
  })
-  it('works without a dataset_description.json being present',
-    getDescriptionObject('ds000001')([
+  it('works without a dataset_description.json being present', async () => {
+    const description = await getDescriptionObject('ds000001')([
      { filename: 'LICENSE', id: '12345' },
-    ])
-      end()
-    })
+    ])
+    expect(description).toEqual(defaultDescription)
  })
 })
package/src/datalad/__tests__/files.spec.js
CHANGED

@@ -6,7 +6,8 @@ import {
   computeTotalSize,
 } from '../files.js'
 
+vi.mock('ioredis')
+vi.mock('../../config.js')
 
 const filename = 'sub-01/anat/sub-01_T1w.nii.gz'
 
@@ -106,15 +107,17 @@ describe('datalad files', () => {
    })
  })
  describe('computeTotalSize()', () => {
+    it('computes the size correctly', () => {
+      const mockFileSizes = [
+        { filename: 'README', size: 234 },
+        { filename: 'dataset_description.json', size: 432 },
+        { filename: 'sub-01/anat/sub-01_T1w.nii.gz', size: 10858 },
+        {
+          filename: 'sub-01/func/sub-01_task-onebacktask_run-01_bold.nii.gz',
+          size: 1945682,
+        },
+      ]
+      expect(computeTotalSize(mockFileSizes)).toBe(1957206)
+    })
  })
 })
package/src/datalad/__tests__/pagination.spec.js
CHANGED

@@ -1,6 +1,8 @@
-import
+import { vi } from 'vitest'
+vi.mock('ioredis')
 import * as pagination from '../pagination.js'
-import { Types } from 'mongoose'
+import { connect, Types } from 'mongoose'
+import Dataset from '../../models/dataset'
 const ObjectID = Types.ObjectId
 
 const base64 = /^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)?$/
@@ -35,31 +37,23 @@ describe('pagination model operations', () => {
  })
 })
 describe('datasetsConnection()', () => {
-      uploader: 'f8d5a57c-879a-40e6-b151-e34c4a28ff70',
-      revision: '262a8e610e32b5766cbf669acc71911c1ece7126',
-      },
-    ],
-    count: 1,
-    },
-  ],
-  'aggregate',
-  )
+  beforeAll(async () => {
+    await connect(globalThis.__MONGO_URI__)
+    const ds = new Dataset({
+      _id: ObjectID('5bef51a1ed211400c08e5524'),
+      id: 'ds001001',
+      created: new Date('2018-11-16T23:24:17.203Z'),
+      modified: new Date('2018-11-16T23:24:25.050Z'),
+      uploader: 'f8d5a57c-879a-40e6-b151-e34c4a28ff70',
+      revision: '262a8e610e32b5766cbf669acc71911c1ece7126',
+    })
+    await ds.save()
  })
  it('returns a connection shaped result', async () => {
    const res = await pagination.datasetsConnection({
      orderBy: { created: 'ascending' },
      limit: 5,
+      first: 10,
    })([])
    expect(res).toHaveProperty('pageInfo')
    expect(res).toHaveProperty('edges')
package/src/datalad/__tests__/snapshots.spec.js
CHANGED

@@ -1,39 +1,35 @@
-import
+import { vi } from 'vitest'
+vi.mock('ioredis')
 import request from 'superagent'
 import { createDataset } from '../dataset.js'
 import { createSnapshot } from '../snapshots.js'
 import { getDatasetWorker } from '../../libs/datalad-service'
+import { connect } from 'mongoose'
 
 // Mock requests to Datalad service
+vi.mock('superagent')
+vi.mock('../../libs/redis.js', () => ({
   redis: {
-    del:
+    del: vi.fn(),
   },
   redlock: {
-    lock:
+    lock: vi.fn().mockImplementation(() => ({ unlock: vi.fn() })),
   },
 }))
 // Mock draft files calls
+vi.mock('../draft.js', () => ({
   updateDatasetRevision: () => () => Promise.resolve(),
 }))
+vi.mock('../../config.js')
+vi.mock('../../libs/notifications.js')
 
 describe('snapshot model operations', () => {
   describe('createSnapshot()', () => {
-    beforeEach(() => {
-      mockingoose.resetAll()
-      // Setup a default sequence value to return for each test
-      mockingoose.Counter.toReturn(
-        { _id: 'dataset', sequence_value: 1 },
-        'findOne',
-      )
-    })
    it('posts to the DataLad /datasets/{dsId}/snapshots/{snapshot} endpoint', async () => {
+      const user = { id: '1234' }
      const tag = 'snapshot'
+      await connect(globalThis.__MONGO_URI__)
+      const { id: dsId } = await createDataset(user.id, user, {
        affirmedDefaced: true,
        affirmedConsent: true,
      })
package/src/datalad/changelog.js
CHANGED
package/src/datalad/draft.js
CHANGED
package/src/datalad/readme.js
CHANGED
package/src/datalad/upload.js
CHANGED
package/src/elasticsearch/reindex-dataset.ts
CHANGED

@@ -1,3 +1,4 @@
+import config from '../config.js'
 import { indexDataset, queryForIndex, indexingToken } from '@openneuro/search'
 import { elasticClient } from './elastic-client'
 import {
@@ -8,13 +9,12 @@ import {
 } from '@apollo/client'
 import { setContext } from '@apollo/client/link/context'
 import { HttpLink } from '@apollo/client/link/http'
-import fetch from 'node-fetch'
 
 /**
  * Setup SchemaLink based client for querying
 */
 export const schemaLinkClient = (): ApolloClient<NormalizedCacheObject> => {
-  const accessToken = indexingToken()
+  const accessToken = indexingToken(config.auth.jwt.secret)
   const authLink = setContext((_, { headers }) => {
     return {
       headers: {
@@ -25,7 +25,6 @@ export const schemaLinkClient = (): ApolloClient<NormalizedCacheObject> => {
  })
  const httpLink = new HttpLink({
    uri: process.env.GRAPHQL_URI,
-    fetch,
  })
  return new ApolloClient({
    link: from([authLink, httpLink]),
package/src/graphql/__tests__/__snapshots__/permissions.spec.js.snap
CHANGED

@@ -1,5 +1,5 @@
-//
+// Vitest Snapshot v1
 
-exports[`resolver permissions helpers checkDatasetAdmin() resolves to false for anonymous users 1`] = `"You do not have admin access to this dataset."`;
+exports[`resolver permissions helpers > checkDatasetAdmin() > resolves to false for anonymous users 1`] = `"You do not have admin access to this dataset."`;
 
-exports[`resolver permissions helpers checkDatasetWrite() resolves to false for anonymous users 1`] = `"You do not have access to modify this dataset."`;
+exports[`resolver permissions helpers > checkDatasetWrite() > resolves to false for anonymous users 1`] = `"You do not have access to modify this dataset."`;
package/src/graphql/__tests__/comment.spec.js
CHANGED

@@ -1,50 +1,37 @@
-import
+import { vi } from 'vitest'
+import { connect } from 'mongoose'
 import { deleteComment, flatten } from '../resolvers/comment'
 import Comment from '../../models/comment'
 
-const incrementHex = hex => {
-  hex = hex.toString()
-  const splitLen = 14
-  const start = hex.substr(0, splitLen)
-  const end = hex.substr(splitLen)
-  let dec = parseInt(end, 16)
-  dec++
-  return start + dec.toString(16)
-}
+vi.mock('ioredis')
 
 describe('comment resolver helpers', () => {
   describe('deleteComment', () => {
     let aId
-    commentA = {
+    const adminUser = {
+      user: '1234',
+      userInfo: { admin: true },
+    }
+    const nonAdminUser = {
+      user: '5678',
+      userInfo: { admin: false },
+    }
+    beforeAll(async () => {
+      await connect(globalThis.__MONGO_URI__)
+      const comment = new Comment({
        text: 'a',
        createDate: new Date().toISOString(),
-      user: {
-      }
-      aId =
-      mockingoose.Comment.toReturn([], 'find')
+        user: { id: '5678' },
+      })
+      await comment.save()
+      aId = comment.id
    })
 
    it('returns an array of the deleted comment ids', async () => {
      const deletedIds = (
        await deleteComment({}, { commentId: aId }, adminUser)
      ).map(id => id.toString())
-
-      // mockingoose ids are sequential, and the documents returned from each query get new ids for some reason
-      // TODO: Replace mockingoose with better library
-      expect(deletedIds[0].slice(-5)).toEqual(incrementHex(aId).slice(-5))
+      expect(deletedIds[0]).toEqual(aId)
    })
 
    it('prevents non-admin users from deleting comments', () => {
@@ -55,11 +42,13 @@ describe('comment resolver helpers', () => {
    })
 
    describe('flatten', () => {
+      it('unrolls an array', () => {
+        const arrarr = [
+          [1, 2, 3],
+          [4, 5],
+        ]
+        const arr = flatten(arrarr)
+        expect(arr).toEqual([1, 2, 3, 4, 5])
+      })
    })
  })
package/src/graphql/resolvers/__tests__/brainlife.spec.ts
CHANGED

@@ -1,6 +1,9 @@
+import { vi } from 'vitest'
 import { HasId } from '../../../utils/datasetOrSnapshot'
 import { brainlifeQuery } from '../brainlife'
 
+vi.mock('ioredis')
+
 describe('brainlife resolvers', () => {
   it('correctly queries drafts', () => {
     expect(brainlifeQuery({ id: 'ds000001' } as HasId).toString()).toEqual(
package/src/graphql/resolvers/__tests__/dataset-search.spec.js
CHANGED

@@ -4,8 +4,9 @@ import {
   elasticRelayConnection,
 } from '../dataset-search'
 
+vi.mock('ioredis')
+vi.mock('../../../elasticsearch/elastic-client.js')
+vi.mock('../../../config.js')
 
 describe('dataset search resolvers', () => {
   describe('encodeCursor()', () => {
@@ -46,14 +47,14 @@ describe('dataset search resolvers', () => {
      },
    }
    const connection = await elasticRelayConnection(emptyApiResponse, {
-      dataset:
+      dataset: vi.fn(),
    })
    expect(connection).toMatchObject(nullRelayConnection)
  })
 
  it('returns a relay cursor for ApiResponse with results', () => {
    const mockResolvers = {
-      dataset:
+      dataset: vi.fn(),
    }
 
    const expectedApiResponse = {
package/src/graphql/resolvers/__tests__/dataset.spec.js
CHANGED

@@ -1,19 +1,16 @@
-import
+import { vi } from 'vitest'
+import { connect } from 'mongoose'
 import request from 'superagent'
 import * as ds from '../dataset'
 
+vi.mock('superagent')
+vi.mock('ioredis')
+vi.mock('../../../config.js')
+vi.mock('../../../libs/notifications.js')
 
 describe('dataset resolvers', () => {
-  mockingoose.Counter.toReturn(
-    { _id: 'dataset', sequence_value: 1 },
-    'findOne',
-  )
+  beforeAll(() => {
+    connect(globalThis.__MONGO_URI__)
  })
  describe('createDataset()', () => {
    it('createDataset mutation succeeds', async () => {
@@ -145,12 +142,9 @@ describe('dataset resolvers', () => {
  })
  describe('deleteFiles', () => {
    beforeEach(() => {
-      mockingoose.resetAll()
      request.post.mockClear()
    })
-    it('makes correct delete call to datalad',
-      // pass checkDatasetExists()
-      mockingoose.Dataset.toReturn(true, 'count')
+    it('makes correct delete call to datalad', () => {
      // capture and check datalad delete request
      request.del = url => ({
        set: (header1, headerValue1) => ({
@@ -165,7 +159,7 @@ describe('dataset resolvers', () => {
        }),
      })
 
-      ds.deleteFiles(
+      return ds.deleteFiles(
        null,
        { datasetId: 'ds999999', files: [{ path: '/sub-99' }] },
        {
@@ -175,7 +169,7 @@ describe('dataset resolvers', () => {
          admin: true,
        },
      },
-      )
+    )
    })
  })
 })
package/src/graphql/resolvers/__tests__/importRemoteDataset.spec.ts
CHANGED

@@ -1,12 +1,15 @@
-import
+import { vi } from 'vitest'
 import { importRemoteDataset, allowedImportUrl } from '../importRemoteDataset'
-import
+import createFetchMock from 'vitest-fetch-mock'
 
+vi.mock('ioredis')
+vi.mock('../../../config')
+vi.mock('../../permissions')
 
 describe('importRemoteDataset mutation', () => {
   it('given a user with access, it creates an import record for later processing', () => {
+    const fetchMock = createFetchMock(vi)
+    fetchMock.doMock()
     fetchMock.mockOnce(JSON.stringify(true))
     importRemoteDataset(
       {},
package/src/graphql/resolvers/__tests__/permssions.spec.js
CHANGED

@@ -1,12 +1,13 @@
 import { updatePermissions } from '../permissions'
 
+vi.mock('ioredis')
+vi.mock('../../permissions', () => ({
   checkDatasetAdmin: async () => Promise.resolve(),
 }))
 
-const mockExec =
+const mockExec = vi.fn()
 
+vi.mock('../../../models/user', () => ({
   find: () => ({
     exec: mockExec,
   }),
package/src/graphql/resolvers/__tests__/snapshots.spec.js
CHANGED

@@ -1,6 +1,8 @@
-jest.mock('../../../config')
 import { matchKnownObjects, filterLatestSnapshot } from '../snapshots.js'
 
+vi.mock('ioredis')
+vi.mock('../../../config.js')
+
 describe('snapshot resolvers', () => {
   describe('matchKnownObjects()', () => {
     it('should return NDA as a source when given a description containing 10.15154 DOIs', () => {
package/src/graphql/resolvers/dataset-search.js
CHANGED

@@ -1,4 +1,4 @@
-import elasticClient from '../../elasticsearch/elastic-client'
+import { elasticClient } from '../../elasticsearch/elastic-client'
 import { dataset } from './dataset'
 import Star from '../../models/stars'
 import Subscription from '../../models/subscription'
@@ -203,7 +203,15 @@ const parseQuery = async (query, datasetType, datasetStatus, userId) => {
  */
 export const advancedDatasetSearchConnection = async (
   obj,
-  {
+  {
+    query,
+    allDatasets = false,
+    datasetType,
+    datasetStatus,
+    sortBy,
+    after,
+    first = 25,
+  },
   { user, userInfo },
 ) => {
   const searchId = hashObject({
package/src/libs/__tests__/dataset.spec.js
CHANGED

@@ -1,39 +1,26 @@
-import
+import { vi } from 'vitest'
+import { connect } from 'mongoose'
 import { getAccessionNumber } from '../dataset.js'
 
+vi.mock('ioredis')
+
 describe('libs/dataset.js', () => {
   describe('getAccessionNumber', () => {
+    beforeAll(() => {
+      connect(globalThis.__MONGO_URI__)
    })
    it('returns strings starting with "ds"', async () => {
-      mockingoose.Counter.toReturn(
-        { _id: 'dataset', sequence_value: 2 },
-        'findOne',
-      )
      const ds = await getAccessionNumber()
      expect(ds.slice(0, 2)).toEqual('ds')
    })
    it('generates sequential numbers', async () => {
-      mockingoose.Counter.toReturn(
-        { _id: 'dataset', sequence_value: 2 },
-        'findOne',
-      )
      const first = await getAccessionNumber()
-      mockingoose.Counter.toReturn(
-        { _id: 'dataset', sequence_value: 3 },
-        'findOne',
-      )
      const second = await getAccessionNumber()
      const fNum = parseInt(first.slice(2))
      const sNum = parseInt(second.slice(2))
      expect(fNum).toBeLessThan(sNum)
    })
    it('returns 6 digits for ds ids', async () => {
-      mockingoose.Counter.toReturn(
-        { _id: 'dataset', sequence_value: 2 },
-        'findOne',
-      )
      const ds = await getAccessionNumber()
      const num = ds.slice(2)
      expect(num).toHaveLength(6)
package/src/libs/counter.js
CHANGED
@@ -5,22 +5,17 @@ import Counter from '../models/counter'
  *
  * A helper library for getting persistent serial numbers.
  */
-export
-  } else {
-    Counter.create({ _id: type, sequence_value: 1 }).then(callback(1))
-  }
-  })
-},
+export async function getNext(type) {
+  const found = await Counter.findOne({ _id: type }).exec()
+  if (found) {
+    await Counter.updateOne(
+      { _id: type },
+      { $inc: { sequence_value: 1 } },
+    ).exec()
+    return found.sequence_value + 1
+  } else {
+    const counter = new Counter({ _id: type, sequence_value: 1 })
+    await counter.save()
+    return 1
+  }
 }
package/src/libs/dataset.js
CHANGED
@@ -1,14 +1,9 @@
-import
+import { getNext } from './counter'
 
 /**
  * Returns the next accession number string
 */
-export
-  const offset = 1000
-  datasetNumber += offset
-  resolve('ds' + ('000000' + datasetNumber).substr(-6, 6))
-  })
-})
+export async function getAccessionNumber() {
+  const datasetNumber = (await getNext('datasets')) + 1000
+  return `ds${('000000' + datasetNumber).substr(-6, 6)}`
 }
package/src/libs/doi/__tests__/__snapshots__/doi.spec.js.snap
CHANGED

@@ -1,6 +1,6 @@
-//
+// Vitest Snapshot v1
 
-exports[`DOI minting utils template() accepts expected arguments 1`] = `
+exports[`DOI minting utils > template() > accepts expected arguments 1`] = `
 "<?xml version=\\"1.0\\" encoding=\\"UTF-8\\"?>
 <resource xmlns:xsi=\\"http://www.w3.org/2001/XMLSchema-instance\\" xmlns=\\"http://datacite.org/schema/kernel-4\\" xsi:schemaLocation=\\"http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4/metadata.xsd\\">
 <identifier identifierType=\\"DOI\\">12345</identifier>
package/src/libs/email/templates/__tests__/__snapshots__/comment-created.spec.ts.snap
CHANGED

@@ -1,6 +1,6 @@
-//
+// Vitest Snapshot v1
 
-exports[`email template -> comment created renders with expected arguments 1`] = `
+exports[`email template -> comment created > renders with expected arguments 1`] = `
 "<html>
 <head>
 <style>

The dataset-deleted, owner-unsubscribed, snapshot-created, and snapshot-reminder snapshot files each receive the same two-line change: the header comment becomes "// Vitest Snapshot v1" and a ">" separator is inserted into the snapshot key.
package/src/models/__tests__/ingestDataset.spec.ts
CHANGED

@@ -1,14 +1,16 @@
+import { vi } from 'vitest'
 import IngestDataset from '../ingestDataset'
 
+vi.mock('ioredis')
+
 describe('IngestDataset model', () => {
-  it('IngestDataset model fails if required fields are missing',
+  it('IngestDataset model fails if required fields are missing', () => {
    const model = new IngestDataset()
    model.validate(result => {
      expect(result.name).toEqual('ValidationError')
-      done()
    })
  })
-  it('IngestDataset model URL validation fails with a bad URL',
+  it('IngestDataset model URL validation fails with a bad URL', async () => {
    const badUrlModel = new IngestDataset({
      datasetId: 'ds00000',
      userId: 'b3df6399-d1be-4e07-b997-9f7aa3ed1f8e',
@@ -16,12 +18,11 @@ describe('IngestDataset model', () => {
      imported: false,
      notified: false,
    })
-    badUrlModel.validate(result => {
+    await badUrlModel.validate(result => {
      expect(result.name).toEqual('ValidationError')
-      done()
    })
  })
-  it('IngestDataset model URL validation succeeds with a good URL',
+  it('IngestDataset model URL validation succeeds with a good URL', async () => {
    const goodUrlModel = new IngestDataset({
      datasetId: 'ds00000',
      userId: 'b3df6399-d1be-4e07-b997-9f7aa3ed1f8e',
@@ -29,9 +30,8 @@ describe('IngestDataset model', () => {
      imported: false,
      notified: false,
    })
-    goodUrlModel.validate(result => {
+    await goodUrlModel.validate(result => {
      expect(result).toBe(undefined)
-      done()
    })
  })
 })
package/src/utils/__tests__/datasetOrSnapshot.spec.ts
CHANGED

@@ -1,8 +1,11 @@
+import { vi } from 'vitest'
 import {
   datasetOrSnapshot,
   getDatasetFromSnapshotId,
 } from '../datasetOrSnapshot'
 
+vi.mock('ioredis')
+
 describe('datasetOrSnapshot()', () => {
   it('resolves a dataset object correctly', () => {
     const dataset = {
package/tsconfig.json
CHANGED
@@ -3,7 +3,8 @@
   "compilerOptions": {
     "rootDir": "./src",
     "outDir": "./dist",
-    "tsBuildInfoFile": "../../.build-cache/server.tsbuildinfo"
+    "tsBuildInfoFile": "../../.build-cache/server.tsbuildinfo",
+    "types": ["vitest/globals"]
   },
   "include": ["./src"],
   "files": ["./src/lerna.json"],
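
The jest configuration block removed from package.json above ("testEnvironment": "node", "testPathIgnorePatterns": ["dist"]) and the new "vitest/globals" types entry imply an equivalent Vitest configuration, which is not included in this diff. A rough sketch of what it might contain; the file name and exact option values are assumptions:

// vitest.config.ts (hypothetical): approximate equivalent of the removed jest settings
import { defineConfig } from 'vitest/config'

export default defineConfig({
  test: {
    // matches "types": ["vitest/globals"] added to tsconfig.json
    globals: true,
    // replaces jest's "testEnvironment": "node"
    environment: 'node',
    // replaces jest's "testPathIgnorePatterns": ["dist"]
    exclude: ['**/dist/**', '**/node_modules/**'],
  },
})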
package/src/__mocks__/superagent.js
DELETED

@@ -1,69 +0,0 @@
-// mock for superagent - __mocks__/superagent.js
-class MockResponse {
-  status() {
-    return 200
-  }
-  ok() {
-    return true
-  }
-  body() {
-    return {}
-  }
-  get() {
-    return jest.fn()
-  }
-  toError() {
-    return jest.fn()
-  }
-}
-
-const createRequestStub = obj => jest.fn(() => obj)
-
-function Request() {
-  this.mockResponse = new MockResponse()
-  this.mockDelay = null
-  this.mockError = null
-
-  this.post = createRequestStub(this)
-  this.get = createRequestStub(this)
-  this.send = createRequestStub(this)
-  this.query = createRequestStub(this)
-  this.field = createRequestStub(this)
-  this.set = createRequestStub(this)
-  this.accept = createRequestStub(this)
-  this.timeout = createRequestStub(this)
-  this.then = cb => {
-    return new Promise((resolve, reject) => {
-      if (this.mockError) {
-        return reject(this.mockError)
-      }
-      return resolve(cb(this.mockResponse))
-    })
-  }
-  this.end = jest.fn().mockImplementation(callback => {
-    if (this.mockDelay) {
-      this.delayTimer = setTimeout(
-        callback,
-        0,
-        this.mockError,
-        this.mockResponse,
-      )
-
-      return
-    }
-
-    callback(this.mockError, this.mockResponse)
-  })
-  //expose helper methods for tests to set
-  this.__setMockDelay = boolValue => {
-    this.mockDelay = boolValue
-  }
-  this.__setMockResponse = mockRes => {
-    this.mockResponse = mockRes
-  }
-  this.__setMockError = mockErr => {
-    this.mockError = mockErr
-  }
-}
-
-module.exports = new Request()