@openneuro/server 4.3.2 → 4.4.0-alpha.4

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the public registry.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openneuro/server",
-  "version": "4.3.2",
+  "version": "4.4.0-alpha.4",
   "description": "Core service for the OpenNeuro platform.",
   "license": "MIT",
   "main": "src/server.js",
@@ -42,6 +42,7 @@
   "graphql-tools": "4.0.6",
   "immutable": "^3.8.2",
   "ioredis": "4.17.3",
+  "jest-fetch-mock": "^3.0.3",
   "jsdom": "^11.6.2",
   "jsonwebtoken": "^8.3.0",
   "mime-types": "^2.1.19",
@@ -49,7 +50,8 @@
   "mongoose": "5.12.3",
   "morgan": "^1.6.1",
   "node-fetch": "^2.6.0",
-  "nodemailer": "6.4.16",
+  "nodemailer": "^6.7.2",
+  "nodemailer-mailjet-transport": "^1.0.4",
   "object-hash": "2.1.1",
   "passport": "^0.4.0",
   "passport-globus": "^0.0.1",
@@ -103,5 +105,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "48f3428ae8bd2f596291c780c0542c0beeb929a7"
+  "gitHead": "e6fae8f69ec77f67577425dd839c27a36cce409e"
 }
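
Note on the mail dependencies above: nodemailer moves to a caret range and nodemailer-mailjet-transport is introduced, which pairs with the CRN_SERVER_MAIL_* settings in src/config.js below. A minimal sketch of how a Mailjet transport is typically wired into nodemailer follows; the credential names and environment variables here are illustrative assumptions, not values taken from this package:

import nodemailer from 'nodemailer'
import mailjetTransport from 'nodemailer-mailjet-transport'

// Hypothetical wiring: auth values would come from deployment configuration.
const transporter = nodemailer.createTransport(
  mailjetTransport({
    auth: {
      apiKey: process.env.MAILJET_API_KEY, // assumed env var
      apiSecret: process.env.MAILJET_API_SECRET, // assumed env var
    },
  }),
)

// Once created, it behaves like any other nodemailer transport:
// transporter.sendMail({ from, to, subject, html }, callback)
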
package/src/config.js CHANGED
@@ -46,6 +46,7 @@ const config = {
       user: process.env.CRN_SERVER_MAIL_USER,
       url: process.env.CRN_SERVER_MAIL_URL,
       pass: process.env.CRN_SERVER_MAIL_PASS,
+      from: process.env.CRN_SERVER_MAIL_FROM,
     },
   },
   sentry: {
@@ -95,10 +95,16 @@ describe('dataset model operations', () => {
     expect(testBlacklist('.git', 'HEAD')).toBe(true)
   })
   it('returns true for root level .DS_Store files', () => {
-    expect(testBlacklist('', '.DS_Store'))
+    expect(testBlacklist('', '.DS_Store')).toBe(true)
   })
   it('returns true for nested .DS_Store files', () => {
-    expect(testBlacklist('sub-01/anat/', '.DS_Store'))
+    expect(testBlacklist('sub-01/anat/', '.DS_Store')).toBe(true)
+  })
+  // https://github.com/OpenNeuroOrg/openneuro/issues/2519
+  it('skips ._ prefixed files created by macOS', () => {
+    expect(testBlacklist('', '._.DS_Store')).toBe(true)
+    expect(testBlacklist('stimuli/', '._1002.png')).toBe(true)
+    expect(testBlacklist('stimuli/', 'test._1002.png')).toBe(false)
   })
 })
 })
@@ -298,7 +298,7 @@ export const getDatasets = options => {
 }

 // Files to skip in uploads
-const filenameBlacklist = new RegExp(/.DS_Store|Icon\r/)
+const filenameBlacklist = new RegExp(/.DS_Store|Icon\r|^\._/)
 const pathBlacklist = new RegExp(/^.git|^.gitattributes|^.datalad|^.heudiconv/)
 export const testBlacklist = (path, filename) =>
   filenameBlacklist.test(filename) || pathBlacklist.test(path)
@@ -505,7 +505,7 @@ export const getDatasetAnalytics = (datasetId, tag) => {
 export const getStars = datasetId => Star.find({ datasetId })

 export const getUserStarred = (datasetId, userId) =>
-  Star.count({ datasetId, userId }).exec()
+  Star.countDocuments({ datasetId, userId }).exec()

 export const getFollowers = datasetId => {
   return Subscription.find({
@@ -28,7 +28,7 @@ const lockSnapshot = (datasetId, tag) => {
 }

 const createSnapshotMetadata = (datasetId, tag, hexsha, created) => {
-  return Snapshot.update(
+  return Snapshot.updateOne(
     { datasetId: datasetId, tag: tag },
     {
       $set: {
@@ -50,7 +50,7 @@ export const checkPermissionLevel = (permission, state) => {
 }

 export const checkDatasetExists = async datasetId => {
-  const found = await Dataset.count({ id: datasetId }).exec()
+  const found = await Dataset.countDocuments({ id: datasetId }).exec()
   if (!found) throw new Error(`Dataset ${datasetId} does not exist.`)
 }

@@ -0,0 +1,41 @@
+import fetchMock from 'jest-fetch-mock'
+import { importRemoteDataset, allowedImportUrl } from '../importRemoteDataset'
+import { checkDatasetWrite } from '../../permissions'
+
+jest.mock('../../../config')
+jest.mock('../../permissions')
+
+describe('importRemoteDataset mutation', () => {
+  it('given a user with access, it creates an import record for later processing', () => {
+    fetchMock.mockOnce(JSON.stringify(true))
+    importRemoteDataset(
+      {},
+      { datasetId: 'ds000000', url: '' },
+      { user: '1234', userInfo: { admin: true } },
+    )
+  })
+  describe('allowedImportUrl()', () => {
+    it('allows brainlife.io', () => {
+      expect(
+        allowedImportUrl('https://brainlife.io/ezbids/dataset-to-import.zip'),
+      ).toBe(true)
+    })
+    it('allows a test bucket for OpenNeuro use', () => {
+      expect(
+        allowedImportUrl(
+          'https://openneuro-test-import-bucket.s3.us-west-2.amazonaws.com/ds000003.zip',
+        ),
+      ).toBe(true)
+    })
+    it('does not allow other URLs', () => {
+      expect(allowedImportUrl('https://openneuro.org')).toBe(false)
+      expect(allowedImportUrl('iiajsdfoijawe')).toBe(false)
+      expect(allowedImportUrl('http://google.com/some-zip-file.zip')).toBe(
+        false,
+      )
+      expect(
+        allowedImportUrl('http://github.com/brainlife.io/somewhere-else.zip'),
+      ).toBe(false)
+    })
+  })
+})
@@ -83,7 +83,7 @@ export const datasetName = obj => {
  */
 export const updateDatasetName = datasetId =>
   datasetName({ id: datasetId }).then(name =>
-    DatasetModel.update({ id: datasetId }, { $set: { name } }).exec(),
+    DatasetModel.updateOne({ id: datasetId }, { $set: { name } }).exec(),
   )

 /**
@@ -0,0 +1,81 @@
+import fetch from 'node-fetch'
+import IngestDataset from '../../models/ingestDataset'
+import { checkDatasetWrite } from '../permissions.js'
+import { getDatasetWorker } from '../../libs/datalad-service'
+import { generateDataladCookie } from '../../libs/authentication/jwt'
+import notifications from '../../libs/notifications'
+import config from '../../config'
+
+/**
+ * Test if a URL is allowed to be imported
+ * @param raw String URL
+ * @returns {boolean}
+ */
+export function allowedImportUrl(raw: string): boolean {
+  let url
+  try {
+    url = new URL(raw)
+  } catch (_) {
+    return false
+  }
+  if (url.hostname === 'brainlife.io') {
+    return true
+  } else if (
+    url.hostname === 'openneuro-test-import-bucket.s3.us-west-2.amazonaws.com'
+  ) {
+    return true
+  } else {
+    return false
+  }
+}
+
+/**
+ * Queue a bundle of files for import into an existing dataset
+ */
+export async function importRemoteDataset(
+  _: Record<string, unknown>,
+  { datasetId, url }: { datasetId: string; url: string },
+  { user, userInfo }: { user: string; userInfo: Record<string, unknown> },
+): Promise<string | null> {
+  await checkDatasetWrite(datasetId, user, userInfo)
+  if (!allowedImportUrl(url)) {
+    return
+  }
+  const ingest = new IngestDataset({ datasetId, url, userId: user })
+  // undefined validateSync() means no errors
+  if (ingest.validateSync() == undefined) {
+    await ingest.save()
+    const importId = ingest._id.toString()
+    const worker = getDatasetWorker(datasetId)
+    const importUrl = `http://${worker}/datasets/${datasetId}/import/${importId}`
+    await fetch(importUrl, {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+        Cookie: generateDataladCookie(config)(userInfo),
+      },
+      body: JSON.stringify({ url }),
+    })
+    return ingest._id.toString()
+  } else {
+    return
+  }
+}
+
+export async function finishImportRemoteDataset(
+  _: Record<string, unknown>,
+  { id, success, message }: { id: string; success: boolean; message: string },
+  { user, userInfo }: { user: string; userInfo: Record<string, unknown> },
+): Promise<boolean> {
+  const ingest = await IngestDataset.findById(id)
+  ingest.imported = success
+  await ingest.save()
+  await notifications.datasetImported(
+    ingest.datasetId,
+    ingest.userId,
+    success,
+    message,
+    ingest.url,
+  )
+  return true
+}
@@ -36,6 +36,10 @@ import { reexportRemotes } from './reexporter'
 import { resetDraft } from './reset'
 import { createReviewer, deleteReviewer } from './reviewer'
 import { createRelation, deleteRelation } from './relation'
+import {
+  importRemoteDataset,
+  finishImportRemoteDataset,
+} from './importRemoteDataset'

 const Mutation = {
   createDataset,
@@ -79,6 +83,8 @@ const Mutation = {
   deleteReviewer,
   createRelation,
   deleteRelation,
+  importRemoteDataset,
+  finishImportRemoteDataset,
 }

 export default Mutation
@@ -187,6 +187,10 @@ export const typeDefs = `
   createRelation(datasetId: ID!, doi: String!, relation: RelatedObjectRelation!, kind: RelatedObjectKind!, description: String): Dataset
   # Remove a relationship to an external DOI
   deleteRelation(datasetId: ID!, doi: String!): Dataset
+  # Submit an import for a remote dataset, returns id if the URL is accepted for import
+  importRemoteDataset(datasetId: ID!, url: String!): ID
+  # Finish and notify import is done, returns true if successful
+  finishImportRemoteDataset(id: ID!, success: Boolean!, message: String): Boolean
 }

 # Anonymous dataset reviewer
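
For context, the two new schema entries above map onto the resolvers added in importRemoteDataset. A sketch of how a client might call the first one, derived only from the schema in this hunk (the dataset id is illustrative; the URL must pass the allowedImportUrl() check for an id to come back):

// Sketch: submitting an import through any GraphQL client.
const IMPORT_REMOTE_DATASET = `
  mutation ImportRemoteDataset($datasetId: ID!, $url: String!) {
    importRemoteDataset(datasetId: $datasetId, url: $url)
  }
`
const variables = {
  datasetId: 'ds000001', // illustrative dataset id
  url: 'https://brainlife.io/ezbids/dataset-to-import.zip', // allow-listed host
}
// A returned ID means the import was accepted; null means the URL was rejected.
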
@@ -15,7 +15,7 @@ export default {
   getNext(type, callback) {
     Counter.findOne({ _id: type }).then(found => {
       if (found) {
-        Counter.update({ _id: type }, { $inc: { sequence_value: 1 } }).then(
+        Counter.updateOne({ _id: type }, { $inc: { sequence_value: 1 } }).then(
           callback(found.sequence_value + 1),
         )
       } else {
@@ -26,7 +26,7 @@ export const send = (

   // configure mail options
   const mailOptions = {
-    from: '"OpenNeuro" <notifications@openneuro.org>',
+    from: `"OpenNeuro" <${config.notifications.email.from}>`,
     replyTo: from,
     to: email.to,
     subject: email.subject,
@@ -0,0 +1,93 @@
+export const datasetImportFailed = ({
+  siteUrl,
+  name,
+  datasetId,
+  message,
+  retryUrl,
+}: {
+  siteUrl: string
+  name: string
+  datasetId: string
+  message: string
+  retryUrl: string
+}): string => `<html>
+  <head>
+    <style>
+      body {
+        font-family: 'Open Sans', sans-serif;
+        font-weight: lighter;
+        background: #F5F5F5;
+      }
+      footer {
+        border-top: 1px solid #333;
+        padding-top: 15px;
+        background: #F5F5F5;
+      }
+      .link {
+        color: #00505c
+      }
+      .link:hover {
+        color: #0093a9
+      }
+      .top-bar {
+        width: 100%;
+        background: #333;
+        padding: 8px 0px 8px 15px;
+      }
+      .content {
+        padding: 15px;
+      }
+      p {
+        font-size: 16px;
+        font-weight: lighter;
+      }
+      b {
+        font-weight: bold;
+      }
+      .dataset-link {
+        display: inline-block;
+        background: #008599;
+        color: #FFF;
+        font-size: 20px;
+        padding: 8px 15px;
+        text-decoration: none;
+        cursor: pointer;
+      }
+      .comment {
+        border: 1px solid #ccc;
+        padding: 15px;
+      }
+      .log {
+        white-space: pre-wrap;
+      }
+      .FAILED {color: #d9534f;}
+      .FINISHED {color: #5cb85c;}
+    </style>
+  </head>
+  <body>
+    <div class="top-bar">
+      <img src="${siteUrl}/assets/CRN-Logo-Placeholder.png" />
+    </div>
+    <div class="content">
+      <h2>Hi, ${name}</h2>
+
+      <p>
+        A dataset imported you requested failed. It was imported as <b>${datasetId}</b>.
+      </p>
+
+      <div>
+        <a class='dataset-link' href="${siteUrl}/import?url=${retryUrl}&datasetId=${datasetId}">Click here to retry the import &raquo;</a>
+      </div>
+
+      <div>
+        <h4>Import Log</h4>
+        <p class="log">${message}</p>
+      </div>
+
+      <p>
+        Sincerely,
+        The CRN Team
+      </p>
+    </div>
+  </body>
+<html>`
@@ -0,0 +1,84 @@
+export const datasetImportEmail = ({
+  siteUrl,
+  name,
+  datasetId,
+}: {
+  siteUrl: string
+  name: string
+  datasetId: string
+}): string => `<html>
+  <head>
+    <style>
+      body {
+        font-family: 'Open Sans', sans-serif;
+        font-weight: lighter;
+        background: #F5F5F5;
+      }
+      footer {
+        border-top: 1px solid #333;
+        padding-top: 15px;
+        background: #F5F5F5;
+      }
+      .link {
+        color: #00505c
+      }
+      .link:hover {
+        color: #0093a9
+      }
+      .top-bar {
+        width: 100%;
+        background: #333;
+        padding: 8px 0px 8px 15px;
+      }
+      .content {
+        padding: 15px;
+      }
+      p {
+        font-size: 16px;
+        font-weight: lighter;
+      }
+      b {
+        font-weight: bold;
+      }
+      .dataset-link {
+        display: inline-block;
+        background: #008599;
+        color: #FFF;
+        font-size: 20px;
+        padding: 8px 15px;
+        text-decoration: none;
+        cursor: pointer;
+      }
+      .comment {
+        border: 1px solid #ccc;
+        padding: 15px;
+      }
+      .log {
+        white-space: pre-wrap;
+      }
+      .FAILED {color: #d9534f;}
+      .FINISHED {color: #5cb85c;}
+    </style>
+  </head>
+  <body>
+    <div class="top-bar">
+      <img src="${siteUrl}/assets/CRN-Logo-Placeholder.png" />
+    </div>
+    <div class="content">
+      <h2>Hi, ${name}</h2>
+
+      <p>
+        A dataset imported you requested has finished. It was imported as <b>${datasetId}</b>.
+      </p>
+
+      <div>
+        <a class='dataset-link' href="${siteUrl}/datasets/${datasetId}">Click here to view the dataset on OpenNeuro &raquo;</a>
+      </div>
+
+      <p>
+        Sincerely,
+        The CRN Team
+      </p>
+    </div>
+  </body>
+<html>`
@@ -1,13 +1,9 @@
 /*eslint no-console: ["error", { allow: ["log"] }] */
-import toDate from 'date-fns/toDate'
-import subHours from 'date-fns/subHours'
 import config from '../config'
 import { send as emailSend } from './email'
 import request from 'superagent'
-import Notification from '../models/notification'
 import User from '../models/user'
 import Subscription from '../models/subscription'
-import MailgunIdentifier from '../models/mailgunIdentifier'
 import moment from 'moment'
 import url from 'url'
 import bidsId from './bidsId'
@@ -19,6 +15,8 @@ import { datasetDeleted } from '../libs/email/templates/dataset-deleted'
 import { ownerUnsubscribed } from '../libs/email/templates/owner-unsubscribed'
 import { snapshotCreated } from '../libs/email/templates/snapshot-created'
 import { snapshotReminder } from '../libs/email/templates/snapshot-reminder'
+import { datasetImportEmail } from '../libs/email/templates/dataset-imported'
+import { datasetImportFailed } from '../libs/email/templates/dataset-import-failed'

 function noop() {
   // No callback helper
@@ -27,20 +25,6 @@ function noop() {
 // public api ---------------------------------------------

 const notifications = {
-  /**
-   * Add
-   *
-   * Takes a notification object and
-   * adds it to the database to be processed by
-   * the cron.
-   */
-  add(notification, callback) {
-    Notification.updateOne({ _id: notification._id }, notification, {
-      upsert: true,
-      new: true,
-    }).then(callback)
-  },
-
   /**
    * Send
    */
@@ -60,32 +44,23 @@ const notifications = {
    */
   async snapshotCreated(datasetId, body, uploader) {
     const tag = body.tag
-    // if we still have a promise for the body files, await it
-    const files = await body.files
     const uploaderId = uploader ? uploader.id : null
-    const datasetDescription = files.find(
-      file => file.filename == 'dataset_description.json',
-    )
-    const datasetDescriptionId = datasetDescription
-      ? datasetDescription.id
-      : null
     const URI = getDatasetWorker(datasetId)
-    const datasetDescriptionUrl = `${URI}/datasets/${datasetId}/objects/${datasetDescriptionId}`
-
-    const changesFile = files.find(file => file.filename == 'CHANGES')
-    const changesId = changesFile ? changesFile.id : null
-    const changesUrl = `${URI}/datasets/${datasetId}/objects/${changesId}`
+    const datasetDescriptionUrl = `${URI}/datasets/${datasetId}/snapshots/${tag}/files/dataset_description.json`
+    const changesUrl = `${URI}/datasets/${datasetId}/snapshots/${tag}/files/CHANGES`

     // get the dataset description
-    let response = await request.get(datasetDescriptionUrl)
-    const description = response.body
+    const descriptionResponse = await request.get(datasetDescriptionUrl)
+    const description = descriptionResponse.body
     const datasetLabel = description.Name ? description.Name : 'Unnamed Dataset'

     // get the snapshot changelog
-    response = await request
+    const changesResponse = await request
       .get(changesUrl)
       .responseType('application/octet-stream')
-    const changelog = response.body ? response.body.toString() : null
+    const changelog = changesResponse.body
+      ? changesResponse.body.toString()
+      : null
     // get all users that are subscribed to the dataset
     const subscriptions = await Subscription.find({
       datasetId: datasetId,
@@ -113,8 +88,8 @@ const notifications = {
           }),
         },
       }
-      // send the email to the notifications database for distribution
-      notifications.add(emailContent, noop)
+      // send the email
+      notifications.send(emailContent, noop)
     }
   })
 },
@@ -186,7 +161,7 @@ const notifications = {
       },
     }
     // send each email to the notification database for distribution
-    notifications.add(emailContent, noop)
+    notifications.send(emailContent, noop)
   }
 })
 })
@@ -231,8 +206,7 @@ const notifications = {
         }),
      },
     }
-    // send each email to the notification database for distribution
-    notifications.add(emailContent, noop)
+    notifications.send(emailContent, noop)
   }
 })
 })
@@ -281,8 +255,7 @@ const notifications = {
        }),
      },
    }
-    // send each email to the notification database for distribution
-    notifications.add(emailContent, noop)
+    notifications.send(emailContent, noop)
   }
 })
 })
@@ -334,7 +307,7 @@ const notifications = {
      },
    }
    // send each email to the notification database for distribution
-    notifications.add(emailContent, noop)
+    notifications.send(emailContent, noop)
   }
 })
 })
@@ -342,40 +315,48 @@ const notifications = {
     )
   },

-  initCron() {
-    setInterval(() => {
-      // After one hour, retry a notification even if we have a lock
-      Notification.findOneAndUpdate(
-        { notificationLock: { $lte: toDate(subHours(Date.now(), 1)) } },
-        { $set: { notificationLock: new Date(Date.now()) } },
-      ).exec((err, notification) => {
-        if (err) {
-          console.log(
-            'NOTIFICATION ERROR - Could not find notifications collection',
-          )
-        } else {
-          if (notification) {
-            notifications.send(notification, (err, response) => {
-              if (!err) {
-                notification.remove()
-                if (response && response.messageId) {
-                  new MailgunIdentifier({
-                    messageId: response.messageId,
-                  }).save(err => {
-                    if (err) {
-                      throw `failed to save mailgunIdentifier ${response.messageId}`
-                    }
-                  })
-                }
-              } else {
-                console.log('NOTIFICATION ERROR ----------')
-                console.log(err)
-              }
-            })
-          }
-        }
+  /**
+   * Import of a remote resource finished
+   * @param {string} datasetId
+   * @param {string} userId
+   * @param {boolean} success
+   * @param {string} message
+   */
+  async datasetImported(datasetId, userId, success, message, retryUrl) {
+    const user = await User.findOne({ id: userId }).exec()
+    let html
+    if (success) {
+      html = datasetImportEmail({
+        name: user.name,
+        datasetId: datasetId,
+        siteUrl:
+          url.parse(config.url).protocol +
+          '//' +
+          url.parse(config.url).hostname,
+      })
+    } else {
+      html = datasetImportFailed({
+        name: user.name,
+        datasetId: datasetId,
+        message: success ? '' : message,
+        siteUrl:
+          url.parse(config.url).protocol +
+          '//' +
+          url.parse(config.url).hostname,
+        retryUrl: retryUrl,
       })
-    }, 3600000)
+    }
+    const emailContent = {
+      _id: datasetId + '_' + user._id + '_' + 'dataset_imported',
+      type: 'email',
+      email: {
+        to: user.email,
+        subject: `Dataset Import ${success ? 'Success' : 'Failed'}`,
+        html: html,
+      },
+    }
+    // send the email to the notifications database for distribution
+    notifications.send(emailContent, noop)
   },
 }

@@ -0,0 +1,37 @@
+import IngestDataset from '../ingestDataset'
+
+describe('IngestDataset model', () => {
+  it('IngestDataset model fails if required fields are missing', done => {
+    const model = new IngestDataset()
+    model.validate(result => {
+      expect(result.name).toEqual('ValidationError')
+      done()
+    })
+  })
+  it('IngestDataset model URL validation fails with a bad URL', done => {
+    const badUrlModel = new IngestDataset({
+      datasetId: 'ds00000',
+      userId: 'b3df6399-d1be-4e07-b997-9f7aa3ed1f8e',
+      url: 'this is not a valid URL',
+      imported: false,
+      notified: false,
+    })
+    badUrlModel.validate(result => {
+      expect(result.name).toEqual('ValidationError')
+      done()
+    })
+  })
+  it('IngestDataset model URL validation succeeds with a good URL', done => {
+    const goodUrlModel = new IngestDataset({
+      datasetId: 'ds00000',
+      userId: 'b3df6399-d1be-4e07-b997-9f7aa3ed1f8e',
+      url: 'https://example.com',
+      imported: false,
+      notified: false,
+    })
+    goodUrlModel.validate(result => {
+      expect(result).toBe(null)
+      done()
+    })
+  })
+})
@@ -0,0 +1,35 @@
+/**
+ * Model for ingest of new datasets from a remote URL (zip/tarball)
+ */
+import mongoose, { Document, ObjectId } from 'mongoose'
+const { Schema, model } = mongoose
+import { validateUrl } from '../utils/validateUrl'
+
+export interface IngestDatasetDocument extends Document {
+  _id: ObjectId
+  datasetId: string
+  userId: string
+  url: string
+  imported: boolean
+}
+
+const ingestDatasetSchema = new Schema({
+  datasetId: { type: String, required: true },
+  userId: { type: String, required: true },
+  url: {
+    type: String,
+    required: true,
+    validate: {
+      validator: validateUrl,
+      message: 'Must be a valid HTTPS URL',
+    },
+  },
+  imported: { type: Boolean, required: true, default: false },
+})
+
+const IngestDataset = model<IngestDatasetDocument>(
+  'IngestDataset',
+  ingestDatasetSchema,
+)
+
+export default IngestDataset
@@ -9,6 +9,7 @@ export interface NotificationDocument extends Document {
   from: string
   subject: string
   template: string
+  html: string
   data: {
     name: string
     appName: string
@@ -43,6 +44,7 @@ const notificationSchema = new Schema({
   from: String,
   subject: String,
   template: String,
+  html: String,
   data: {
     name: String,
     appName: String,
package/src/server.js CHANGED
@@ -8,7 +8,6 @@ import { createServer } from 'http'
 import mongoose from 'mongoose'
 import subscriptionServerFactory from './libs/subscription-server.js'
 import { connect as redisConnect } from './libs/redis'
-import notifications from './libs/notifications'
 import config from './config'
 import createApp from './app'
 import { version } from './lerna.json'
@@ -16,8 +15,6 @@ import { version } from './lerna.json'
 const redisConnectionSetup = async () => {
   try {
     await redisConnect(config.redis)
-    // start background tasks
-    notifications.initCron()
   } catch (err) {
     // eslint-disable-next-line no-console
     console.error(err)
@@ -0,0 +1,13 @@
+import { validateUrl } from '../validateUrl'
+
+describe('validateUrl', () => {
+  it('returns true for a regular HTTPS url', () => {
+    expect(validateUrl('https://openneuro.org')).toBe(true)
+  })
+  it('returns false for a regular HTTP url', () => {
+    expect(validateUrl('http://openneuro.org')).toBe(false)
+  })
+  it('returns false for something that is not really a URL', () => {
+    expect(validateUrl('openneuro.org/robots.txt')).toBe(false)
+  })
+})
@@ -0,0 +1,13 @@
+/**
+ * Test if a string is an HTTPS URL
+ * @param value Text string to test for a URL
+ * @returns {boolean} True if valid
+ */
+export function validateUrl(value: string): boolean {
+  try {
+    const url = new URL(value)
+    return url.protocol === 'https:'
+  } catch (err) {
+    return false
+  }
+}
@@ -1,20 +0,0 @@
-/* eslint-disable no-console */
-/**
- * Prepopulate Dataset.name field on upgrade
- */
-import path from 'path'
-import Dataset from '../models/dataset'
-import { datasetName } from '../graphql/resolvers/dataset.js'
-
-export default {
-  id: path.basename(module.filename),
-  update: async () => {
-    const datasets = await Dataset.find().exec()
-    for (const dataset of datasets) {
-      await datasetName(dataset).then(name => {
-        console.log(`Updating ${dataset.id} with name "${name}"`)
-        return Dataset.update({ id: dataset.id }, { $set: { name } }).exec()
-      })
-    }
-  },
-}
@@ -1,4 +0,0 @@
-import nameField from './05-nameField.js'
-
-// Ordered list should match a sort of the filenames
-export default [nameField]
@@ -1,50 +0,0 @@
-/* eslint-disable no-console */
-// Run all pending migrations
-import config from '../config.js'
-import { connect as redisConnect } from '../libs/redis.js'
-import mongoose from 'mongoose'
-import Migration from '../models/migration'
-import migrations from './index.js'
-
-// Setup Mongoose
-mongoose.connect(`${config.mongo.url}crn`)
-
-/**
- * This is a basic migration system, runs any unapplied updates in order
- * from the index provided in index.js
- *
- * Will yell at you if there are errors.
- *
- * Runs manually for now but could run at startup.
- */
-const upgradeAll = async () => {
-  await redisConnect(config.redis)
-  // Connect to old database(s)
-  for (const migrationDefinition of migrations) {
-    const key = migrationDefinition.id
-    const migrate = await Migration.findOneAndUpdate(
-      { id: key },
-      {},
-      { upsert: true, new: true, setDefaultsOnInsert: true },
-    )
-    try {
-      if (migrate.complete) {
-        console.log(`${key} has already run - continuing`)
-      } else {
-        await migrationDefinition.update()
-        console.log(`${key} migration complete`)
-        migrate.complete = true
-        await migrate.save()
-      }
-    } catch (e) {
-      console.log(`${key} failed to execute - exiting`)
-      throw e
-    }
-  }
-}
-
-// Entrypoint
-upgradeAll().then(() => {
-  mongoose.connection.close()
-  process.exit(0)
-})
@@ -1,16 +0,0 @@
-import mongoose, { Document } from 'mongoose'
-const { Schema, model } = mongoose
-
-export interface MigrationDocument extends Document {
-  id: string
-  complete: boolean
-}
-
-const migrationSchema = new Schema({
-  id: String,
-  complete: { type: Boolean, default: false },
-})
-
-const Migration = model<MigrationDocument>('Migration', migrationSchema)
-
-export default Migration