@openneuro/server 4.3.0 → 4.4.0-alpha.2

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@openneuro/server",
-  "version": "4.3.0",
+  "version": "4.4.0-alpha.2",
   "description": "Core service for the OpenNeuro platform.",
   "license": "MIT",
   "main": "src/server.js",
@@ -42,6 +42,7 @@
     "graphql-tools": "4.0.6",
     "immutable": "^3.8.2",
     "ioredis": "4.17.3",
+    "jest-fetch-mock": "^3.0.3",
     "jsdom": "^11.6.2",
     "jsonwebtoken": "^8.3.0",
     "mime-types": "^2.1.19",
@@ -49,7 +50,8 @@
     "mongoose": "5.12.3",
     "morgan": "^1.6.1",
     "node-fetch": "^2.6.0",
-    "nodemailer": "6.4.16",
+    "nodemailer": "^6.7.2",
+    "nodemailer-mailjet-transport": "^1.0.4",
     "object-hash": "2.1.1",
     "passport": "^0.4.0",
     "passport-globus": "^0.0.1",
@@ -103,5 +105,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "2e777ea3f979e05dab85d612ea2b0788e699b96b"
+  "gitHead": "69aec829ed581c71c46bb4bdb3561f669e619f4e"
 }
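The nodemailer upgrade and the new nodemailer-mailjet-transport dependency suggest mail delivery now goes out through Mailjet. A minimal sketch of wiring that transport, with illustrative environment variable names and sender address (not part of this diff):

```ts
import nodemailer from 'nodemailer'
import mailjetTransport from 'nodemailer-mailjet-transport'

// Sketch only: auth option names follow the transport's documented shape;
// the MAILJET_* variable names and addresses are assumptions.
const transporter = nodemailer.createTransport(
  mailjetTransport({
    auth: {
      apiKey: process.env.MAILJET_PUBLIC_KEY,
      apiSecret: process.env.MAILJET_SECRET_KEY,
    },
  }),
)

// Usage mirrors any other nodemailer transport
transporter
  .sendMail({
    from: 'no-reply@example.org', // placeholder sender
    to: 'user@example.com',
    subject: 'Dataset Import Success',
    html: '<p>…</p>',
  })
  .catch(console.error)
```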
@@ -95,10 +95,16 @@ describe('dataset model operations', () => {
       expect(testBlacklist('.git', 'HEAD')).toBe(true)
     })
     it('returns true for root level .DS_Store files', () => {
-      expect(testBlacklist('', '.DS_Store'))
+      expect(testBlacklist('', '.DS_Store')).toBe(true)
     })
     it('returns true for nested .DS_Store files', () => {
-      expect(testBlacklist('sub-01/anat/', '.DS_Store'))
+      expect(testBlacklist('sub-01/anat/', '.DS_Store')).toBe(true)
+    })
+    // https://github.com/OpenNeuroOrg/openneuro/issues/2519
+    it('skips ._ prefixed files created by macOS', () => {
+      expect(testBlacklist('', '._.DS_Store')).toBe(true)
+      expect(testBlacklist('stimuli/', '._1002.png')).toBe(true)
+      expect(testBlacklist('stimuli/', 'test._1002.png')).toBe(false)
     })
   })
 })
@@ -298,7 +298,7 @@ export const getDatasets = options => {
 }
 
 // Files to skip in uploads
-const filenameBlacklist = new RegExp(/.DS_Store|Icon\r/)
+const filenameBlacklist = new RegExp(/.DS_Store|Icon\r|^\._/)
 const pathBlacklist = new RegExp(/^.git|^.gitattributes|^.datalad|^.heudiconv/)
 export const testBlacklist = (path, filename) =>
   filenameBlacklist.test(filename) || pathBlacklist.test(path)
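Since testBlacklist applies filenameBlacklist only to the filename component, the new `^\._` alternative catches macOS AppleDouble files at any directory depth while leaving names that merely contain `._` alone. A quick illustration, mirroring the tests above:

```ts
const filenameBlacklist = new RegExp(/.DS_Store|Icon\r|^\._/)

filenameBlacklist.test('._.DS_Store') // true: "._" prefix (and ".DS_Store" itself)
filenameBlacklist.test('._1002.png') // true: AppleDouble companion file
filenameBlacklist.test('test._1002.png') // false: "._" is not at the start
filenameBlacklist.test('xDS_Store') // true: the unescaped "." matches any character
```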
@@ -505,7 +505,7 @@ export const getDatasetAnalytics = (datasetId, tag) => {
 export const getStars = datasetId => Star.find({ datasetId })
 
 export const getUserStarred = (datasetId, userId) =>
-  Star.count({ datasetId, userId }).exec()
+  Star.countDocuments({ datasetId, userId }).exec()
 
 export const getFollowers = datasetId => {
   return Subscription.find({
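This change, like the Snapshot.update, Dataset.count, DatasetModel.update, and Counter.update changes elsewhere in this diff, tracks Mongoose 5's deprecation of Model.count() and Model.update() in favor of countDocuments() and updateOne(); the call sites are otherwise drop-in. A self-contained sketch with an illustrative model:

```ts
import mongoose, { Schema } from 'mongoose'

// Illustrative model; field names follow the Star usage above
const Star = mongoose.model(
  'StarExample',
  new Schema({ datasetId: String, userId: String }),
)

async function example(datasetId: string, userId: string): Promise<boolean> {
  // Deprecated: Star.count(filter), Star.update(filter, update)
  // Current equivalents, as adopted throughout this diff:
  const starred = await Star.countDocuments({ datasetId, userId }).exec()
  await Star.updateOne({ datasetId, userId }, { $set: { userId } }).exec()
  return starred > 0
}
```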
@@ -28,7 +28,7 @@ const lockSnapshot = (datasetId, tag) => {
 }
 
 const createSnapshotMetadata = (datasetId, tag, hexsha, created) => {
-  return Snapshot.update(
+  return Snapshot.updateOne(
     { datasetId: datasetId, tag: tag },
     {
       $set: {
@@ -50,7 +50,7 @@ export const checkPermissionLevel = (permission, state) => {
 }
 
 export const checkDatasetExists = async datasetId => {
-  const found = await Dataset.count({ id: datasetId }).exec()
+  const found = await Dataset.countDocuments({ id: datasetId }).exec()
   if (!found) throw new Error(`Dataset ${datasetId} does not exist.`)
 }
 
@@ -0,0 +1,41 @@
+import fetchMock from 'jest-fetch-mock'
+import { importRemoteDataset, allowedImportUrl } from '../importRemoteDataset'
+import { checkDatasetWrite } from '../../permissions'
+
+jest.mock('../../../config')
+jest.mock('../../permissions')
+
+describe('importRemoteDataset mutation', () => {
+  it('given a user with access, it creates an import record for later processing', () => {
+    fetchMock.mockOnce(JSON.stringify(true))
+    importRemoteDataset(
+      {},
+      { datasetId: 'ds000000', url: '' },
+      { user: '1234', userInfo: { admin: true } },
+    )
+  })
+  describe('allowedImportUrl()', () => {
+    it('allows brainlife.io', () => {
+      expect(
+        allowedImportUrl('https://brainlife.io/ezbids/dataset-to-import.zip'),
+      ).toBe(true)
+    })
+    it('allows a test bucket for OpenNeuro use', () => {
+      expect(
+        allowedImportUrl(
+          'https://openneuro-test-import-bucket.s3.us-west-2.amazonaws.com/ds000003.zip',
+        ),
+      ).toBe(true)
+    })
+    it('does not allow other URLs', () => {
+      expect(allowedImportUrl('https://openneuro.org')).toBe(false)
+      expect(allowedImportUrl('iiajsdfoijawe')).toBe(false)
+      expect(allowedImportUrl('http://google.com/some-zip-file.zip')).toBe(
+        false,
+      )
+      expect(
+        allowedImportUrl('http://github.com/brainlife.io/somewhere-else.zip'),
+      ).toBe(false)
+    })
+  })
+})
@@ -83,7 +83,7 @@ export const datasetName = obj => {
  */
 export const updateDatasetName = datasetId =>
   datasetName({ id: datasetId }).then(name =>
-    DatasetModel.update({ id: datasetId }, { $set: { name } }).exec(),
+    DatasetModel.updateOne({ id: datasetId }, { $set: { name } }).exec(),
   )
 
 /**
@@ -0,0 +1,81 @@
+import fetch from 'node-fetch'
+import IngestDataset from '../../models/ingestDataset'
+import { checkDatasetWrite } from '../permissions.js'
+import { getDatasetWorker } from '../../libs/datalad-service'
+import { generateDataladCookie } from '../../libs/authentication/jwt'
+import notifications from '../../libs/notifications'
+import config from '../../config'
+
+/**
+ * Test if a URL is allowed to be imported
+ * @param raw String URL
+ * @returns {boolean}
+ */
+export function allowedImportUrl(raw: string): boolean {
+  let url
+  try {
+    url = new URL(raw)
+  } catch (_) {
+    return false
+  }
+  if (url.hostname === 'brainlife.io') {
+    return true
+  } else if (
+    url.hostname === 'openneuro-test-import-bucket.s3.us-west-2.amazonaws.com'
+  ) {
+    return true
+  } else {
+    return false
+  }
+}
+
+/**
+ * Queue a bundle of files for import into an existing dataset
+ */
+export async function importRemoteDataset(
+  _: Record<string, unknown>,
+  { datasetId, url }: { datasetId: string; url: string },
+  { user, userInfo }: { user: string; userInfo: Record<string, unknown> },
+): Promise<string | null> {
+  await checkDatasetWrite(datasetId, user, userInfo)
+  if (!allowedImportUrl(url)) {
+    return
+  }
+  const ingest = new IngestDataset({ datasetId, url, userId: user })
+  // undefined validateSync() means no errors
+  if (ingest.validateSync() == undefined) {
+    await ingest.save()
+    const importId = ingest._id.toString()
+    const worker = getDatasetWorker(datasetId)
+    const importUrl = `http://${worker}/datasets/${datasetId}/import/${importId}`
+    await fetch(importUrl, {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+        Cookie: generateDataladCookie(config)(userInfo),
+      },
+      body: JSON.stringify({ url }),
+    })
+    return ingest._id.toString()
+  } else {
+    return
+  }
+}
+
+export async function finishImportRemoteDataset(
+  _: Record<string, unknown>,
+  { id, success, message }: { id: string; success: boolean; message: string },
+  { user, userInfo }: { user: string; userInfo: Record<string, unknown> },
+): Promise<boolean> {
+  const ingest = await IngestDataset.findById(id)
+  ingest.imported = success
+  await ingest.save()
+  await notifications.datasetImported(
+    ingest.datasetId,
+    ingest.userId,
+    success,
+    message,
+    ingest.url,
+  )
+  return true
+}
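A minimal sketch of driving this resolver directly, as the new test above does. The empty first argument is the unused GraphQL parent, and the context values here are illustrative:

```ts
import { importRemoteDataset, allowedImportUrl } from './importRemoteDataset'

async function queueImport(): Promise<void> {
  const url = 'https://brainlife.io/ezbids/dataset-to-import.zip'
  // allowedImportUrl is a plain predicate, so it can be checked up front
  if (allowedImportUrl(url)) {
    const importId = await importRemoteDataset(
      {}, // GraphQL parent, unused
      { datasetId: 'ds000000', url },
      { user: '1234', userInfo: { admin: true } }, // illustrative context
    )
    // importId is the new IngestDataset _id, or undefined if validation failed
    console.log(importId)
  }
}
```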
@@ -36,6 +36,10 @@ import { reexportRemotes } from './reexporter'
 import { resetDraft } from './reset'
 import { createReviewer, deleteReviewer } from './reviewer'
 import { createRelation, deleteRelation } from './relation'
+import {
+  importRemoteDataset,
+  finishImportRemoteDataset,
+} from './importRemoteDataset'
 
 const Mutation = {
   createDataset,
@@ -79,6 +83,8 @@ const Mutation = {
   deleteReviewer,
   createRelation,
   deleteRelation,
+  importRemoteDataset,
+  finishImportRemoteDataset,
 }
 
 export default Mutation
@@ -187,6 +187,10 @@ export const typeDefs = `
     createRelation(datasetId: ID!, doi: String!, relation: RelatedObjectRelation!, kind: RelatedObjectKind!, description: String): Dataset
     # Remove a relationship to an external DOI
     deleteRelation(datasetId: ID!, doi: String!): Dataset
+    # Submit an import for a remote dataset, returns id if the URL is accepted for import
+    importRemoteDataset(datasetId: ID!, url: String!): ID
+    # Finish and notify import is done, returns true if successful
+    finishImportRemoteDataset(id: ID!, success: Boolean!, message: String): Boolean
   }
 
   # Anonymous dataset reviewer
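For reference, client operations against these new schema fields might look like the following sketch (the operation and variable names here are ours, not taken from this diff):

```ts
// The upload client submits the import and receives an ingest id
const IMPORT_REMOTE_DATASET = `
  mutation ImportRemoteDataset($datasetId: ID!, $url: String!) {
    importRemoteDataset(datasetId: $datasetId, url: $url)
  }
`

// The dataset worker reports completion, which triggers the notification email
const FINISH_IMPORT_REMOTE_DATASET = `
  mutation FinishImportRemoteDataset($id: ID!, $success: Boolean!, $message: String) {
    finishImportRemoteDataset(id: $id, success: $success, message: $message)
  }
`
```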
@@ -15,7 +15,7 @@ export default {
   getNext(type, callback) {
     Counter.findOne({ _id: type }).then(found => {
       if (found) {
-        Counter.update({ _id: type }, { $inc: { sequence_value: 1 } }).then(
+        Counter.updateOne({ _id: type }, { $inc: { sequence_value: 1 } }).then(
           callback(found.sequence_value + 1),
         )
       } else {
@@ -0,0 +1,93 @@
+export const datasetImportFailed = ({
+  siteUrl,
+  name,
+  datasetId,
+  message,
+  retryUrl,
+}: {
+  siteUrl: string
+  name: string
+  datasetId: string
+  message: string
+  retryUrl: string
+}): string => `<html>
+  <head>
+    <style>
+      body {
+        font-family: 'Open Sans', sans-serif;
+        font-weight: lighter;
+        background: #F5F5F5;
+      }
+      footer {
+        border-top: 1px solid #333;
+        padding-top: 15px;
+        background: #F5F5F5;
+      }
+      .link {
+        color: #00505c
+      }
+      .link:hover {
+        color: #0093a9
+      }
+      .top-bar {
+        width: 100%;
+        background: #333;
+        padding: 8px 0px 8px 15px;
+      }
+      .content {
+        padding: 15px;
+      }
+      p {
+        font-size: 16px;
+        font-weight: lighter;
+      }
+      b {
+        font-weight: bold;
+      }
+      .dataset-link {
+        display: inline-block;
+        background: #008599;
+        color: #FFF;
+        font-size: 20px;
+        padding: 8px 15px;
+        text-decoration: none;
+        cursor: pointer;
+      }
+      .comment {
+        border: 1px solid #ccc;
+        padding: 15px;
+      }
+      .log {
+        white-space: pre-wrap;
+      }
+      .FAILED {color: #d9534f;}
+      .FINISHED {color: #5cb85c;}
+    </style>
+  </head>
+  <body>
+    <div class="top-bar">
+      <img src="${siteUrl}/assets/CRN-Logo-Placeholder.png" />
+    </div>
+    <div class="content">
+      <h2>Hi, ${name}</h2>
+
+      <p>
+        A dataset import you requested failed. The target dataset was <b>${datasetId}</b>.
+      </p>
+
+      <div>
+        <a class='dataset-link' href="${siteUrl}/import?url=${retryUrl}&datasetId=${datasetId}">Click here to retry the import &raquo;</a>
+      </div>
+
+      <div>
+        <h4>Import Log</h4>
+        <p class="log">${message}</p>
+      </div>
+
+      <p>
+        Sincerely,
+        The CRN Team
+      </p>
+    </div>
+  </body>
+</html>`
@@ -0,0 +1,84 @@
+export const datasetImportEmail = ({
+  siteUrl,
+  name,
+  datasetId,
+}: {
+  siteUrl: string
+  name: string
+  datasetId: string
+}): string => `<html>
+  <head>
+    <style>
+      body {
+        font-family: 'Open Sans', sans-serif;
+        font-weight: lighter;
+        background: #F5F5F5;
+      }
+      footer {
+        border-top: 1px solid #333;
+        padding-top: 15px;
+        background: #F5F5F5;
+      }
+      .link {
+        color: #00505c
+      }
+      .link:hover {
+        color: #0093a9
+      }
+      .top-bar {
+        width: 100%;
+        background: #333;
+        padding: 8px 0px 8px 15px;
+      }
+      .content {
+        padding: 15px;
+      }
+      p {
+        font-size: 16px;
+        font-weight: lighter;
+      }
+      b {
+        font-weight: bold;
+      }
+      .dataset-link {
+        display: inline-block;
+        background: #008599;
+        color: #FFF;
+        font-size: 20px;
+        padding: 8px 15px;
+        text-decoration: none;
+        cursor: pointer;
+      }
+      .comment {
+        border: 1px solid #ccc;
+        padding: 15px;
+      }
+      .log {
+        white-space: pre-wrap;
+      }
+      .FAILED {color: #d9534f;}
+      .FINISHED {color: #5cb85c;}
+    </style>
+  </head>
+  <body>
+    <div class="top-bar">
+      <img src="${siteUrl}/assets/CRN-Logo-Placeholder.png" />
+    </div>
+    <div class="content">
+      <h2>Hi, ${name}</h2>
+
+      <p>
+        A dataset import you requested has finished. It was imported as <b>${datasetId}</b>.
+      </p>
+
+      <div>
+        <a class='dataset-link' href="${siteUrl}/datasets/${datasetId}">Click here to view the dataset on OpenNeuro &raquo;</a>
+      </div>
+
+      <p>
+        Sincerely,
+        The CRN Team
+      </p>
+    </div>
+  </body>
+</html>`
@@ -19,6 +19,8 @@ import { datasetDeleted } from '../libs/email/templates/dataset-deleted'
 import { ownerUnsubscribed } from '../libs/email/templates/owner-unsubscribed'
 import { snapshotCreated } from '../libs/email/templates/snapshot-created'
 import { snapshotReminder } from '../libs/email/templates/snapshot-reminder'
+import { datasetImportEmail } from '../libs/email/templates/dataset-imported'
+import { datasetImportFailed } from '../libs/email/templates/dataset-import-failed'
 
 function noop() {
   // No callback helper
@@ -60,32 +62,23 @@ const notifications = {
    */
   async snapshotCreated(datasetId, body, uploader) {
     const tag = body.tag
-    // if we still have a promise for the body files, await it
-    const files = await body.files
     const uploaderId = uploader ? uploader.id : null
-    const datasetDescription = files.find(
-      file => file.filename == 'dataset_description.json',
-    )
-    const datasetDescriptionId = datasetDescription
-      ? datasetDescription.id
-      : null
     const URI = getDatasetWorker(datasetId)
-    const datasetDescriptionUrl = `${URI}/datasets/${datasetId}/objects/${datasetDescriptionId}`
-
-    const changesFile = files.find(file => file.filename == 'CHANGES')
-    const changesId = changesFile ? changesFile.id : null
-    const changesUrl = `${URI}/datasets/${datasetId}/objects/${changesId}`
+    const datasetDescriptionUrl = `${URI}/datasets/${datasetId}/snapshots/${tag}/files/dataset_description.json`
+    const changesUrl = `${URI}/datasets/${datasetId}/snapshots/${tag}/files/CHANGES`
 
     // get the dataset description
-    let response = await request.get(datasetDescriptionUrl)
-    const description = response.body
+    const descriptionResponse = await request.get(datasetDescriptionUrl)
+    const description = descriptionResponse.body
     const datasetLabel = description.Name ? description.Name : 'Unnamed Dataset'
 
     // get the snapshot changelog
-    response = await request
+    const changesResponse = await request
       .get(changesUrl)
       .responseType('application/octet-stream')
-    const changelog = response.body ? response.body.toString() : null
+    const changelog = changesResponse.body
+      ? changesResponse.body.toString()
+      : null
     // get all users that are subscribed to the dataset
     const subscriptions = await Subscription.find({
       datasetId: datasetId,
@@ -342,6 +335,50 @@
     )
   },
 
+  /**
+   * Import of a remote resource finished
+   * @param {string} datasetId
+   * @param {string} userId
+   * @param {boolean} success
+   * @param {string} message
+   */
+  async datasetImported(datasetId, userId, success, message, retryUrl) {
+    const user = await User.findOne({ id: userId }).exec()
+    let html
+    if (success) {
+      html = datasetImportEmail({
+        name: user.name,
+        datasetId: datasetId,
+        siteUrl:
+          url.parse(config.url).protocol +
+          '//' +
+          url.parse(config.url).hostname,
+      })
+    } else {
+      html = datasetImportFailed({
+        name: user.name,
+        datasetId: datasetId,
+        message: success ? '' : message,
+        siteUrl:
+          url.parse(config.url).protocol +
+          '//' +
+          url.parse(config.url).hostname,
+        retryUrl: retryUrl,
+      })
+    }
+    const emailContent = {
+      _id: datasetId + '_' + user._id + '_' + 'dataset_imported',
+      type: 'email',
+      email: {
+        to: user.email,
+        subject: `Dataset Import ${success ? 'Success' : 'Failed'}`,
+        html: html,
+      },
+    }
+    // send the email to the notifications database for distribution
+    notifications.add(emailContent, noop)
+  },
+
   initCron() {
     setInterval(() => {
       // After one hour, retry a notification even if we have a lock
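The siteUrl value passed to both templates is built with the legacy url.parse API; an equivalent sketch using the WHATWG URL class (already used by validateUrl in this release), for comparison:

```ts
// protocol + '//' + hostname drops any explicit port;
// URL.origin would keep the port, so hostname is the faithful equivalent here
const parsed = new URL('https://openneuro.org/some/path')
const siteUrl = `${parsed.protocol}//${parsed.hostname}` // 'https://openneuro.org'
```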
@@ -0,0 +1,37 @@
+import IngestDataset from '../ingestDataset'
+
+describe('IngestDataset model', () => {
+  it('IngestDataset model fails if required fields are missing', done => {
+    const model = new IngestDataset()
+    model.validate(result => {
+      expect(result.name).toEqual('ValidationError')
+      done()
+    })
+  })
+  it('IngestDataset model URL validation fails with a bad URL', done => {
+    const badUrlModel = new IngestDataset({
+      datasetId: 'ds00000',
+      userId: 'b3df6399-d1be-4e07-b997-9f7aa3ed1f8e',
+      url: 'this is not a valid URL',
+      imported: false,
+      notified: false,
+    })
+    badUrlModel.validate(result => {
+      expect(result.name).toEqual('ValidationError')
+      done()
+    })
+  })
+  it('IngestDataset model URL validation succeeds with a good URL', done => {
+    const goodUrlModel = new IngestDataset({
+      datasetId: 'ds00000',
+      userId: 'b3df6399-d1be-4e07-b997-9f7aa3ed1f8e',
+      url: 'https://example.com',
+      imported: false,
+      notified: false,
+    })
+    goodUrlModel.validate(result => {
+      expect(result).toBe(null)
+      done()
+    })
+  })
+})
@@ -0,0 +1,35 @@
+/**
+ * Model for ingest of new datasets from a remote URL (zip/tarball)
+ */
+import mongoose, { Document, ObjectId } from 'mongoose'
+const { Schema, model } = mongoose
+import { validateUrl } from '../utils/validateUrl'
+
+export interface IngestDatasetDocument extends Document {
+  _id: ObjectId
+  datasetId: string
+  userId: string
+  url: string
+  imported: boolean
+}
+
+const ingestDatasetSchema = new Schema({
+  datasetId: { type: String, required: true },
+  userId: { type: String, required: true },
+  url: {
+    type: String,
+    required: true,
+    validate: {
+      validator: validateUrl,
+      message: 'Must be a valid HTTPS URL',
+    },
+  },
+  imported: { type: Boolean, required: true, default: false },
+})
+
+const IngestDataset = model<IngestDatasetDocument>(
+  'IngestDataset',
+  ingestDatasetSchema,
+)
+
+export default IngestDataset
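validateSync() returns undefined when a document passes schema validation, which is the check importRemoteDataset keys on; a quick illustration with this model (field values are illustrative):

```ts
import IngestDataset from './ingestDataset'

const doc = new IngestDataset({
  datasetId: 'ds000001',
  userId: 'b3df6399-d1be-4e07-b997-9f7aa3ed1f8e', // illustrative user id
  url: 'https://brainlife.io/ezbids/example.zip',
})

// undefined means valid; otherwise a ValidationError describing the failures
const error = doc.validateSync()
if (error === undefined) {
  // safe to save() and queue the import
}
```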
@@ -0,0 +1,13 @@
+import { validateUrl } from '../validateUrl'
+
+describe('validateUrl', () => {
+  it('returns true for a regular HTTPS url', () => {
+    expect(validateUrl('https://openneuro.org')).toBe(true)
+  })
+  it('returns false for a regular HTTP url', () => {
+    expect(validateUrl('http://openneuro.org')).toBe(false)
+  })
+  it('returns false for something that is not really a URL', () => {
+    expect(validateUrl('openneuro.org/robots.txt')).toBe(false)
+  })
+})
@@ -0,0 +1,13 @@
+/**
+ * Test if a string is an HTTPS URL
+ * @param value Text string to test for a URL
+ * @returns {boolean} True if valid
+ */
+export function validateUrl(value: string): boolean {
+  try {
+    const url = new URL(value)
+    return url.protocol === 'https:'
+  } catch (err) {
+    return false
+  }
+}
@@ -1,20 +0,0 @@
-/* eslint-disable no-console */
-/**
- * Prepopulate Dataset.name field on upgrade
- */
-import path from 'path'
-import Dataset from '../models/dataset'
-import { datasetName } from '../graphql/resolvers/dataset.js'
-
-export default {
-  id: path.basename(module.filename),
-  update: async () => {
-    const datasets = await Dataset.find().exec()
-    for (const dataset of datasets) {
-      await datasetName(dataset).then(name => {
-        console.log(`Updating ${dataset.id} with name "${name}"`)
-        return Dataset.update({ id: dataset.id }, { $set: { name } }).exec()
-      })
-    }
-  },
-}
@@ -1,4 +0,0 @@
-import nameField from './05-nameField.js'
-
-// Ordered list should match a sort of the filenames
-export default [nameField]
@@ -1,50 +0,0 @@
-/* eslint-disable no-console */
-// Run all pending migrations
-import config from '../config.js'
-import { connect as redisConnect } from '../libs/redis.js'
-import mongoose from 'mongoose'
-import Migration from '../models/migration'
-import migrations from './index.js'
-
-// Setup Mongoose
-mongoose.connect(`${config.mongo.url}crn`)
-
-/**
- * This is a basic migration system, runs any unapplied updates in order
- * from the index provided in index.js
- *
- * Will yell at you if there are errors.
- *
- * Runs manually for now but could run at startup.
- */
-const upgradeAll = async () => {
-  await redisConnect(config.redis)
-  // Connect to old database(s)
-  for (const migrationDefinition of migrations) {
-    const key = migrationDefinition.id
-    const migrate = await Migration.findOneAndUpdate(
-      { id: key },
-      {},
-      { upsert: true, new: true, setDefaultsOnInsert: true },
-    )
-    try {
-      if (migrate.complete) {
-        console.log(`${key} has already run - continuing`)
-      } else {
-        await migrationDefinition.update()
-        console.log(`${key} migration complete`)
-        migrate.complete = true
-        await migrate.save()
-      }
-    } catch (e) {
-      console.log(`${key} failed to execute - exiting`)
-      throw e
-    }
-  }
-}
-
-// Entrypoint
-upgradeAll().then(() => {
-  mongoose.connection.close()
-  process.exit(0)
-})
@@ -1,16 +0,0 @@
-import mongoose, { Document } from 'mongoose'
-const { Schema, model } = mongoose
-
-export interface MigrationDocument extends Document {
-  id: string
-  complete: boolean
-}
-
-const migrationSchema = new Schema({
-  id: String,
-  complete: { type: Boolean, default: false },
-})
-
-const Migration = model<MigrationDocument>('Migration', migrationSchema)
-
-export default Migration