@cloudant/couchbackup 2.7.1-SNAPSHOT.53 → 2.7.1-SNAPSHOT.57

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.secrets.baseline CHANGED
@@ -3,7 +3,7 @@
3
3
  "files": "package-lock.json|test/fixtures|^.secrets.baseline$",
4
4
  "lines": null
5
5
  },
6
- "generated_at": "2021-11-08T12:18:50Z",
6
+ "generated_at": "2021-11-16T14:34:16Z",
7
7
  "plugins_used": [
8
8
  {
9
9
  "name": "AWSKeyDetector"
@@ -82,7 +82,7 @@
82
82
  "hashed_secret": "90c240c0ecfb254c589319f022de059fc739c54a",
83
83
  "is_secret": false,
84
84
  "is_verified": false,
85
- "line_number": 140,
85
+ "line_number": 152,
86
86
  "type": "NPM tokens",
87
87
  "verified_result": null
88
88
  }
@@ -130,7 +130,7 @@
130
130
  "hashed_secret": "6367c48dd193d56ea7b0baad25b19455e529f5ee",
131
131
  "is_secret": false,
132
132
  "is_verified": false,
133
- "line_number": 167,
133
+ "line_number": 178,
134
134
  "type": "Secret Keyword",
135
135
  "verified_result": null
136
136
  },
@@ -138,7 +138,7 @@
138
138
  "hashed_secret": "e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98",
139
139
  "is_secret": false,
140
140
  "is_verified": false,
141
- "line_number": 167,
141
+ "line_number": 178,
142
142
  "type": "Basic Auth Credentials",
143
143
  "verified_result": null
144
144
  }
@@ -200,7 +200,7 @@
200
200
  "hashed_secret": "9d4e1e23bd5b727046a9e3b4b7db57bd8d6ee684",
201
201
  "is_secret": false,
202
202
  "is_verified": false,
203
- "line_number": 126,
203
+ "line_number": 143,
204
204
  "type": "Basic Auth Credentials",
205
205
  "verified_result": null
206
206
  },
@@ -208,7 +208,7 @@
208
208
  "hashed_secret": "e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98",
209
209
  "is_secret": false,
210
210
  "is_verified": false,
211
- "line_number": 264,
211
+ "line_number": 305,
212
212
  "type": "Basic Auth Credentials",
213
213
  "verified_result": null
214
214
  }
package/CHANGES.md CHANGED
@@ -2,6 +2,8 @@
2
2
  - [FIXED] Corrected `user-agent` header on requests.
3
3
  - [FIXED] Restore of shallow backups created with versions <=2.4.2.
4
4
  - [IMPROVED] Added quiet option to backup and restore to suppress batch messages.
5
+ - [IMPROVED] Added a preflight check for restore function to make sure that a target database is new and empty.
6
+ - [IMPROVED] Added handling for errors reading log file.
5
7
 
6
8
  # 2.7.0 (2021-09-14)
7
9
  - [UPGRADED] Cloudant client dependency from `@cloudant/cloudant` to `@ibm-cloud/cloudant`.
package/README.md CHANGED
@@ -425,6 +425,8 @@ details them.
425
425
 
426
426
  ### `couchrestore`
427
427
 
428
+ * `13`: restore target database is not new and empty.
429
+
428
430
  ## Note on attachments
429
431
 
430
432
  TLDR; If you backup a database that contains attachments you will not be able to restore it.
package/app.js CHANGED
@@ -154,31 +154,63 @@ function addEventListener(indicator, emitter, event, f) {
154
154
  }
155
155
 
156
156
  /*
157
- Check the referenced database exists and that the credentials used have
157
+ Check the backup database exists and that the credentials used have
158
158
  visibility. Callback with a fatal error if there is a problem with the DB.
159
159
  @param {string} db - database object
160
160
  @param {function(err)} callback - error is undefined if DB exists
161
161
  */
162
- function proceedIfDbValid(db, callback) {
162
+ function proceedIfBackupDbValid(db, callback) {
163
163
  db.service.headDatabase({ db: db.db }).then(() => callback()).catch(err => {
164
- err = error.convertResponseError(err, function(err) {
165
- if (err && err.status === 404) {
166
- // Override the error type and mesasge for the DB not found case
167
- var msg = `Database ${db.url}` +
168
- `${db.db} does not exist. ` +
169
- 'Check the URL and database name have been specified correctly.';
170
- var noDBErr = new Error(msg);
171
- noDBErr.name = 'DatabaseNotFound';
172
- return noDBErr;
173
- } else {
174
- // Delegate to the default error factory if it wasn't a 404
175
- return error.convertResponseError(err);
176
- }
177
- });
164
+ err = error.convertResponseError(err, err => parseIfDbValidResponseError(db, err));
165
+ callback(err);
166
+ });
167
+ }
168
+
169
+ /*
170
+ Check that the restore database exists, is new and is empty. Also verify that the credentials used have
171
+ visibility. Callback with a fatal error if there is a problem with the DB.
172
+ @param {string} db - database object
173
+ @param {function(err)} callback - error is undefined if DB exists, new and empty
174
+ */
175
+ function proceedIfRestoreDbValid(db, callback) {
176
+ db.service.getDatabaseInformation({ db: db.db }).then(response => {
177
+ const { doc_count: docCount, doc_del_count: deletedDocCount } = response.result;
178
+ // The system databases can have a validation ddoc(s) injected in them on creation.
179
+ // This sets the doc count off, so we just completely exclude the system databases from this check.
180
+ // The assumption here is that users restoring system databases know what they are doing.
181
+ if (!db.db.startsWith('_') && (docCount !== 0 || deletedDocCount !== 0)) {
182
+ const notEmptyDBErr = new Error(`Target database ${db.url}${db.db} is not empty.`);
183
+ notEmptyDBErr.name = 'DatabaseNotEmpty';
184
+ callback(notEmptyDBErr);
185
+ } else {
186
+ callback();
187
+ }
188
+ }).catch(err => {
189
+ err = error.convertResponseError(err, err => parseIfDbValidResponseError(db, err));
178
190
  callback(err);
179
191
  });
180
192
  }
181
193
 
194
+ /*
195
+ Convert the database validation response error to a special DatabaseNotFound error
196
+ in case the database is missing. Otherwise delegate to the default error factory.
197
+ @param {object} db - database object
198
+ @param {object} err - HTTP response error
199
+ */
200
+ function parseIfDbValidResponseError(db, err) {
201
+ if (err && err.status === 404) {
202
+ // Override the error type and message for the DB not found case
203
+ const msg = `Database ${db.url}` +
204
+ `${db.db} does not exist. ` +
205
+ 'Check the URL and database name have been specified correctly.';
206
+ const noDBErr = new Error(msg);
207
+ noDBErr.name = 'DatabaseNotFound';
208
+ return noDBErr;
209
+ }
210
+ // Delegate to the default error factory if it wasn't a 404
211
+ return error.convertResponseError(err);
212
+ }
213
+
182
214
  module.exports = {
183
215
 
184
216
  /**
@@ -197,7 +229,7 @@ module.exports = {
197
229
  * @param {backupRestoreCallback} callback - Called on completion.
198
230
  */
199
231
  backup: function(srcUrl, targetStream, opts, callback) {
200
- var listenerErrorIndicator = { errored: false };
232
+ const listenerErrorIndicator = { errored: false };
201
233
  if (typeof callback === 'undefined' && typeof opts === 'function') {
202
234
  callback = opts;
203
235
  opts = {};
@@ -222,7 +254,7 @@ module.exports = {
222
254
  const backupDB = request.client(srcUrl, opts);
223
255
 
224
256
  // Validate the DB exists, before proceeding to backup
225
- proceedIfDbValid(backupDB, function(err) {
257
+ proceedIfBackupDbValid(backupDB, function(err) {
226
258
  if (err) {
227
259
  if (err.name === 'DatabaseNotFound') {
228
260
  err.message = `${err.message} Ensure the backup source database exists.`;
@@ -231,7 +263,7 @@ module.exports = {
231
263
  callback(err);
232
264
  return;
233
265
  }
234
- var backup = null;
266
+ let backup = null;
235
267
  if (opts.mode === 'shallow') {
236
268
  backup = backupShallow;
237
269
  } else { // full mode
@@ -316,7 +348,7 @@ module.exports = {
316
348
  * @param {backupRestoreCallback} callback - Called on completion.
317
349
  */
318
350
  restore: function(srcStream, targetUrl, opts, callback) {
319
- var listenerErrorIndicator = { errored: false };
351
+ const listenerErrorIndicator = { errored: false };
320
352
  if (typeof callback === 'undefined' && typeof opts === 'function') {
321
353
  callback = opts;
322
354
  opts = {};
@@ -330,10 +362,12 @@ module.exports = {
330
362
  const restoreDB = request.client(targetUrl, opts);
331
363
 
332
364
  // Validate the DB exists, before proceeding to restore
333
- proceedIfDbValid(restoreDB, function(err) {
365
+ proceedIfRestoreDbValid(restoreDB, function(err) {
334
366
  if (err) {
335
367
  if (err.name === 'DatabaseNotFound') {
336
368
  err.message = `${err.message} Create the target database before restoring.`;
369
+ } else if (err.name === 'DatabaseNotEmpty') {
370
+ err.message = `${err.message} A target database must be a new and empty database.`;
337
371
  }
338
372
  // Didn't exist, or another fatal error, exit
339
373
  callback(err);
@@ -25,10 +25,10 @@ const backupBatchDebug = debug('couchbackup:backup:batch');
25
25
 
26
26
  backupDebug.enabled = true;
27
27
 
28
- var program = parser.parseBackupArgs();
28
+ const program = parser.parseBackupArgs();
29
29
 
30
- var databaseUrl = cliutils.databaseUrl(program.url, program.db);
31
- var opts = {
30
+ const databaseUrl = cliutils.databaseUrl(program.url, program.db);
31
+ const opts = {
32
32
  bufferSize: program.bufferSize,
33
33
  log: program.log,
34
34
  mode: program.mode,
@@ -47,11 +47,11 @@ console.error('='.repeat(80));
47
47
 
48
48
  backupBatchDebug.enabled = !program.quiet;
49
49
 
50
- var ws = process.stdout;
50
+ let ws = process.stdout;
51
51
 
52
52
  // open output file
53
53
  if (program.output) {
54
- var flags = 'w';
54
+ let flags = 'w';
55
55
  if (program.log && program.resume) {
56
56
  flags = 'a';
57
57
  }
@@ -24,9 +24,9 @@ const restoreBatchDebug = debug('couchbackup:restore:batch');
24
24
 
25
25
  restoreDebug.enabled = true;
26
26
 
27
- var program = parser.parseRestoreArgs();
28
- var databaseUrl = cliutils.databaseUrl(program.url, program.db);
29
- var opts = {
27
+ const program = parser.parseRestoreArgs();
28
+ const databaseUrl = cliutils.databaseUrl(program.url, program.db);
29
+ const opts = {
30
30
  bufferSize: program.bufferSize,
31
31
  parallelism: program.parallelism,
32
32
  requestTimeout: program.requestTimeout,
@@ -105,8 +105,8 @@ function validateBulkGetSupport(db, callback) {
105
105
  * (err, {batches: batch, docs: doccount}) {@see spoolchanges}.
106
106
  */
107
107
  function downloadRemainingBatches(log, db, ee, startTime, batchesPerDownloadSession, parallelism) {
108
- var total = 0; // running total of documents downloaded so far
109
- var noRemainingBatches = false;
108
+ let total = 0; // running total of documents downloaded so far
109
+ let noRemainingBatches = false;
110
110
 
111
111
  // Generate a set of batches (up to batchesPerDownloadSession) to download from the
112
112
  // log file and download them. Set noRemainingBatches to `true` for last batch.
@@ -114,12 +114,18 @@ function downloadRemainingBatches(log, db, ee, startTime, batchesPerDownloadSess
114
114
  // Fetch the doc IDs for the batches in the current set to
115
115
  // download them.
116
116
  function batchSetComplete(err, data) {
117
- total = data.total;
118
- done();
117
+ if (!err) {
118
+ total = data.total;
119
+ }
120
+ done(err);
119
121
  }
120
122
  function processRetrievedBatches(err, batches) {
121
- // process them in parallelised queue
122
- processBatchSet(db, parallelism, log, batches, ee, startTime, total, batchSetComplete);
123
+ if (!err) {
124
+ // process them in parallelised queue
125
+ processBatchSet(db, parallelism, log, batches, ee, startTime, total, batchSetComplete);
126
+ } else {
127
+ batchSetComplete(err);
128
+ }
123
129
  }
124
130
 
125
131
  readBatchSetIdsFromLogFile(log, batchesPerDownloadSession, function(err, batchSetIds) {
@@ -157,18 +163,22 @@ function downloadRemainingBatches(log, db, ee, startTime, batchesPerDownloadSess
157
163
  */
158
164
  function readBatchSetIdsFromLogFile(log, batchesPerDownloadSession, callback) {
159
165
  logfilesummary(log, function processSummary(err, summary) {
160
- if (!summary.changesComplete) {
161
- callback(new error.BackupError('IncompleteChangesInLogFile',
162
- 'WARNING: Changes did not finish spooling'));
163
- return;
164
- }
165
- if (Object.keys(summary.batches).length === 0) {
166
- return callback(null, []);
167
- }
166
+ if (!err) {
167
+ if (!summary.changesComplete) {
168
+ callback(new error.BackupError('IncompleteChangesInLogFile',
169
+ 'WARNING: Changes did not finish spooling'));
170
+ return;
171
+ }
172
+ if (Object.keys(summary.batches).length === 0) {
173
+ return callback(null, []);
174
+ }
168
175
 
169
- // batch IDs are the property names of summary.batches
170
- var batchSetIds = getPropertyNames(summary.batches, batchesPerDownloadSession);
171
- callback(null, batchSetIds);
176
+ // batch IDs are the property names of summary.batches
177
+ const batchSetIds = getPropertyNames(summary.batches, batchesPerDownloadSession);
178
+ callback(null, batchSetIds);
179
+ } else {
180
+ callback(err);
181
+ }
172
182
  });
173
183
  }
174
184
 
@@ -188,13 +198,13 @@ function readBatchSetIdsFromLogFile(log, batchesPerDownloadSession, callback) {
188
198
  * @param {any} callback - completion callback, (err, {total: number}).
189
199
  */
190
200
  function processBatchSet(db, parallelism, log, batches, ee, start, grandtotal, callback) {
191
- var hasErrored = false;
192
- var total = grandtotal;
201
+ let hasErrored = false;
202
+ let total = grandtotal;
193
203
 
194
204
  // queue to process the fetch requests in an orderly fashion using _bulk_get
195
- var q = async.queue(function(payload, done) {
196
- var output = [];
197
- var thisBatch = payload.batch;
205
+ const q = async.queue(function(payload, done) {
206
+ const output = [];
207
+ const thisBatch = payload.batch;
198
208
  delete payload.batch;
199
209
  delete payload.command;
200
210
 
@@ -223,7 +233,7 @@ function processBatchSet(db, parallelism, log, batches, ee, start, grandtotal, c
223
233
  }
224
234
  });
225
235
  total += output.length;
226
- var t = (new Date().getTime() - start) / 1000;
236
+ const t = (new Date().getTime() - start) / 1000;
227
237
  ee.emit('received', {
228
238
  batch: thisBatch,
229
239
  data: output,
@@ -243,7 +253,7 @@ function processBatchSet(db, parallelism, log, batches, ee, start, grandtotal, c
243
253
  });
244
254
  }, parallelism);
245
255
 
246
- for (var i in batches) {
256
+ for (const i in batches) {
247
257
  q.push(batches[i]);
248
258
  }
249
259
 
@@ -260,9 +270,9 @@ function processBatchSet(db, parallelism, log, batches, ee, start, grandtotal, c
260
270
  */
261
271
  function getPropertyNames(obj, count) {
262
272
  // decide which batch numbers to deal with
263
- var batchestofetch = [];
264
- var j = 0;
265
- for (var i in obj) {
273
+ const batchestofetch = [];
274
+ let j = 0;
275
+ for (const i in obj) {
266
276
  batchestofetch.push(parseInt(i));
267
277
  j++;
268
278
  if (j >= count) break;
@@ -14,13 +14,13 @@
14
14
  'use strict';
15
15
 
16
16
  // stolen from http://strongloop.com/strongblog/practical-examples-of-the-new-node-js-streams-api/
17
- var stream = require('stream');
17
+ const stream = require('stream');
18
18
 
19
19
  module.exports = function(onChange) {
20
- var change = new stream.Transform({ objectMode: true });
20
+ const change = new stream.Transform({ objectMode: true });
21
21
 
22
22
  change._transform = function(line, encoding, done) {
23
- var obj = null;
23
+ let obj = null;
24
24
 
25
25
  // one change per line - remove the trailing comma
26
26
  line = line.trim().replace(/,$/, '');
@@ -13,8 +13,8 @@
13
13
  // limitations under the License.
14
14
  'use strict';
15
15
 
16
- var path = require('path');
17
- var tmp = require('tmp');
16
+ const path = require('path');
17
+ const tmp = require('tmp');
18
18
 
19
19
  /**
20
20
  Return API default settings.
@@ -34,7 +34,7 @@ function apiDefaults() {
34
34
  Return CLI default settings.
35
35
  */
36
36
  function cliDefaults() {
37
- var defaults = apiDefaults();
37
+ const defaults = apiDefaults();
38
38
 
39
39
  // add additional legacy settings
40
40
  defaults.db = 'test';
package/includes/error.js CHANGED
@@ -20,6 +20,7 @@ const codes = {
20
20
  DatabaseNotFound: 10,
21
21
  Unauthorized: 11,
22
22
  Forbidden: 12,
23
+ DatabaseNotEmpty: 13,
23
24
  NoLogFileName: 20,
24
25
  LogDoesNotExist: 21,
25
26
  IncompleteChangesInLogFile: 22,
package/includes/liner.js CHANGED
@@ -14,21 +14,21 @@
14
14
  'use strict';
15
15
 
16
16
  // stolen from http://strongloop.com/strongblog/practical-examples-of-the-new-node-js-streams-api/
17
- var stream = require('stream');
17
+ const stream = require('stream');
18
18
 
19
19
  module.exports = function() {
20
- var liner = new stream.Transform({ objectMode: true });
20
+ const liner = new stream.Transform({ objectMode: true });
21
21
 
22
22
  liner._transform = function(chunk, encoding, done) {
23
- var data = chunk.toString();
23
+ let data = chunk.toString();
24
24
  if (this._lastLineData) {
25
25
  data = this._lastLineData + data;
26
26
  }
27
27
 
28
- var lines = data.split('\n');
28
+ const lines = data.split('\n');
29
29
  this._lastLineData = lines.splice(lines.length - 1, 1)[0];
30
30
 
31
- for (var i in lines) {
31
+ for (const i in lines) {
32
32
  this.push(lines[i]);
33
33
  }
34
34
  done();
@@ -17,17 +17,17 @@ const fs = require('fs');
17
17
  const stream = require('stream');
18
18
  const liner = require('./liner.js');
19
19
 
20
- var onLine = function(onCommand, batches) {
21
- var change = new stream.Transform({ objectMode: true });
20
+ const onLine = function(onCommand, batches) {
21
+ const change = new stream.Transform({ objectMode: true });
22
22
  change._transform = function(line, encoding, done) {
23
23
  if (line && line[0] === ':') {
24
- var obj = {
24
+ const obj = {
25
25
  command: null,
26
26
  batch: null,
27
27
  docs: []
28
28
  };
29
29
 
30
- var matches;
30
+ let matches;
31
31
 
32
32
  // extract command
33
33
  matches = line.match(/^:([a-z_]+) ?/);
@@ -43,7 +43,7 @@ var onLine = function(onCommand, batches) {
43
43
 
44
44
  // if this is one we want
45
45
  if (obj.command === 't' && batches.indexOf(obj.batch) > -1) {
46
- var json = line.replace(/^.* batch[0-9]+ /, '').trim();
46
+ const json = line.replace(/^.* batch[0-9]+ /, '').trim();
47
47
  obj.docs = JSON.parse(json);
48
48
  onCommand(obj);
49
49
  }
@@ -55,10 +55,10 @@ var onLine = function(onCommand, batches) {
55
55
 
56
56
  module.exports = function(log, batches, callback) {
57
57
  // our sense of state
58
- var retval = { };
58
+ const retval = { };
59
59
 
60
60
  // called with each line from the log file
61
- var onCommand = function(obj) {
61
+ const onCommand = function(obj) {
62
62
  retval[obj.batch] = obj;
63
63
  };
64
64
 
@@ -66,6 +66,9 @@ module.exports = function(log, batches, callback) {
66
66
  fs.createReadStream(log)
67
67
  .pipe(liner())
68
68
  .pipe(onLine(onCommand, batches))
69
+ .on('error', function(err) {
70
+ callback(err);
71
+ })
69
72
  .on('finish', function() {
70
73
  callback(null, retval);
71
74
  });
@@ -17,18 +17,18 @@ const fs = require('fs');
17
17
  const stream = require('stream');
18
18
  const liner = require('./liner.js');
19
19
 
20
- var onLine = function(onCommand, getDocs) {
21
- var change = new stream.Transform({ objectMode: true });
20
+ const onLine = function(onCommand, getDocs) {
21
+ const change = new stream.Transform({ objectMode: true });
22
22
 
23
23
  change._transform = function(line, encoding, done) {
24
24
  if (line && line[0] === ':') {
25
- var obj = {
25
+ const obj = {
26
26
  command: null,
27
27
  batch: null,
28
28
  docs: []
29
29
  };
30
30
 
31
- var matches;
31
+ let matches;
32
32
 
33
33
  // extract command
34
34
  matches = line.match(/^:([a-z_]+) ?/);
@@ -44,7 +44,7 @@ var onLine = function(onCommand, getDocs) {
44
44
 
45
45
  // extract doc ids
46
46
  if (getDocs && obj.command === 't') {
47
- var json = line.replace(/^.* batch[0-9]+ /, '').trim();
47
+ const json = line.replace(/^.* batch[0-9]+ /, '').trim();
48
48
  obj.docs = JSON.parse(json);
49
49
  }
50
50
  onCommand(obj);
@@ -65,13 +65,13 @@ var onLine = function(onCommand, getDocs) {
65
65
  */
66
66
  module.exports = function(log, callback) {
67
67
  // our sense of state
68
- var state = {
68
+ const state = {
69
69
 
70
70
  };
71
- var changesComplete = false;
71
+ let changesComplete = false;
72
72
 
73
73
  // called with each line from the log file
74
- var onCommand = function(obj) {
74
+ const onCommand = function(obj) {
75
75
  if (obj.command === 't') {
76
76
  state[obj.batch] = true;
77
77
  } else if (obj.command === 'd') {
@@ -86,7 +86,7 @@ module.exports = function(log, callback) {
86
86
  .pipe(liner())
87
87
  .pipe(onLine(onCommand, false))
88
88
  .on('finish', function() {
89
- var obj = { changesComplete: changesComplete, batches: state };
89
+ const obj = { changesComplete: changesComplete, batches: state };
90
90
  callback(null, obj);
91
91
  });
92
92
  };
@@ -20,7 +20,7 @@ const path = require('path');
20
20
  const pkg = require('../package.json');
21
21
 
22
22
  function parseBackupArgs() {
23
- var program = require('commander');
23
+ const program = require('commander');
24
24
 
25
25
  // Option CLI defaults
26
26
  const defaults = config.cliDefaults();
@@ -84,7 +84,7 @@ function parseBackupArgs() {
84
84
  }
85
85
 
86
86
  function parseRestoreArgs() {
87
- var program = require('commander');
87
+ const program = require('commander');
88
88
 
89
89
  // Option CLI defaults
90
90
  const defaults = config.cliDefaults();
@@ -102,7 +102,7 @@ const errorHelper = async function(err) {
102
102
  module.exports = {
103
103
  client: function(rawUrl, opts) {
104
104
  const url = new URL(rawUrl);
105
- var protocol = (url.protocol.match(/^https/)) ? https : http;
105
+ const protocol = (url.protocol.match(/^https/)) ? https : http;
106
106
  const keepAliveAgent = new protocol.Agent({
107
107
  keepAlive: true,
108
108
  keepAliveMsecs: 30000,
@@ -14,8 +14,8 @@
14
14
  'use strict';
15
15
 
16
16
  module.exports = function(db, options, readstream, ee, callback) {
17
- var liner = require('../includes/liner.js')();
18
- var writer = require('../includes/writer.js')(db, options.bufferSize, options.parallelism, ee);
17
+ const liner = require('../includes/liner.js')();
18
+ const writer = require('../includes/writer.js')(db, options.bufferSize, options.parallelism, ee);
19
19
 
20
20
  // pipe the input to the output, via transformation functions
21
21
  readstream
@@ -20,16 +20,16 @@ const events = require('events');
20
20
  module.exports = function(db, options) {
21
21
  const ee = new events.EventEmitter();
22
22
  const start = new Date().getTime();
23
- var batch = 0;
24
- var hasErrored = false;
25
- var startKey = null;
26
- var total = 0;
23
+ let batch = 0;
24
+ let hasErrored = false;
25
+ let startKey = null;
26
+ let total = 0;
27
27
 
28
28
  async.doUntil(
29
29
  function(callback) {
30
30
  // Note, include_docs: true is set automatically when using the
31
31
  // fetch function.
32
- var opts = { db: db.db, limit: options.bufferSize, includeDocs: true };
32
+ const opts = { db: db.db, limit: options.bufferSize, includeDocs: true };
33
33
 
34
34
  // To avoid double fetching a document solely for the purposes of getting
35
35
  // the next ID to use as a startkey for the next page we instead use the
@@ -49,7 +49,7 @@ module.exports = function(db, options) {
49
49
  startKey = body.rows[opts.limit - 1].id;
50
50
  }
51
51
 
52
- var docs = [];
52
+ const docs = [];
53
53
  body.rows.forEach(function(doc) {
54
54
  docs.push(doc.doc);
55
55
  });
@@ -30,16 +30,16 @@ const debug = require('debug')('couchbackup:spoolchanges');
30
30
  */
31
31
  module.exports = function(db, log, bufferSize, ee, callback) {
32
32
  // list of document ids to process
33
- var buffer = [];
34
- var batch = 0;
35
- var lastSeq = null;
36
- var logStream = fs.createWriteStream(log);
33
+ const buffer = [];
34
+ let batch = 0;
35
+ let lastSeq = null;
36
+ const logStream = fs.createWriteStream(log);
37
37
 
38
38
  // send documents ids to the queue in batches of bufferSize + the last batch
39
- var processBuffer = function(lastOne) {
39
+ const processBuffer = function(lastOne) {
40
40
  if (buffer.length >= bufferSize || (lastOne && buffer.length > 0)) {
41
41
  debug('writing', buffer.length, 'changes to the backup file');
42
- var b = { docs: buffer.splice(0, bufferSize), batch: batch };
42
+ const b = { docs: buffer.splice(0, bufferSize), batch: batch };
43
43
  logStream.write(':t batch' + batch + ' ' + JSON.stringify(b.docs) + '\n');
44
44
  ee.emit('changes', batch);
45
45
  batch++;
@@ -47,12 +47,12 @@ module.exports = function(db, log, bufferSize, ee, callback) {
47
47
  };
48
48
 
49
49
  // called once per received change
50
- var onChange = function(c) {
50
+ const onChange = function(c) {
51
51
  if (c) {
52
52
  if (c.error) {
53
53
  ee.emit('error', new error.BackupError('InvalidChange', `Received invalid change: ${c}`));
54
54
  } else if (c.changes) {
55
- var obj = { id: c.id };
55
+ const obj = { id: c.id };
56
56
  buffer.push(obj);
57
57
  processBuffer(false);
58
58
  } else if (c.last_seq) {
@@ -20,15 +20,15 @@ const debug = require('debug')('couchbackup:writer');
20
20
 
21
21
  module.exports = function(db, bufferSize, parallelism, ee) {
22
22
  const writer = new stream.Transform({ objectMode: true });
23
- var buffer = [];
24
- var written = 0;
25
- var linenumber = 0;
23
+ let buffer = [];
24
+ let written = 0;
25
+ let linenumber = 0;
26
26
 
27
27
  // this is the queue of chunks that are written to the database
28
28
  // the queue's payload will be an array of documents to be written,
29
29
  // the size of the array will be bufferSize. The variable parallelism
30
30
  // determines how many HTTP requests will occur at any one time.
31
- var q = async.queue(function(payload, cb) {
31
+ const q = async.queue(function(payload, cb) {
32
32
  // if we are restoring known revisions, we need to supply new_edits=false
33
33
  if (payload.docs && payload.docs[0] && payload.docs[0]._rev) {
34
34
  payload.new_edits = false;
@@ -55,7 +55,7 @@ module.exports = function(db, bufferSize, parallelism, ee) {
55
55
  }
56
56
  }, parallelism);
57
57
 
58
- var didError = false;
58
+ let didError = false;
59
59
 
60
60
  // write the contents of the buffer to CouchDB in blocks of bufferSize
61
61
  function processBuffer(flush, callback) {
@@ -73,7 +73,7 @@ module.exports = function(db, bufferSize, parallelism, ee) {
73
73
  // and feed the chunks to the queue
74
74
  do {
75
75
  // split the buffer into bufferSize chunks
76
- var toSend = buffer.splice(0, bufferSize);
76
+ const toSend = buffer.splice(0, bufferSize);
77
77
 
78
78
  // and add the chunk to the queue
79
79
  debug(`Adding ${toSend.length} to the write queue.`);
@@ -126,7 +126,7 @@ module.exports = function(db, bufferSize, parallelism, ee) {
126
126
  if (!didError && obj !== '') {
127
127
  // see if it parses as JSON
128
128
  try {
129
- var arr = JSON.parse(obj);
129
+ const arr = JSON.parse(obj);
130
130
 
131
131
  // if it's an array with a length
132
132
  if (typeof arr === 'object' && arr.length > 0) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@cloudant/couchbackup",
3
- "version": "2.7.1-SNAPSHOT.53",
3
+ "version": "2.7.1-SNAPSHOT.57",
4
4
  "description": "CouchBackup - command-line backup utility for Cloudant/CouchDB",
5
5
  "homepage": "https://github.com/cloudant/couchbackup",
6
6
  "repository": "https://github.com/cloudant/couchbackup.git",
@@ -36,28 +36,23 @@
36
36
  "devDependencies": {
37
37
  "eslint": "^8.2.0",
38
38
  "eslint-config-semistandard": "^16.0.0",
39
- "eslint-config-standard": "^15.0.1",
39
+ "eslint-config-standard": "^16.0.3",
40
40
  "eslint-plugin-header": "^3.0.0",
41
41
  "eslint-plugin-import": "^2.8.0",
42
42
  "eslint-plugin-node": "^11.0.0",
43
43
  "eslint-plugin-promise": "^4.0.0",
44
44
  "eslint-plugin-react": "^7.14.2",
45
- "eslint-plugin-standard": "^5.0.0",
46
45
  "http-proxy": "^1.16.2",
47
46
  "jsdoc": "^3.6.7",
48
47
  "mocha": "^9.1.3",
49
48
  "nock": "^13.2.1",
50
- "rewire": "^5.0.0",
51
49
  "tail": "^2.0.0",
52
50
  "toxy": "^0.3.16",
53
51
  "uuid": "^8.3.2"
54
52
  },
55
53
  "scripts": {
56
- "test": "eslint --ignore-path .gitignore . && mocha --grep \"#unit\""
57
- },
58
- "greenkeeper": {
59
- "ignore": [
60
- "eslint"
61
- ]
54
+ "lint": "eslint --ignore-path .gitignore .",
55
+ "unit": "mocha --grep \"#unit\"",
56
+ "test": "npm run lint && npm run unit"
62
57
  }
63
58
  }