@cloudant/couchbackup 2.9.17 → 2.10.0-206

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,4 @@
1
- // Copyright © 2017 IBM Corp. All rights reserved.
1
+ // Copyright © 2017, 2023 IBM Corp. All rights reserved.
2
2
  //
3
3
  // Licensed under the Apache License, Version 2.0 (the "License");
4
4
  // you may not use this file except in compliance with the License.
@@ -13,63 +13,28 @@
13
13
  // limitations under the License.
14
14
  'use strict';
15
15
 
16
- const fs = require('fs');
17
- const stream = require('stream');
18
- const liner = require('./liner.js');
19
-
20
- const onLine = function(onCommand, batches) {
21
- const change = new stream.Transform({ objectMode: true });
22
- change._transform = function(line, encoding, done) {
23
- if (line && line[0] === ':') {
24
- const obj = {
25
- command: null,
26
- batch: null,
27
- docs: []
28
- };
29
-
30
- let matches;
31
-
32
- // extract command
33
- matches = line.match(/^:([a-z_]+) ?/);
34
- if (matches) {
35
- obj.command = matches[1];
36
- }
37
-
38
- // extract batch
39
- matches = line.match(/ batch([0-9]+)/);
40
- if (matches) {
41
- obj.batch = parseInt(matches[1]);
42
- }
43
-
44
- // if this is one we want
45
- if (obj.command === 't' && batches.indexOf(obj.batch) > -1) {
46
- const json = line.replace(/^.* batch[0-9]+ /, '').trim();
47
- obj.docs = JSON.parse(json);
48
- onCommand(obj);
49
- }
50
- }
51
- done();
52
- };
53
- return change;
54
- };
55
-
56
- module.exports = function(log, batches, callback) {
57
- // our sense of state
58
- const retval = { };
59
-
60
- // called with each line from the log file
61
- const onCommand = function(obj) {
62
- retval[obj.batch] = obj;
63
- };
64
-
65
- // stream through the previous log file
66
- fs.createReadStream(log)
67
- .pipe(liner())
68
- .pipe(onLine(onCommand, batches))
69
- .on('error', function(err) {
70
- callback(err);
71
- })
72
- .on('finish', function() {
73
- callback(null, retval);
74
- });
16
+ const fs = require('node:fs');
17
+ const { LogMapper } = require('./backupMappings.js');
18
+ const { Liner } = require('./liner.js');
19
+ const { FilterStream, MappingStream } = require('./transforms.js');
20
+
21
+ /**
22
+ * Return an array of streams that when pipelined will produce
23
+ * pending backup batches from a log file.
24
+ *
25
+ * @param {string} log - log file name
26
+ * @param {Map} batches - a log summary batches Map of pending batch numbers
27
+ * @returns a log summary object
28
+ */
29
+ module.exports = function(log, batches) {
30
+ const logMapper = new LogMapper();
31
+ return [
32
+ fs.createReadStream(log), // log file
33
+ new Liner(), // split it into lines
34
+ new MappingStream(logMapper.logLineToBackupBatch), // parse line to a backup batch
35
+ new FilterStream((metadata) => {
36
+ // delete returns true if the key exists, false otherwise
37
+ return batches.delete(metadata.batch);
38
+ }) // filter out already done batches
39
+ ];
75
40
  };
@@ -1,4 +1,4 @@
1
- // Copyright © 2017 IBM Corp. All rights reserved.
1
+ // Copyright © 2017, 2023 IBM Corp. All rights reserved.
2
2
  //
3
3
  // Licensed under the Apache License, Version 2.0 (the "License");
4
4
  // you may not use this file except in compliance with the License.
@@ -13,80 +13,50 @@
13
13
  // limitations under the License.
14
14
  'use strict';
15
15
 
16
- const fs = require('fs');
17
- const stream = require('stream');
18
- const liner = require('./liner.js');
19
-
20
- const onLine = function(onCommand, getDocs) {
21
- const change = new stream.Transform({ objectMode: true });
22
-
23
- change._transform = function(line, encoding, done) {
24
- if (line && line[0] === ':') {
25
- const obj = {
26
- command: null,
27
- batch: null,
28
- docs: []
29
- };
30
-
31
- let matches;
32
-
33
- // extract command
34
- matches = line.match(/^:([a-z_]+) ?/);
35
- if (matches) {
36
- obj.command = matches[1];
37
- }
38
-
39
- // extract batch
40
- matches = line.match(/ batch([0-9]+)/);
41
- if (matches) {
42
- obj.batch = parseInt(matches[1]);
43
- }
44
-
45
- // extract doc ids
46
- if (getDocs && obj.command === 't') {
47
- const json = line.replace(/^.* batch[0-9]+ /, '').trim();
48
- obj.docs = JSON.parse(json);
49
- }
50
- onCommand(obj);
51
- }
52
- done();
53
- };
54
- return change;
55
- };
16
+ const { createReadStream } = require('node:fs');
17
+ const { Writable } = require('node:stream');
18
+ const { pipeline } = require('node:stream/promises');
19
+ const { Liner } = require('./liner.js');
20
+ const { LogMapper } = require('./backupMappings.js');
21
+ const { MappingStream } = require('./transforms.js');
56
22
 
57
23
  /**
58
24
  * Generate a list of remaining batches from a download file.
25
+ * Creates a summary containing a changesComplete boolean for
26
+ * if the :changes_complete log file entry was found and a map
27
+ * of pending batch numbers that have yet to be backed up
28
+ * (i.e. the difference of :t and :d log file entries).
59
29
  *
60
30
  * @param {string} log - log file name
61
- * @param {function} callback - callback with err, {changesComplete: N, batches: N}.
62
- * changesComplete signifies whether the log file appeared to
63
- * have completed reading the changes feed (contains :changes_complete).
64
- * batches are remaining batch IDs for download.
31
+ * @returns a log summary object
65
32
  */
66
- module.exports = function(log, callback) {
67
- // our sense of state
68
- const state = {
69
-
70
- };
71
- let changesComplete = false;
72
-
73
- // called with each line from the log file
74
- const onCommand = function(obj) {
75
- if (obj.command === 't') {
76
- state[obj.batch] = true;
77
- } else if (obj.command === 'd') {
78
- delete state[obj.batch];
79
- } else if (obj.command === 'changes_complete') {
80
- changesComplete = true;
81
- }
82
- };
83
-
84
- // stream through the previous log file
85
- fs.createReadStream(log)
86
- .pipe(liner())
87
- .pipe(onLine(onCommand, false))
88
- .on('finish', function() {
89
- const obj = { changesComplete: changesComplete, batches: state };
90
- callback(null, obj);
91
- });
33
+ module.exports = async function(log) {
34
+ const logMapper = new LogMapper();
35
+ const state = { changesComplete: false, batches: new Map() };
36
+
37
+ await pipeline(
38
+ createReadStream(log), // read the log file
39
+ new Liner(), // split it into lines
40
+ new MappingStream(logMapper.logLineToMetadata), // parse line to metadata
41
+ new Writable({
42
+ objectMode: true,
43
+ write: (metadata, encoding, callback) => {
44
+ switch (metadata.command) {
45
+ case 't':
46
+ state.batches.set(metadata.batch, true);
47
+ break;
48
+ case 'd':
49
+ state.batches.delete(metadata.batch);
50
+ break;
51
+ case 'changes_complete':
52
+ state.changesComplete = true;
53
+ break;
54
+ default:
55
+ break;
56
+ }
57
+ callback();
58
+ }
59
+ }) // Save the done batch number in an array
60
+ );
61
+ return state;
92
62
  };
@@ -1,4 +1,4 @@
1
- // Copyright © 2017, 2021 IBM Corp. All rights reserved.
1
+ // Copyright © 2017, 2024 IBM Corp. All rights reserved.
2
2
  //
3
3
  // Licensed under the Apache License, Version 2.0 (the "License");
4
4
  // you may not use this file except in compliance with the License.
@@ -124,6 +124,6 @@ function parseRestoreArgs() {
124
124
  }
125
125
 
126
126
  module.exports = {
127
- parseBackupArgs: parseBackupArgs,
128
- parseRestoreArgs: parseRestoreArgs
127
+ parseBackupArgs,
128
+ parseRestoreArgs
129
129
  };
@@ -14,68 +14,44 @@
14
14
  'use strict';
15
15
 
16
16
  const pkg = require('../package.json');
17
- const stream = require('stream');
18
17
  const { CloudantV1, CouchdbSessionAuthenticator } = require('@ibm-cloud/cloudant');
19
18
  const { IamAuthenticator, NoAuthAuthenticator } = require('ibm-cloud-sdk-core');
20
19
  const retryPlugin = require('retry-axios');
20
+ const debug = require('debug')('couchbackup:request');
21
21
 
22
22
  const userAgent = 'couchbackup-cloudant/' + pkg.version + ' (Node.js ' +
23
23
  process.version + ')';
24
24
 
25
- // Class for streaming _changes error responses into
26
- // In general the response is a small error/reason JSON object
27
- // so it is OK to have this in memory.
28
- class ResponseWriteable extends stream.Writable {
29
- constructor(options) {
30
- super(options);
31
- this.data = [];
32
- }
33
-
34
- _write(chunk, encoding, callback) {
35
- this.data.push(chunk);
36
- callback();
37
- }
38
-
39
- stringBody() {
40
- return Buffer.concat(this.data).toString();
41
- }
42
- }
43
-
44
25
  // An interceptor function to help augment error bodies with a little
45
26
  // extra information so we can continue to use consistent messaging
46
27
  // after the upgrade to @ibm-cloud/cloudant
47
28
  const errorHelper = async function(err) {
29
+ debug('Entering error helper interceptor');
48
30
  let method;
49
31
  let requestUrl;
50
32
  if (err.response) {
33
+ debug('Error has a response');
51
34
  if (err.response.config.url) {
35
+ debug('Getting request URL and method for error');
52
36
  requestUrl = err.response.config.url;
53
37
  method = err.response.config.method;
54
38
  }
39
+ debug('Applying response error message with status, url, and method');
55
40
  // Override the status text with an improved message
56
41
  let errorMsg = `${err.response.status} ${err.response.statusText || ''}: ` +
57
42
  `${method} ${requestUrl}`;
58
43
  if (err.response.data) {
44
+ debug('Found response data');
59
45
  // Check if we have a JSON response and try to get the error/reason
60
46
  if (err.response.headers['content-type'] === 'application/json') {
61
- if (!err.response.data.error && err.response.data.pipe) {
62
- // If we didn't find a JSON object with `error` then we might have a stream response.
63
- // Detect the stream by the presence of `pipe` and use it to get the body and parse
64
- // the error information.
65
- const p = new Promise((resolve, reject) => {
66
- const errorBody = new ResponseWriteable();
67
- err.response.data.pipe(errorBody)
68
- .on('finish', () => { resolve(JSON.parse(errorBody.stringBody())); })
69
- .on('error', () => { reject(err); });
70
- });
71
- // Replace the stream on the response with the parsed object
72
- err.response.data = await p;
73
- }
47
+ debug('Response data is JSON');
74
48
  // Append the error/reason if available
75
49
  if (err.response.data.error) {
50
+ debug('Augmenting error message with error property');
76
51
  // Override the status text with our more complete message
77
52
  errorMsg += ` - Error: ${err.response.data.error}`;
78
53
  if (err.response.data.reason) {
54
+ debug('Augmenting error message with reason property');
79
55
  errorMsg += `, Reason: ${err.response.data.reason}`;
80
56
  }
81
57
  }
@@ -88,91 +64,104 @@ const errorHelper = async function(err) {
88
64
  err.response.data.errors = [{ message: errorMsg }];
89
65
  }
90
66
  } else if (err.request) {
67
+ debug('Error did not include a response');
91
68
  if (!err.message.includes(err.config.url)) {
69
+ debug('Augmenting request error message with URL and method');
92
70
  // Augment the message with the URL and method
93
71
  // but don't do it again if we already have the URL.
94
72
  err.message = `${err.message}: ${err.config.method} ${err.config.url}`;
73
+ } else {
74
+ debug('Request error message already augmented');
95
75
  }
96
76
  }
97
77
  return Promise.reject(err);
98
78
  };
99
79
 
100
- module.exports = {
101
- client: function(rawUrl, opts) {
102
- const url = new URL(rawUrl);
103
- // Split the URL to separate service from database
104
- // Use origin as the "base" to remove auth elements
105
- const actUrl = new URL(url.pathname.substring(0, url.pathname.lastIndexOf('/')), url.origin);
106
- const dbName = url.pathname.substring(url.pathname.lastIndexOf('/') + 1);
107
- let authenticator;
108
- // Default to cookieauth unless an IAM key is provided
109
- if (opts.iamApiKey) {
110
- const iamAuthOpts = { apikey: opts.iamApiKey };
111
- if (opts.iamTokenUrl) {
112
- iamAuthOpts.url = opts.iamTokenUrl;
113
- }
114
- authenticator = new IamAuthenticator(iamAuthOpts);
115
- } else if (url.username) {
116
- authenticator = new CouchdbSessionAuthenticator({
117
- username: decodeURIComponent(url.username),
118
- password: decodeURIComponent(url.password)
119
- });
120
- } else {
121
- authenticator = new NoAuthAuthenticator();
80
+ function newSimpleClient(rawUrl, opts) {
81
+ const url = new URL(rawUrl);
82
+ // Split the URL to separate service from database
83
+ // Use origin as the "base" to remove auth elements
84
+ const actUrl = new URL(url.pathname.substring(0, url.pathname.lastIndexOf('/')), url.origin);
85
+ const dbName = url.pathname.substring(url.pathname.lastIndexOf('/') + 1);
86
+ let authenticator;
87
+ // Default to cookieauth unless an IAM key is provided
88
+ if (opts.iamApiKey) {
89
+ const iamAuthOpts = { apikey: opts.iamApiKey };
90
+ if (opts.iamTokenUrl) {
91
+ iamAuthOpts.url = opts.iamTokenUrl;
122
92
  }
123
- const serviceOpts = {
124
- authenticator: authenticator,
125
- timeout: opts.requestTimeout,
126
- // Axios performance options
127
- maxContentLength: -1
128
- };
129
-
130
- const service = new CloudantV1(serviceOpts);
131
- // Configure retries
132
- const maxRetries = 2; // for 3 total attempts
133
- service.getHttpClient().defaults.raxConfig = {
134
- // retries for status codes
135
- retry: maxRetries,
136
- // retries for non-response e.g. ETIMEDOUT
137
- noResponseRetries: maxRetries,
138
- backoffType: 'exponential',
139
- httpMethodsToRetry: ['GET', 'HEAD', 'POST'],
140
- statusCodesToRetry: [
141
- [429, 429],
142
- [500, 599]
143
- ],
144
- shouldRetry: err => {
145
- const cfg = retryPlugin.getConfig(err);
146
- // cap at max retries regardless of response/non-response type
147
- if (cfg.currentRetryAttempt >= maxRetries) {
148
- return false;
149
- } else {
150
- return retryPlugin.shouldRetryRequest(err);
151
- }
152
- },
153
- instance: service.getHttpClient()
154
- };
155
- retryPlugin.attach(service.getHttpClient());
93
+ authenticator = new IamAuthenticator(iamAuthOpts);
94
+ } else if (url.username) {
95
+ authenticator = new CouchdbSessionAuthenticator({
96
+ username: decodeURIComponent(url.username),
97
+ password: decodeURIComponent(url.password)
98
+ });
99
+ } else {
100
+ authenticator = new NoAuthAuthenticator();
101
+ }
102
+ const serviceOpts = {
103
+ authenticator,
104
+ timeout: opts.requestTimeout,
105
+ // Axios performance options
106
+ maxContentLength: -1
107
+ };
156
108
 
157
- service.setServiceUrl(actUrl.toString());
158
- if (authenticator instanceof CouchdbSessionAuthenticator) {
159
- // Awkward workaround for known Couch issue with compression on _session requests
160
- // It is not feasible to disable compression on all requests with the amount of
161
- // data this lib needs to move, so override the property in the tokenManager instance.
162
- authenticator.tokenManager.requestWrapperInstance.compressRequestData = false;
163
- }
164
- if (authenticator.tokenManager && authenticator.tokenManager.requestWrapperInstance) {
165
- authenticator.tokenManager.requestWrapperInstance.axiosInstance.interceptors.response.use(null, errorHelper);
166
- }
167
- // Add error interceptors to put URLs in error messages
168
- service.getHttpClient().interceptors.response.use(null, errorHelper);
109
+ const service = new CloudantV1(serviceOpts);
110
+ service.setServiceUrl(actUrl.toString());
111
+ if (authenticator instanceof CouchdbSessionAuthenticator) {
112
+ // Awkward workaround for known Couch issue with compression on _session requests
113
+ // It is not feasible to disable compression on all requests with the amount of
114
+ // data this lib needs to move, so override the property in the tokenManager instance.
115
+ authenticator.tokenManager.requestWrapperInstance.compressRequestData = false;
116
+ }
117
+ return { service, dbName, actUrl };
118
+ }
169
119
 
170
- // Add request interceptor to add user-agent (adding it with custom request headers gets overwritten)
171
- service.getHttpClient().interceptors.request.use(function(requestConfig) {
172
- requestConfig.headers['User-Agent'] = userAgent;
173
- return requestConfig;
174
- }, null);
120
+ function newClient(rawUrl, opts) {
121
+ const { service, dbName, actUrl } = newSimpleClient(rawUrl, opts);
122
+ const authenticator = service.getAuthenticator();
123
+ // Configure retries
124
+ const maxRetries = 2; // for 3 total attempts
125
+ service.getHttpClient().defaults.raxConfig = {
126
+ // retries for status codes
127
+ retry: maxRetries,
128
+ // retries for non-response e.g. ETIMEDOUT
129
+ noResponseRetries: maxRetries,
130
+ backoffType: 'exponential',
131
+ httpMethodsToRetry: ['GET', 'HEAD', 'POST'],
132
+ statusCodesToRetry: [
133
+ [429, 429],
134
+ [500, 599]
135
+ ],
136
+ shouldRetry: err => {
137
+ const cfg = retryPlugin.getConfig(err);
138
+ // cap at max retries regardless of response/non-response type
139
+ if (cfg.currentRetryAttempt >= maxRetries) {
140
+ return false;
141
+ } else {
142
+ return retryPlugin.shouldRetryRequest(err);
143
+ }
144
+ },
145
+ instance: service.getHttpClient()
146
+ };
147
+ retryPlugin.attach(service.getHttpClient());
175
148
 
176
- return { service: service, db: dbName, url: actUrl.toString() };
149
+ if (authenticator.tokenManager && authenticator.tokenManager.requestWrapperInstance) {
150
+ authenticator.tokenManager.requestWrapperInstance.axiosInstance.interceptors.response.use(null, errorHelper);
177
151
  }
152
+ // Add error interceptors to put URLs in error messages
153
+ service.getHttpClient().interceptors.response.use(null, errorHelper);
154
+
155
+ // Add request interceptor to add user-agent (adding it with custom request headers gets overwritten)
156
+ service.getHttpClient().interceptors.request.use(function(requestConfig) {
157
+ requestConfig.headers['User-Agent'] = userAgent;
158
+ return requestConfig;
159
+ }, null);
160
+
161
+ return { service, dbName, url: actUrl.toString() };
162
+ }
163
+
164
+ module.exports = {
165
+ newSimpleClient,
166
+ newClient
178
167
  };
@@ -1,4 +1,4 @@
1
- // Copyright © 2017, 2018 IBM Corp. All rights reserved.
1
+ // Copyright © 2017, 2024 IBM Corp. All rights reserved.
2
2
  //
3
3
  // Licensed under the Apache License, Version 2.0 (the "License");
4
4
  // you may not use this file except in compliance with the License.
@@ -13,19 +13,50 @@
13
13
  // limitations under the License.
14
14
  'use strict';
15
15
 
16
- module.exports = function(db, options, readstream, ee, callback) {
17
- const liner = require('../includes/liner.js')();
18
- const writer = require('../includes/writer.js')(db, options.bufferSize, options.parallelism, ee);
16
+ const debug = require('debug')('couchbackup:restore');
17
+ const { Liner } = require('../includes/liner.js');
18
+ const { Restore } = require('../includes/restoreMappings.js');
19
+ const { BatchingStream, MappingStream } = require('./transforms.js');
20
+ const { Writable } = require('node:stream');
21
+ const { pipeline } = require('node:stream/promises');
19
22
 
20
- // pipe the input to the output, via transformation functions
21
- readstream
22
- .pipe(liner) // transform the input stream into per-line
23
- .on('error', function(err) {
24
- // Forward the error to the writer event emitter where we already have
25
- // listeners on for handling errors
26
- writer.emit('error', err);
27
- })
28
- .pipe(writer); // transform the data
23
+ /**
24
+ * Function for performing a restore.
25
+ *
26
+ * @param {object} dbClient - object for connection to source database containing name, service and url
27
+ * @param {object} options - restore configuration
28
+ * @param {Readable} readstream - the backup file content
29
+ * @param {EventEmitter} ee - the user facing EventEmitter
30
+ * @returns a promise that resolves when the restore is complete or rejects if it errors
31
+ */
32
+ module.exports = function(dbClient, options, readstream, ee) {
33
+ const restore = new Restore(dbClient);
34
+ const start = new Date().getTime(); // restore start time
35
+ let total = 0; // the total restored
29
36
 
30
- callback(null, writer);
37
+ const output = new Writable({
38
+ objectMode: true,
39
+ write: (restoreBatch, encoding, cb) => {
40
+ debug(' restored ', restoreBatch.documents);
41
+ total += restoreBatch.documents;
42
+ const totalRunningTimeSec = (new Date().getTime() - start) / 1000;
43
+ try {
44
+ ee.emit('restored', { ...restoreBatch, total, time: totalRunningTimeSec });
45
+ } finally {
46
+ cb();
47
+ }
48
+ }
49
+ });
50
+
51
+ return pipeline(
52
+ readstream, // the backup file
53
+ new Liner(), // line by line
54
+ new MappingStream(restore.backupLineToDocsArray), // convert line to a docs array
55
+ new BatchingStream(options.bufferSize, true), // make new arrays of the correct buffer size
56
+ new MappingStream(restore.docsToRestoreBatch), // make a restore batch
57
+ new MappingStream(restore.pendingToRestored, options.parallelism), // do the restore at the desired level of concurrency
58
+ output // emit restored events
59
+ ).then(() => {
60
+ return { total };
61
+ });
31
62
  };