b2-cloud-storage 1.0.5 → 1.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +545 -405
- package/package.json +14 -15
package/index.js
CHANGED
|
@@ -1,13 +1,14 @@
|
|
|
1
1
|
/* eslint-disable unicorn/explicit-length-check */
|
|
2
2
|
'use strict';
|
|
3
|
-
const
|
|
4
|
-
const
|
|
5
|
-
const os = require('os');
|
|
6
|
-
const
|
|
3
|
+
const crypto = require('node:crypto');
|
|
4
|
+
const fs = require('node:fs');
|
|
5
|
+
const os = require('node:os');
|
|
6
|
+
const { Transform } = require('node:stream');
|
|
7
|
+
const url = require('node:url');
|
|
7
8
|
|
|
8
|
-
const request = require('request');
|
|
9
|
-
const _ = require('lodash');
|
|
10
9
|
const async = require('async');
|
|
10
|
+
const _ = require('lodash');
|
|
11
|
+
const request = require('request');
|
|
11
12
|
|
|
12
13
|
const nodeVersion = process.version;
|
|
13
14
|
const packageVersion = require('./package.json').version;
|
|
@@ -28,33 +29,34 @@ const b2CloudStorage = class {
|
|
|
28
29
|
* @param {number} options.maxPartAttempts Maximum retries each part can reattempt before erroring when uploading a Large File.
|
|
29
30
|
* @param {number} options.maxTotalErrors Maximum total errors the collective list of file parts can trigger (below the individual maxPartAttempts) before the Large File upload is considered failed.
|
|
30
31
|
* @param {number} options.maxReauthAttempts Maximum times this library will try to reauthenticate if an auth token expires, before assuming failure.
|
|
32
|
+
* @param {number} options.defaultUploadConcurrency Default number of concurrent part uploads for large files. Defaults to 4.
|
|
31
33
|
* @return {undefined}
|
|
32
34
|
*/
|
|
33
|
-
constructor(options){
|
|
34
|
-
if(!options || !options.auth){
|
|
35
|
+
constructor(options) {
|
|
36
|
+
if (!options || !options.auth) {
|
|
35
37
|
throw new Error('Missing authentication object');
|
|
36
38
|
}
|
|
37
|
-
if(!options.auth.accountId){
|
|
39
|
+
if (!options.auth.accountId) {
|
|
38
40
|
throw new Error('Missing authentication accountId');
|
|
39
41
|
}
|
|
40
|
-
if(!options.auth.applicationKey){
|
|
42
|
+
if (!options.auth.applicationKey) {
|
|
41
43
|
throw new Error('Missing authentication applicationKey');
|
|
42
44
|
}
|
|
43
45
|
|
|
44
46
|
this.maxSmallFileSize = options.maxSmallFileSize || 100_000_000; // default to 100MB
|
|
45
|
-
if(this.maxSmallFileSize > 5_000_000_000){
|
|
47
|
+
if (this.maxSmallFileSize > 5_000_000_000) {
|
|
46
48
|
throw new Error('maxSmallFileSize can not exceed 5GB');
|
|
47
49
|
}
|
|
48
|
-
if(this.maxSmallFileSize < 100_000_000){
|
|
50
|
+
if (this.maxSmallFileSize < 100_000_000) {
|
|
49
51
|
throw new Error('maxSmallFileSize can not be less than 100MB');
|
|
50
52
|
}
|
|
51
53
|
|
|
52
54
|
this.maxCopyWorkers = options.maxCopyWorkers || (os.cpus().length * 5); // default to the number of available CPUs * 5 (web requests are cheap)
|
|
53
|
-
this.maxSmallCopyFileSize = options.maxSmallCopyFileSize || 100_000_000; // default to
|
|
54
|
-
if(this.maxSmallCopyFileSize > 5_000_000_000){
|
|
55
|
+
this.maxSmallCopyFileSize = options.maxSmallCopyFileSize || 100_000_000; // default to 100MB
|
|
56
|
+
if (this.maxSmallCopyFileSize > 5_000_000_000) {
|
|
55
57
|
throw new Error('maxSmallFileSize can not exceed 5GB');
|
|
56
58
|
}
|
|
57
|
-
if(this.maxSmallCopyFileSize < 5_000_000){
|
|
59
|
+
if (this.maxSmallCopyFileSize < 5_000_000) {
|
|
58
60
|
throw new Error('maxSmallFileSize can not be less than 5MB');
|
|
59
61
|
}
|
|
60
62
|
|
|
@@ -64,6 +66,7 @@ const b2CloudStorage = class {
|
|
|
64
66
|
this.maxPartAttempts = options.maxPartAttempts || 3; // retry each chunk up to 3 times
|
|
65
67
|
this.maxTotalErrors = options.maxTotalErrors || 10; // quit if 10 chunks fail
|
|
66
68
|
this.maxReauthAttempts = options.maxReauthAttempts || 3; // quit if 3 re-auth attempts fail
|
|
69
|
+
this.defaultUploadConcurrency = options.defaultUploadConcurrency || 4;
|
|
67
70
|
}
|
|
68
71
|
|
|
69
72
|
/**
|
|
@@ -71,15 +74,150 @@ const b2CloudStorage = class {
|
|
|
71
74
|
* @param {string} fileName File name for upload
|
|
72
75
|
* @returns {string} Returns a safe and URL encoded file name for upload
|
|
73
76
|
*/
|
|
74
|
-
static getUrlEncodedFileName(fileName){
|
|
77
|
+
static getUrlEncodedFileName(fileName) {
|
|
75
78
|
return fileName.split('/').map(component => encodeURIComponent(component)).join('/');
|
|
76
79
|
}
|
|
77
80
|
|
|
81
|
+
/**
|
|
82
|
+
* Helper method: Computes an array of upload chunks with inclusive byte ranges for a large file upload.
|
|
83
|
+
* Automatically increases part size if the file would exceed B2's 10,000-part limit.
|
|
84
|
+
* Supports resume by accepting previously-uploaded part sizes and adjusting chunk boundaries accordingly.
|
|
85
|
+
* @param {Object} data Chunk build data
|
|
86
|
+
* @param {Number} data.size Total file size in bytes
|
|
87
|
+
* @param {Number} data.partSize Requested part size in bytes
|
|
88
|
+
* @param {Object<number, number>} [data.uploadedParts] Plain object hash of partNumber to existing part size in bytes, for resumed uploads
|
|
89
|
+
* @param {Number} [data.lastConsecutivePart] Last contiguous uploaded part number
|
|
90
|
+
* @param {Number} [data.lastUploadedPart] Last uploaded part number
|
|
91
|
+
* @param {Number} [data.missingPartSize] Internal resume tracking size
|
|
92
|
+
* @returns {{partSize:Number, chunks:Array<{attempts:Number, part:Number, start:Number, size:Number, end:Number}>, lastPart:Number, missingPartSize:Number}}
|
|
93
|
+
* @throws {Error} When partSize is not a finite positive number
|
|
94
|
+
* @throws {Error} When size is not a finite number or is negative
|
|
95
|
+
* @throws {Error} When part count would exceed 10,000
|
|
96
|
+
* @throws {Error} When a chunk's computed size is zero or its byte range is invalid
|
|
97
|
+
*/
|
|
98
|
+
static buildLargeUploadChunks(data) {
|
|
99
|
+
const maxPartCount = 10000;
|
|
100
|
+
if (!Number.isFinite(data.partSize) || data.partSize <= 0) {
|
|
101
|
+
throw new Error('B2 part size must be greater than zero (got: ' + data.partSize + ')');
|
|
102
|
+
}
|
|
103
|
+
if (!Number.isFinite(data.size)) {
|
|
104
|
+
throw new TypeError('B2 file size must be a finite number (got: ' + data.size + ')');
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
const size = data.size;
|
|
108
|
+
if (size < 0) {
|
|
109
|
+
throw new Error('B2 file size must not be negative');
|
|
110
|
+
}
|
|
111
|
+
if (size === 0) {
|
|
112
|
+
return {
|
|
113
|
+
partSize: data.partSize,
|
|
114
|
+
chunks: [],
|
|
115
|
+
lastPart: 0,
|
|
116
|
+
missingPartSize: data.missingPartSize || 0,
|
|
117
|
+
};
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
const uploadedParts = data.uploadedParts || {};
|
|
121
|
+
const lastConsecutivePart = data.lastConsecutivePart || 0;
|
|
122
|
+
const lastUploadedPart = data.lastUploadedPart || 0;
|
|
123
|
+
let missingPartSize = data.missingPartSize || 0;
|
|
124
|
+
|
|
125
|
+
let partSize = data.partSize;
|
|
126
|
+
const minPartSize = Math.ceil(size / maxPartCount);
|
|
127
|
+
if (minPartSize > partSize) {
|
|
128
|
+
partSize = minPartSize;
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
const lastByte = size - 1;
|
|
132
|
+
const partTemplate = {
|
|
133
|
+
attempts: 1,
|
|
134
|
+
part: 0,
|
|
135
|
+
start: 0,
|
|
136
|
+
size: 0,
|
|
137
|
+
end: -1, // sentinel so first iteration's end += partSize yields partSize - 1 (inclusive byte index)
|
|
138
|
+
};
|
|
139
|
+
|
|
140
|
+
const chunks = [];
|
|
141
|
+
let lastPart = 1;
|
|
142
|
+
|
|
143
|
+
while (partTemplate.end < lastByte) {
|
|
144
|
+
if (partTemplate.part >= maxPartCount) {
|
|
145
|
+
const err = new Error('B2 part count can not exceed 10,000 parts (file size: ' + size + ', partSize: ' + partSize + ')');
|
|
146
|
+
err.chunk = _.clone(partTemplate);
|
|
147
|
+
throw err;
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
partTemplate.part++;
|
|
151
|
+
|
|
152
|
+
let currentPartSize = partSize;
|
|
153
|
+
if (uploadedParts[partTemplate.part]) {
|
|
154
|
+
currentPartSize = uploadedParts[partTemplate.part];
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
if (partTemplate.part > lastConsecutivePart && partTemplate.part < lastUploadedPart) {
|
|
158
|
+
if (!missingPartSize) {
|
|
159
|
+
const accountedForParts = partTemplate.end + 1;
|
|
160
|
+
missingPartSize = Math.ceil((size - accountedForParts) / (lastUploadedPart - lastConsecutivePart));
|
|
161
|
+
if (missingPartSize > partSize) {
|
|
162
|
+
missingPartSize = partSize;
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
currentPartSize = missingPartSize;
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
if (currentPartSize <= 0) {
|
|
169
|
+
const err = new Error('B2 part size cannot be zero at part ' + partTemplate.part + ' (file size: ' + size + ', partSize: ' + partSize + ')');
|
|
170
|
+
err.chunk = _.clone(partTemplate);
|
|
171
|
+
throw err;
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
partTemplate.end += currentPartSize;
|
|
175
|
+
if (partTemplate.end > lastByte) {
|
|
176
|
+
currentPartSize = currentPartSize - (partTemplate.end - lastByte);
|
|
177
|
+
partTemplate.end = lastByte;
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
if (currentPartSize <= 0) {
|
|
181
|
+
const err = new Error('B2 part size cannot be zero at part ' + partTemplate.part + ' (file size: ' + size + ', partSize: ' + partSize + ')');
|
|
182
|
+
err.chunk = _.clone(partTemplate);
|
|
183
|
+
throw err;
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
partTemplate.start += partTemplate.size;
|
|
187
|
+
partTemplate.size = currentPartSize;
|
|
188
|
+
if (partTemplate.part === 1) {
|
|
189
|
+
partTemplate.start = 0;
|
|
190
|
+
}
|
|
191
|
+
if (partTemplate.size > partSize) {
|
|
192
|
+
const err = new Error('B2 part size overflows maximum recommended chunk to resume upload at part ' + partTemplate.part + ' (size: ' + partTemplate.size + ', max: ' + partSize + ')');
|
|
193
|
+
err.chunk = _.clone(partTemplate);
|
|
194
|
+
throw err;
|
|
195
|
+
}
|
|
196
|
+
if (partTemplate.end < partTemplate.start) {
|
|
197
|
+
const err = new Error('B2 chunk range is invalid at part ' + partTemplate.part + ' (start: ' + partTemplate.start + ', end: ' + partTemplate.end + ')');
|
|
198
|
+
err.chunk = _.clone(partTemplate);
|
|
199
|
+
throw err;
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
if (lastPart < partTemplate.part) {
|
|
203
|
+
lastPart = partTemplate.part;
|
|
204
|
+
}
|
|
205
|
+
chunks.push(_.clone(partTemplate));
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
return {
|
|
209
|
+
partSize: partSize,
|
|
210
|
+
chunks: chunks,
|
|
211
|
+
lastPart: lastPart,
|
|
212
|
+
missingPartSize: missingPartSize,
|
|
213
|
+
};
|
|
214
|
+
}
|
|
215
|
+
|
|
78
216
|
/**
|
|
79
217
|
* `b2_authorize_account` method, required before calling any B2 API routes.
|
|
80
218
|
* @param {Function} [callback]
|
|
81
219
|
*/
|
|
82
|
-
authorize(callback){
|
|
220
|
+
authorize(callback) {
|
|
83
221
|
this.request({
|
|
84
222
|
auth: {
|
|
85
223
|
user: this.auth.accountId,
|
|
@@ -88,7 +226,7 @@ const b2CloudStorage = class {
|
|
|
88
226
|
apiUrl: 'https://api.backblazeb2.com',
|
|
89
227
|
url: 'b2_authorize_account',
|
|
90
228
|
}, (err, results) => {
|
|
91
|
-
if(err){
|
|
229
|
+
if (err) {
|
|
92
230
|
return callback(err);
|
|
93
231
|
}
|
|
94
232
|
this.authData = results;
|
|
@@ -100,7 +238,7 @@ const b2CloudStorage = class {
|
|
|
100
238
|
|
|
101
239
|
/**
|
|
102
240
|
* Upload file with `b2_upload_file` or as several parts of a large file upload.
|
|
103
|
-
* This method also will get the filesize & sha1 hash of the entire file.
|
|
241
|
+
* This method also will get the filesize & sha1 hash of the entire file (unless `data.hash` is already provided or set to `false`).
|
|
104
242
|
* @param {String} filename Path to filename to for upload.
|
|
105
243
|
* @param {Object} data Configuration data passed from the `uploadFile` method.
|
|
106
244
|
* @param {String} data.bucketId The target bucket the file is to be uploaded.
|
|
@@ -113,18 +251,18 @@ const b2CloudStorage = class {
|
|
|
113
251
|
* @param {Number} [data.progressInterval] How frequently the `onUploadProgress` callback is fired during upload
|
|
114
252
|
* @param {Number} [data.partSize] Overwrite the default part size as defined by the b2 authorization process
|
|
115
253
|
* @param {Object} [data.info] File info metadata for the file.
|
|
116
|
-
* @param {String} [data.hash]
|
|
254
|
+
* @param {String|false} [data.hash] When a string is provided, skips the whole-file sha1 computation and uses the given hash. Set to `false` to skip hashing entirely; small files will use `do_not_verify`, while large file parts are always verified post-upload against B2's response.
|
|
117
255
|
* @param {('fail_some_uploads'|'expire_some_account_authorization_tokens'|'force_cap_exceeded')} [data.testMode] Enables B2 test mode by setting the `X-Bz-Test-Mode` header, which will cause intermittent artificial failures.
|
|
118
256
|
* @param {Function} [callback]
|
|
119
257
|
* @returns {object} Returns an object with 3 helper methods: `cancel()`, `progress()`, & `info()`
|
|
120
258
|
*/
|
|
121
|
-
uploadFile(filename, data, callback = function(){}){
|
|
122
|
-
if(!this.authData){
|
|
259
|
+
uploadFile(filename, data, callback = function() {}) {
|
|
260
|
+
if (!this.authData) {
|
|
123
261
|
return callback(new Error('Not authenticated. Did you forget to call authorize()?'));
|
|
124
262
|
}
|
|
125
263
|
|
|
126
264
|
// todo: check if allowed (access) to upload files
|
|
127
|
-
if(data.partSize < 5_000_000){
|
|
265
|
+
if (data.partSize < 5_000_000) {
|
|
128
266
|
return callback(new Error('partSize can not be lower than 5MB'));
|
|
129
267
|
}
|
|
130
268
|
|
|
@@ -134,14 +272,14 @@ const b2CloudStorage = class {
|
|
|
134
272
|
|
|
135
273
|
let fileFuncs = {};
|
|
136
274
|
const returnFuncs = {
|
|
137
|
-
cancel: function(){
|
|
275
|
+
cancel: function() {
|
|
138
276
|
cancel = true;
|
|
139
|
-
if(fileFuncs.cancel){
|
|
277
|
+
if (fileFuncs.cancel) {
|
|
140
278
|
return fileFuncs.cancel();
|
|
141
279
|
}
|
|
142
280
|
},
|
|
143
|
-
progress: function(){
|
|
144
|
-
if(fileFuncs.progress){
|
|
281
|
+
progress: function() {
|
|
282
|
+
if (fileFuncs.progress) {
|
|
145
283
|
return fileFuncs.progress();
|
|
146
284
|
}
|
|
147
285
|
return {
|
|
@@ -150,35 +288,35 @@ const b2CloudStorage = class {
|
|
|
150
288
|
bytesTotal: data.size || 0,
|
|
151
289
|
};
|
|
152
290
|
},
|
|
153
|
-
info: function(){
|
|
154
|
-
if(fileFuncs.info){
|
|
291
|
+
info: function() {
|
|
292
|
+
if (fileFuncs.info) {
|
|
155
293
|
return fileFuncs.info();
|
|
156
294
|
}
|
|
157
295
|
return null;
|
|
158
296
|
},
|
|
159
297
|
};
|
|
160
298
|
async.series([
|
|
161
|
-
function(cb){
|
|
162
|
-
if(cancel){
|
|
299
|
+
function(cb) {
|
|
300
|
+
if (cancel) {
|
|
163
301
|
return cb(new Error('B2 upload canceled'));
|
|
164
302
|
}
|
|
165
|
-
if(data.hash){
|
|
303
|
+
if (typeof data.hash === 'string' || data.hash === false) {
|
|
166
304
|
return cb();
|
|
167
305
|
}
|
|
168
|
-
self.getFileHash(filename, function(err, hash){
|
|
169
|
-
if(err){
|
|
306
|
+
self.getFileHash(filename, function(err, hash) {
|
|
307
|
+
if (err) {
|
|
170
308
|
return cb(err);
|
|
171
309
|
}
|
|
172
310
|
data.hash = hash;
|
|
173
311
|
return cb();
|
|
174
312
|
});
|
|
175
313
|
},
|
|
176
|
-
function(cb){
|
|
177
|
-
if(cancel){
|
|
314
|
+
function(cb) {
|
|
315
|
+
if (cancel) {
|
|
178
316
|
return cb(new Error('B2 upload canceled'));
|
|
179
317
|
}
|
|
180
|
-
self.getStat(filename, function(err, stat){
|
|
181
|
-
if(err){
|
|
318
|
+
self.getStat(filename, function(err, stat) {
|
|
319
|
+
if (err) {
|
|
182
320
|
return cb(err);
|
|
183
321
|
}
|
|
184
322
|
data.stat = stat;
|
|
@@ -187,18 +325,18 @@ const b2CloudStorage = class {
|
|
|
187
325
|
return cb();
|
|
188
326
|
});
|
|
189
327
|
},
|
|
190
|
-
], function(err){
|
|
191
|
-
if(cancel){
|
|
328
|
+
], function(err) {
|
|
329
|
+
if (cancel) {
|
|
192
330
|
return callback(new Error('B2 upload canceled'));
|
|
193
331
|
}
|
|
194
|
-
if(err){
|
|
332
|
+
if (err) {
|
|
195
333
|
return callback(err);
|
|
196
334
|
}
|
|
197
335
|
// properly encode file name for upload
|
|
198
|
-
if(data.fileName){
|
|
336
|
+
if (data.fileName) {
|
|
199
337
|
data.fileName = b2CloudStorage.getUrlEncodedFileName(data.fileName);
|
|
200
338
|
}
|
|
201
|
-
if(smallFile){
|
|
339
|
+
if (smallFile) {
|
|
202
340
|
fileFuncs = self.uploadFileSmall(filename, data, callback);
|
|
203
341
|
return;
|
|
204
342
|
}
|
|
@@ -215,7 +353,7 @@ const b2CloudStorage = class {
|
|
|
215
353
|
* @param {Number} [data.maxPartCount] The maximum number of parts to return from this call. The default value is 100, and the maximum allowed is 1000.
|
|
216
354
|
* @param {Function} [callback]
|
|
217
355
|
*/
|
|
218
|
-
listParts(data, callback){
|
|
356
|
+
listParts(data, callback) {
|
|
219
357
|
return this.request({
|
|
220
358
|
url: 'b2_list_parts',
|
|
221
359
|
method: 'POST',
|
|
@@ -232,7 +370,7 @@ const b2CloudStorage = class {
|
|
|
232
370
|
* @param {Number} [data.maxFileCount] The maximum number of files to return from this call. The default value is 100, and the maximum allowed is 100.
|
|
233
371
|
* @param {Function} [callback]
|
|
234
372
|
*/
|
|
235
|
-
listUnfinishedLargeFiles(data, callback){
|
|
373
|
+
listUnfinishedLargeFiles(data, callback) {
|
|
236
374
|
return this.request({
|
|
237
375
|
url: 'b2_list_unfinished_large_files',
|
|
238
376
|
method: 'POST',
|
|
@@ -246,7 +384,7 @@ const b2CloudStorage = class {
|
|
|
246
384
|
* @param {String} data.fileId The ID returned by b2_start_large_file.
|
|
247
385
|
* @param {Function} [callback]
|
|
248
386
|
*/
|
|
249
|
-
cancelLargeFile(data, callback){
|
|
387
|
+
cancelLargeFile(data, callback) {
|
|
250
388
|
return this.request({
|
|
251
389
|
url: 'b2_cancel_large_file',
|
|
252
390
|
method: 'POST',
|
|
@@ -259,7 +397,7 @@ const b2CloudStorage = class {
|
|
|
259
397
|
* @param {String} fileId The ID of the file, as returned by `b2_upload_file`, `b2_hide_file`, `b2_list_file_names`, or `b2_list_file_versions`.
|
|
260
398
|
* @param {Function} [callback]
|
|
261
399
|
*/
|
|
262
|
-
getFileInfo(fileId, callback){
|
|
400
|
+
getFileInfo(fileId, callback) {
|
|
263
401
|
return this.request({
|
|
264
402
|
url: 'b2_get_file_info',
|
|
265
403
|
method: 'POST',
|
|
@@ -277,12 +415,12 @@ const b2CloudStorage = class {
|
|
|
277
415
|
* @param {Array} [data.bucketTypes] One of: "allPublic", "allPrivate", "snapshot", or other values added in the future. "allPublic" means that anybody can download the files in the bucket; "allPrivate" means that you need an authorization token to download them; "snapshot" means that it's a private bucket containing snapshots created on the B2 web site.
|
|
278
416
|
* @param {Function} [callback]
|
|
279
417
|
*/
|
|
280
|
-
listBuckets(data, callback){
|
|
281
|
-
if(!callback && data){
|
|
418
|
+
listBuckets(data, callback) {
|
|
419
|
+
if (!callback && data) {
|
|
282
420
|
callback = data;
|
|
283
421
|
data = {};
|
|
284
422
|
}
|
|
285
|
-
if(!data.accountId){
|
|
423
|
+
if (!data.accountId) {
|
|
286
424
|
data.accountId = this.authData.accountId;
|
|
287
425
|
}
|
|
288
426
|
return this.request({
|
|
@@ -303,7 +441,7 @@ const b2CloudStorage = class {
|
|
|
303
441
|
* @param {Object} [data.range] The range of bytes to copy. If not provided, the whole source file will be copied.
|
|
304
442
|
* @param {Function} [callback]
|
|
305
443
|
*/
|
|
306
|
-
copyFilePart(data, callback){
|
|
444
|
+
copyFilePart(data, callback) {
|
|
307
445
|
return this.request({
|
|
308
446
|
url: 'b2_copy_part',
|
|
309
447
|
method: 'POST',
|
|
@@ -328,8 +466,8 @@ const b2CloudStorage = class {
|
|
|
328
466
|
* @param {Function} [callback]
|
|
329
467
|
* @returns {object} Returns an object with 3 helper methods: `cancel()`, `progress()`, & `info()`
|
|
330
468
|
*/
|
|
331
|
-
copyFile(data, callback){
|
|
332
|
-
if(!this.authData){
|
|
469
|
+
copyFile(data, callback) {
|
|
470
|
+
if (!this.authData) {
|
|
333
471
|
return callback(new Error('Not authenticated. Did you forget to call authorize()?'));
|
|
334
472
|
}
|
|
335
473
|
|
|
@@ -340,14 +478,14 @@ const b2CloudStorage = class {
|
|
|
340
478
|
let fileFuncs = {};
|
|
341
479
|
|
|
342
480
|
const returnFuncs = {
|
|
343
|
-
cancel: function(){
|
|
481
|
+
cancel: function() {
|
|
344
482
|
cancel = true;
|
|
345
|
-
if(fileFuncs.cancel){
|
|
483
|
+
if (fileFuncs.cancel) {
|
|
346
484
|
return fileFuncs.cancel();
|
|
347
485
|
}
|
|
348
486
|
},
|
|
349
|
-
progress: function(){
|
|
350
|
-
if(fileFuncs.progress){
|
|
487
|
+
progress: function() {
|
|
488
|
+
if (fileFuncs.progress) {
|
|
351
489
|
return fileFuncs.progress();
|
|
352
490
|
}
|
|
353
491
|
return {
|
|
@@ -356,8 +494,8 @@ const b2CloudStorage = class {
|
|
|
356
494
|
bytesTotal: data.size || 0,
|
|
357
495
|
};
|
|
358
496
|
},
|
|
359
|
-
info: function(){
|
|
360
|
-
if(fileFuncs.info){
|
|
497
|
+
info: function() {
|
|
498
|
+
if (fileFuncs.info) {
|
|
361
499
|
return fileFuncs.info();
|
|
362
500
|
}
|
|
363
501
|
return null;
|
|
@@ -365,15 +503,15 @@ const b2CloudStorage = class {
|
|
|
365
503
|
};
|
|
366
504
|
|
|
367
505
|
async.series([
|
|
368
|
-
function(cb){
|
|
369
|
-
if(cancel){
|
|
506
|
+
function(cb) {
|
|
507
|
+
if (cancel) {
|
|
370
508
|
return cb(new Error('B2 copy canceled'));
|
|
371
509
|
}
|
|
372
|
-
if(data.size && data.hash && data.destinationBucketId && data.contentType){
|
|
510
|
+
if (data.size && data.hash && data.destinationBucketId && data.contentType) {
|
|
373
511
|
return cb();
|
|
374
512
|
}
|
|
375
|
-
self.getFileInfo(data.sourceFileId, function(err, results){
|
|
376
|
-
if(err){
|
|
513
|
+
self.getFileInfo(data.sourceFileId, function(err, results) {
|
|
514
|
+
if (err) {
|
|
377
515
|
return cb(err);
|
|
378
516
|
}
|
|
379
517
|
data.size = data.size || results.contentLength;
|
|
@@ -383,13 +521,13 @@ const b2CloudStorage = class {
|
|
|
383
521
|
return cb();
|
|
384
522
|
});
|
|
385
523
|
},
|
|
386
|
-
function(cb){
|
|
387
|
-
if(cancel){
|
|
524
|
+
function(cb) {
|
|
525
|
+
if (cancel) {
|
|
388
526
|
return cb(new Error('B2 copy canceled'));
|
|
389
527
|
}
|
|
390
|
-
if(data.size > self.maxSmallCopyFileSize){
|
|
391
|
-
fileFuncs = self.copyLargeFile(data, function(err, results){
|
|
392
|
-
if(err){
|
|
528
|
+
if (data.size > self.maxSmallCopyFileSize) {
|
|
529
|
+
fileFuncs = self.copyLargeFile(data, function(err, results) {
|
|
530
|
+
if (err) {
|
|
393
531
|
return cb(err);
|
|
394
532
|
}
|
|
395
533
|
returnData = results;
|
|
@@ -405,19 +543,19 @@ const b2CloudStorage = class {
|
|
|
405
543
|
'metadataDirective',
|
|
406
544
|
];
|
|
407
545
|
// only required for metadata replace
|
|
408
|
-
if(data.metadataDirective === 'REPLACE'){
|
|
546
|
+
if (data.metadataDirective === 'REPLACE') {
|
|
409
547
|
fields.push('contentType', 'fileInfo');
|
|
410
548
|
}
|
|
411
|
-
fileFuncs = self.copySmallFile(_.pick(data, fields), function(err, results){
|
|
412
|
-
if(err){
|
|
549
|
+
fileFuncs = self.copySmallFile(_.pick(data, fields), function(err, results) {
|
|
550
|
+
if (err) {
|
|
413
551
|
return cb(err);
|
|
414
552
|
}
|
|
415
553
|
returnData = results;
|
|
416
554
|
return cb();
|
|
417
555
|
});
|
|
418
556
|
},
|
|
419
|
-
], function(err){
|
|
420
|
-
if(err){
|
|
557
|
+
], function(err) {
|
|
558
|
+
if (err) {
|
|
421
559
|
return callback(err);
|
|
422
560
|
}
|
|
423
561
|
return callback(null, returnData);
|
|
@@ -436,11 +574,11 @@ const b2CloudStorage = class {
|
|
|
436
574
|
* @param {Array} [data.lifecycleRules] The initial list (a JSON array) of lifecycle rules for this bucket. Structure defined below. See Lifecycle Rules.
|
|
437
575
|
* @param {Function} [callback]
|
|
438
576
|
*/
|
|
439
|
-
createBucket(data, callback){
|
|
440
|
-
if(!this.authData){
|
|
577
|
+
createBucket(data, callback) {
|
|
578
|
+
if (!this.authData) {
|
|
441
579
|
return callback(new Error('Not authenticated. Did you forget to call authorize()?'));
|
|
442
580
|
}
|
|
443
|
-
if(!data.accountId){
|
|
581
|
+
if (!data.accountId) {
|
|
444
582
|
data.accountId = this.authData.accountId;
|
|
445
583
|
}
|
|
446
584
|
return this.request({
|
|
@@ -462,11 +600,11 @@ const b2CloudStorage = class {
|
|
|
462
600
|
* @param {Array} [data.ifRevisionIs] When set, the update will only happen if the revision number stored in the B2 service matches the one passed in. This can be used to avoid having simultaneous updates make conflicting changes.
|
|
463
601
|
* @param {Function} [callback]
|
|
464
602
|
*/
|
|
465
|
-
updateBucket(data, callback){
|
|
466
|
-
if(!this.authData){
|
|
603
|
+
updateBucket(data, callback) {
|
|
604
|
+
if (!this.authData) {
|
|
467
605
|
return callback(new Error('Not authenticated. Did you forget to call authorize()?'));
|
|
468
606
|
}
|
|
469
|
-
if(!data.accountId){
|
|
607
|
+
if (!data.accountId) {
|
|
470
608
|
data.accountId = this.authData.accountId;
|
|
471
609
|
}
|
|
472
610
|
return this.request({
|
|
@@ -483,16 +621,16 @@ const b2CloudStorage = class {
|
|
|
483
621
|
* @param {String} [data.accountId] The ID of your account. When unset will use the `b2_authorize` results `accountId`.
|
|
484
622
|
* @param {Function} [callback]
|
|
485
623
|
*/
|
|
486
|
-
deleteBucket(data, callback){
|
|
487
|
-
if(!this.authData){
|
|
624
|
+
deleteBucket(data, callback) {
|
|
625
|
+
if (!this.authData) {
|
|
488
626
|
return callback(new Error('Not authenticated. Did you forget to call authorize()?'));
|
|
489
627
|
}
|
|
490
|
-
if(typeof(data) === 'string'){
|
|
628
|
+
if (typeof(data) === 'string') {
|
|
491
629
|
data = {
|
|
492
630
|
bucketId: data,
|
|
493
631
|
};
|
|
494
632
|
}
|
|
495
|
-
if(!data.accountId){
|
|
633
|
+
if (!data.accountId) {
|
|
496
634
|
data.accountId = this.authData.accountId;
|
|
497
635
|
}
|
|
498
636
|
return this.request({
|
|
@@ -514,7 +652,7 @@ const b2CloudStorage = class {
|
|
|
514
652
|
* @param {String} [data.delimiter] files returned will be limited to those within the top folder, or any one subfolder. Defaults to NULL. Folder names will also be returned. The delimiter character will be used to "break" file names into folders.
|
|
515
653
|
* @param {Function} [callback]
|
|
516
654
|
*/
|
|
517
|
-
listFileNames(data, callback){
|
|
655
|
+
listFileNames(data, callback) {
|
|
518
656
|
return this.request({
|
|
519
657
|
url: 'b2_list_file_names',
|
|
520
658
|
method: 'POST',
|
|
@@ -533,7 +671,7 @@ const b2CloudStorage = class {
|
|
|
533
671
|
* @param {String} [data.delimiter] files returned will be limited to those within the top folder, or any one subfolder. Defaults to NULL. Folder names will also be returned. The delimiter character will be used to "break" file names into folders.
|
|
534
672
|
* @param {Function} [callback]
|
|
535
673
|
*/
|
|
536
|
-
listFileVersions(data, callback){
|
|
674
|
+
listFileVersions(data, callback) {
|
|
537
675
|
return this.request({
|
|
538
676
|
url: 'b2_list_file_versions',
|
|
539
677
|
method: 'POST',
|
|
@@ -549,15 +687,15 @@ const b2CloudStorage = class {
|
|
|
549
687
|
* @param {String} [data.startApplicationKeyId] The first key to return. Used when a query hits the maxKeyCount, and you want to get more. Set to the value returned as the nextApplicationKeyId in the previous query.
|
|
550
688
|
* @param {Function} [callback]
|
|
551
689
|
*/
|
|
552
|
-
listKeys(data, callback){
|
|
553
|
-
if(!this.authData){
|
|
690
|
+
listKeys(data, callback) {
|
|
691
|
+
if (!this.authData) {
|
|
554
692
|
return callback(new Error('Not authenticated. Did you forget to call authorize()?'));
|
|
555
693
|
}
|
|
556
|
-
if(!callback && data){
|
|
694
|
+
if (!callback && data) {
|
|
557
695
|
callback = data;
|
|
558
696
|
data = {};
|
|
559
697
|
}
|
|
560
|
-
if(!data.accountId){
|
|
698
|
+
if (!data.accountId) {
|
|
561
699
|
data.accountId = this.authData.accountId;
|
|
562
700
|
}
|
|
563
701
|
return this.request({
|
|
@@ -578,11 +716,11 @@ const b2CloudStorage = class {
|
|
|
578
716
|
* @param {String} [data.namePrefix] When present, restricts access to files whose names start with the prefix. You must set `bucketId` when setting this.
|
|
579
717
|
* @param {Function} [callback]
|
|
580
718
|
*/
|
|
581
|
-
createKey(data, callback){
|
|
582
|
-
if(!this.authData){
|
|
719
|
+
createKey(data, callback) {
|
|
720
|
+
if (!this.authData) {
|
|
583
721
|
return callback(new Error('Not authenticated. Did you forget to call authorize()?'));
|
|
584
722
|
}
|
|
585
|
-
if(!data.accountId){
|
|
723
|
+
if (!data.accountId) {
|
|
586
724
|
data.accountId = this.authData.accountId;
|
|
587
725
|
}
|
|
588
726
|
return this.request({
|
|
@@ -597,7 +735,7 @@ const b2CloudStorage = class {
|
|
|
597
735
|
* @param {String} applicationKeyId The key to delete.
|
|
598
736
|
* @param {Function} [callback]
|
|
599
737
|
*/
|
|
600
|
-
deleteKey(applicationKeyId, callback){
|
|
738
|
+
deleteKey(applicationKeyId, callback) {
|
|
601
739
|
return this.request({
|
|
602
740
|
url: 'b2_delete_key',
|
|
603
741
|
method: 'POST',
|
|
@@ -616,7 +754,7 @@ const b2CloudStorage = class {
|
|
|
616
754
|
* @param {String} data.fileId The ID of the file, as returned by `b2_upload_file`, `b2_list_file_names`, or `b2_list_file_versions`.
|
|
617
755
|
* @param {Function} [callback]
|
|
618
756
|
*/
|
|
619
|
-
deleteFileVersion(data, callback){
|
|
757
|
+
deleteFileVersion(data, callback) {
|
|
620
758
|
return this.request({
|
|
621
759
|
url: 'b2_delete_file_version',
|
|
622
760
|
method: 'POST',
|
|
@@ -635,8 +773,8 @@ const b2CloudStorage = class {
|
|
|
635
773
|
* @param {String} [data.b2ContentDisposition] If this is present, B2 will use it as the value of the 'Content-Disposition' header, overriding any 'b2-content-disposition' specified when the file was uploaded.
|
|
636
774
|
* @param {Function} [callback]
|
|
637
775
|
*/
|
|
638
|
-
downloadFileById(data, callback){
|
|
639
|
-
if(!callback && typeof(callback) === 'function'){
|
|
776
|
+
downloadFileById(data, callback) {
|
|
777
|
+
if (!callback && typeof(callback) === 'function') {
|
|
640
778
|
callback = data;
|
|
641
779
|
data = {};
|
|
642
780
|
}
|
|
@@ -650,13 +788,13 @@ const b2CloudStorage = class {
|
|
|
650
788
|
fileId: data.fileId,
|
|
651
789
|
},
|
|
652
790
|
};
|
|
653
|
-
if(data.Authorization){
|
|
791
|
+
if (data.Authorization) {
|
|
654
792
|
requestData.headers.Authorization = data.Authorization;
|
|
655
793
|
}
|
|
656
|
-
if(data.Range){
|
|
794
|
+
if (data.Range) {
|
|
657
795
|
requestData.headers.Range = data.Range;
|
|
658
796
|
}
|
|
659
|
-
if(data.b2ContentDisposition){
|
|
797
|
+
if (data.b2ContentDisposition) {
|
|
660
798
|
requestData.headers.b2ContentDisposition = data.b2ContentDisposition;
|
|
661
799
|
}
|
|
662
800
|
return this.request(requestData, callback);
|
|
@@ -673,20 +811,20 @@ const b2CloudStorage = class {
|
|
|
673
811
|
* @param {String} [data.b2ContentDisposition] If this is present, B2 will use it as the value of the 'Content-Disposition' header, overriding any 'b2-content-disposition' specified when the file was uploaded.
|
|
674
812
|
* @param {Function} [callback]
|
|
675
813
|
*/
|
|
676
|
-
downloadFileByName(data, callback){
|
|
814
|
+
downloadFileByName(data, callback) {
|
|
677
815
|
const requestData = {
|
|
678
816
|
apiUrl: `${this.downloadUrl}/file/${data.bucket}/${data.fileName}`,
|
|
679
817
|
json: false,
|
|
680
818
|
appendPath: false,
|
|
681
819
|
headers: {},
|
|
682
820
|
};
|
|
683
|
-
if(data.Authorization){
|
|
821
|
+
if (data.Authorization) {
|
|
684
822
|
requestData.headers.Authorization = data.Authorization;
|
|
685
823
|
}
|
|
686
|
-
if(data.Range){
|
|
824
|
+
if (data.Range) {
|
|
687
825
|
requestData.headers.Range = data.Range;
|
|
688
826
|
}
|
|
689
|
-
if(data.b2ContentDisposition){
|
|
827
|
+
if (data.b2ContentDisposition) {
|
|
690
828
|
requestData.headers.b2ContentDisposition = data.b2ContentDisposition;
|
|
691
829
|
}
|
|
692
830
|
return this.request(requestData, callback);
|
|
@@ -701,7 +839,7 @@ const b2CloudStorage = class {
|
|
|
701
839
|
* @param {Number} [data.b2ContentDisposition] If this is present, download requests using the returned authorization must include the same value for b2ContentDisposition. The value must match the grammar specified in RFC 6266 (except that parameter names that contain an '*' are not allowed).
|
|
702
840
|
* @param {Function} [callback]
|
|
703
841
|
*/
|
|
704
|
-
getDownloadAuthorization(data, callback){
|
|
842
|
+
getDownloadAuthorization(data, callback) {
|
|
705
843
|
return this.request({
|
|
706
844
|
url: 'b2_get_download_authorization',
|
|
707
845
|
method: 'POST',
|
|
@@ -716,7 +854,7 @@ const b2CloudStorage = class {
|
|
|
716
854
|
* @param {String} data.fileName The name of the file to hide.
|
|
717
855
|
* @param {Function} [callback]
|
|
718
856
|
*/
|
|
719
|
-
hideFile(data, callback){
|
|
857
|
+
hideFile(data, callback) {
|
|
720
858
|
return this.request({
|
|
721
859
|
url: 'b2_hide_file',
|
|
722
860
|
method: 'POST',
|
|
@@ -732,10 +870,10 @@ const b2CloudStorage = class {
|
|
|
732
870
|
* @param {boolean} data.apiUrl (internal) Full URL path or hostname to replace. Most useful when combined with `appendPath`.
|
|
733
871
|
* @param {Function} callback [description]
|
|
734
872
|
*/
|
|
735
|
-
request(data, callback){
|
|
873
|
+
request(data, callback) {
|
|
736
874
|
const apiUrl = new url.URL(data.apiUrl || this.url);
|
|
737
875
|
|
|
738
|
-
if(data.appendPath !== false){
|
|
876
|
+
if (data.appendPath !== false) {
|
|
739
877
|
apiUrl.pathname += `b2api/${this.version}/${data.url}`;
|
|
740
878
|
}
|
|
741
879
|
const requestData = _.defaults(data, {
|
|
@@ -745,54 +883,54 @@ const b2CloudStorage = class {
|
|
|
745
883
|
});
|
|
746
884
|
requestData.url = apiUrl.toString();
|
|
747
885
|
// if auth data is set from `authorize` function and we haven't overridden it via `data.auth` or request headers, set it for this request
|
|
748
|
-
if(this.authData && !data.auth && !requestData.headers.Authorization){
|
|
886
|
+
if (this.authData && !data.auth && !requestData.headers.Authorization) {
|
|
749
887
|
requestData.headers.Authorization = this.authData.authorizationToken;
|
|
750
888
|
}
|
|
751
889
|
requestData.headers.Accept = 'application/json';
|
|
752
|
-
if(!requestData.headers.Authorization && !requestData.auth){
|
|
890
|
+
if (!requestData.headers.Authorization && !requestData.auth) {
|
|
753
891
|
return callback(new Error('Not yet authorised. Call `.authorize` before running any functions.'));
|
|
754
892
|
}
|
|
755
893
|
// default user agent to package version and node version if not already set
|
|
756
|
-
if(!requestData.headers['User-Agent']){
|
|
894
|
+
if (!requestData.headers['User-Agent']) {
|
|
757
895
|
requestData.headers['User-Agent'] = `b2-cloud-storage/${packageVersion}+node/${nodeVersion}`;
|
|
758
896
|
}
|
|
759
897
|
let reqCount = 0;
|
|
760
898
|
const doRequest = () => {
|
|
761
|
-
if(reqCount >= this.maxReauthAttempts){
|
|
899
|
+
if (reqCount >= this.maxReauthAttempts) {
|
|
762
900
|
return callback(new Error('Auth token expired, and unable to re-authenticate to acquire new token.'));
|
|
763
901
|
}
|
|
764
902
|
reqCount++;
|
|
765
903
|
return request(requestData, (err, res, body) => {
|
|
766
|
-
if(err){
|
|
904
|
+
if (err) {
|
|
767
905
|
return callback(err, null, res);
|
|
768
906
|
}
|
|
769
|
-
if(res.headers['content-type'] && res.headers['content-type'].includes('application/json') && typeof(body) === 'string'){
|
|
770
|
-
try{
|
|
907
|
+
if (res.headers['content-type'] && res.headers['content-type'].includes('application/json') && typeof(body) === 'string') {
|
|
908
|
+
try {
|
|
771
909
|
body = JSON.parse(body);
|
|
772
|
-
}catch{
|
|
910
|
+
} catch {
|
|
773
911
|
// we tried
|
|
774
912
|
}
|
|
775
913
|
}
|
|
776
914
|
// auth expired, re-authorize and then make request again
|
|
777
|
-
if(res.statusCode === 401 && body && body.code === 'expired_auth_token'){
|
|
915
|
+
if (res.statusCode === 401 && body && body.code === 'expired_auth_token') {
|
|
778
916
|
return this.authorize(doRequest);
|
|
779
917
|
}
|
|
780
|
-
if(res.statusCode === 403 || (body && body.code === 'storage_cap_exceeded')){
|
|
918
|
+
if (res.statusCode === 403 || (body && body.code === 'storage_cap_exceeded')) {
|
|
781
919
|
return callback(new Error('B2 Cap Exceeded. Check your Backblaze account for more details.'), body, res);
|
|
782
920
|
}
|
|
783
921
|
// todo: handle more response codes.
|
|
784
|
-
if(res.statusCode !== 200){
|
|
922
|
+
if (res.statusCode !== 200) {
|
|
785
923
|
let error = null;
|
|
786
|
-
if(typeof(body) === 'string'){
|
|
924
|
+
if (typeof(body) === 'string') {
|
|
787
925
|
error = new Error(body);
|
|
788
926
|
}
|
|
789
|
-
if(body && body.code && !body.message){
|
|
927
|
+
if (body && body.code && !body.message) {
|
|
790
928
|
error = new Error('API returned error code: ' + body.code);
|
|
791
929
|
}
|
|
792
|
-
if(body && body.message){
|
|
930
|
+
if (body && body.message) {
|
|
793
931
|
error = new Error(body.message);
|
|
794
932
|
}
|
|
795
|
-
if(!error){
|
|
933
|
+
if (!error) {
|
|
796
934
|
error = new Error('Invalid response from API.');
|
|
797
935
|
}
|
|
798
936
|
return callback(error, body, res);
|
|
@@ -809,11 +947,11 @@ const b2CloudStorage = class {
|
|
|
809
947
|
* @param {Stream} fileStream File stream from `fs.readFileStream`.
|
|
810
948
|
* @param {Function} [callback]
|
|
811
949
|
*/
|
|
812
|
-
getHash(fileStream, callback){
|
|
950
|
+
getHash(fileStream, callback) {
|
|
813
951
|
const hash = crypto.createHash('sha1');
|
|
814
|
-
fileStream.on('data', function(chunk){
|
|
952
|
+
fileStream.on('data', function(chunk) {
|
|
815
953
|
hash.update(chunk);
|
|
816
|
-
}).on('error', err => callback(err)).on('end', function(){
|
|
954
|
+
}).on('error', err => callback(err)).on('end', function() {
|
|
817
955
|
return callback(null, hash.digest('hex'));
|
|
818
956
|
});
|
|
819
957
|
}
|
|
@@ -824,7 +962,7 @@ const b2CloudStorage = class {
|
|
|
824
962
|
* @param {String} Path to filename to get sha1 hash.
|
|
825
963
|
* @param {Function} [callback]
|
|
826
964
|
*/
|
|
827
|
-
getFileHash(filename, callback){
|
|
965
|
+
getFileHash(filename, callback) {
|
|
828
966
|
return this.getHash(fs.createReadStream(filename), callback);
|
|
829
967
|
}
|
|
830
968
|
|
|
@@ -834,7 +972,7 @@ const b2CloudStorage = class {
|
|
|
834
972
|
* @param {String} Path to filename to get file stats.
|
|
835
973
|
* @param {Function} [callback]
|
|
836
974
|
*/
|
|
837
|
-
getStat(filename, callback){
|
|
975
|
+
getStat(filename, callback) {
|
|
838
976
|
return fs.stat(filename, callback);
|
|
839
977
|
}
|
|
840
978
|
|
|
@@ -851,7 +989,7 @@ const b2CloudStorage = class {
|
|
|
851
989
|
* @param {Function} [callback]
|
|
852
990
|
* @returns {object} Returns an object with 1 helper method: `cancel()`
|
|
853
991
|
*/
|
|
854
|
-
copySmallFile(data, callback){
|
|
992
|
+
copySmallFile(data, callback) {
|
|
855
993
|
const req = this.request({
|
|
856
994
|
url: 'b2_copy_file',
|
|
857
995
|
method: 'POST',
|
|
@@ -860,7 +998,7 @@ const b2CloudStorage = class {
|
|
|
860
998
|
|
|
861
999
|
// If we had a progress and info we could return those as well
|
|
862
1000
|
return {
|
|
863
|
-
cancel: function(){
|
|
1001
|
+
cancel: function() {
|
|
864
1002
|
req.abort();
|
|
865
1003
|
},
|
|
866
1004
|
};
|
|
@@ -881,8 +1019,8 @@ const b2CloudStorage = class {
|
|
|
881
1019
|
* @param {Object} [data.fileInfo] Must only be supplied if the metadataDirective is REPLACE. This field stores the metadata that will be stored with the file.
|
|
882
1020
|
* @param {Function} [callback]
|
|
883
1021
|
*/
|
|
884
|
-
copyLargeFile(data, callback){
|
|
885
|
-
if(!this.authData){
|
|
1022
|
+
copyLargeFile(data, callback) {
|
|
1023
|
+
if (!this.authData) {
|
|
886
1024
|
return callback(new Error('Not authenticated. Did you forget to call authorize()?'));
|
|
887
1025
|
}
|
|
888
1026
|
const self = this;
|
|
@@ -892,7 +1030,7 @@ const b2CloudStorage = class {
|
|
|
892
1030
|
|
|
893
1031
|
let interval = null;
|
|
894
1032
|
async.series([
|
|
895
|
-
function(cb){
|
|
1033
|
+
function(cb) {
|
|
896
1034
|
self.request({
|
|
897
1035
|
url: 'b2_start_large_file',
|
|
898
1036
|
method: 'POST',
|
|
@@ -900,23 +1038,27 @@ const b2CloudStorage = class {
|
|
|
900
1038
|
bucketId: data.destinationBucketId,
|
|
901
1039
|
fileName: data.fileName,
|
|
902
1040
|
contentType: data.contentType,
|
|
903
|
-
fileInfo: _.defaults(data.fileInfo, {
|
|
1041
|
+
fileInfo: _.defaults({}, data.fileInfo, data.hash ? {
|
|
904
1042
|
large_file_sha1: data.hash,
|
|
905
1043
|
hash_sha1: data.hash,
|
|
1044
|
+
} : {}, {
|
|
906
1045
|
src_last_modified_millis: String(Date.now()),
|
|
907
1046
|
}),
|
|
908
1047
|
},
|
|
909
1048
|
}, (err, results) => {
|
|
910
|
-
if(err){
|
|
1049
|
+
if (err) {
|
|
911
1050
|
return cb(err);
|
|
912
1051
|
}
|
|
913
1052
|
info.fileId = results.fileId;
|
|
914
1053
|
return cb();
|
|
915
1054
|
});
|
|
916
1055
|
},
|
|
917
|
-
function(cb){
|
|
918
|
-
|
|
919
|
-
const
|
|
1056
|
+
function(cb) {
|
|
1057
|
+
let partSize = data.partSize || self.authData.recommendedPartSize;
|
|
1058
|
+
const minPartSize = Math.ceil(data.size / 10000);
|
|
1059
|
+
if (minPartSize > partSize) {
|
|
1060
|
+
partSize = minPartSize;
|
|
1061
|
+
}
|
|
920
1062
|
|
|
921
1063
|
// track the current chunk
|
|
922
1064
|
const fsOptions = {
|
|
@@ -930,8 +1072,8 @@ const b2CloudStorage = class {
|
|
|
930
1072
|
info.chunks = [];
|
|
931
1073
|
info.lastPart = 1;
|
|
932
1074
|
// create array with calculated number of chunks (floored)
|
|
933
|
-
const pushChunks = Array.from({length: Math.floor(data.size / partSize)});
|
|
934
|
-
_.each(pushChunks, function(){
|
|
1075
|
+
const pushChunks = Array.from({ length: Math.floor(data.size / partSize) });
|
|
1076
|
+
_.each(pushChunks, function() {
|
|
935
1077
|
info.chunks.push(_.clone(fsOptions));
|
|
936
1078
|
fsOptions.part++;
|
|
937
1079
|
fsOptions.start += partSize;
|
|
@@ -939,9 +1081,9 @@ const b2CloudStorage = class {
|
|
|
939
1081
|
});
|
|
940
1082
|
// calculate remainder left (less than single chunk)
|
|
941
1083
|
const remainder = data.size % partSize;
|
|
942
|
-
if(remainder > 0){
|
|
1084
|
+
if (remainder > 0) {
|
|
943
1085
|
const item = _.clone(fsOptions);
|
|
944
|
-
item.end = data.size;
|
|
1086
|
+
item.end = data.size - 1;
|
|
945
1087
|
item.size = remainder;
|
|
946
1088
|
info.chunks.push(item);
|
|
947
1089
|
}
|
|
@@ -949,20 +1091,20 @@ const b2CloudStorage = class {
|
|
|
949
1091
|
|
|
950
1092
|
return process.nextTick(cb);
|
|
951
1093
|
},
|
|
952
|
-
function(cb){
|
|
1094
|
+
function(cb) {
|
|
953
1095
|
info.shaParts = {};
|
|
954
1096
|
info.totalCopied = 0;
|
|
955
1097
|
|
|
956
1098
|
let queue = null; // initialise queue to avoid no-use-before-define eslint error
|
|
957
|
-
const reQueue = function(task, incrementCount = true){
|
|
958
|
-
if(incrementCount){
|
|
1099
|
+
const reQueue = function(task, incrementCount = true) {
|
|
1100
|
+
if (incrementCount) {
|
|
959
1101
|
task.attempts++;
|
|
960
1102
|
}
|
|
961
1103
|
queue.push(task);
|
|
962
1104
|
};
|
|
963
|
-
queue = async.queue(function(task, queueCB){
|
|
1105
|
+
queue = async.queue(function(task, queueCB) {
|
|
964
1106
|
// if the queue has already errored, just callback immediately
|
|
965
|
-
if(info.error){
|
|
1107
|
+
if (info.error) {
|
|
966
1108
|
return process.nextTick(queueCB);
|
|
967
1109
|
}
|
|
968
1110
|
self.request({
|
|
@@ -974,14 +1116,14 @@ const b2CloudStorage = class {
|
|
|
974
1116
|
partNumber: task.part,
|
|
975
1117
|
range: `bytes=${task.start}-${task.end}`,
|
|
976
1118
|
},
|
|
977
|
-
}, function(err, results){
|
|
978
|
-
if(err){
|
|
1119
|
+
}, function(err, results) {
|
|
1120
|
+
if (err) {
|
|
979
1121
|
// if upload fails, error if exceeded max attempts, else requeue
|
|
980
|
-
|
|
1122
|
+
info.totalErrors++;
|
|
1123
|
+
if (task.attempts > self.maxPartAttempts || info.totalErrors >= self.maxTotalErrors) {
|
|
981
1124
|
info.error = err;
|
|
982
1125
|
return queueCB(err);
|
|
983
1126
|
}
|
|
984
|
-
info.totalErrors++;
|
|
985
1127
|
reQueue(task);
|
|
986
1128
|
return queueCB();
|
|
987
1129
|
}
|
|
@@ -992,20 +1134,20 @@ const b2CloudStorage = class {
|
|
|
992
1134
|
}, self.maxCopyWorkers);
|
|
993
1135
|
|
|
994
1136
|
// callback when queue has completed
|
|
995
|
-
queue.drain(function(){
|
|
1137
|
+
queue.drain(function() {
|
|
996
1138
|
clearInterval(interval);
|
|
997
|
-
if(info.error){
|
|
1139
|
+
if (info.error) {
|
|
998
1140
|
return cb();
|
|
999
1141
|
}
|
|
1000
1142
|
info.partSha1Array = [];
|
|
1001
1143
|
let i = 1;
|
|
1002
|
-
while(i <= info.lastPart){
|
|
1144
|
+
while (i <= info.lastPart) {
|
|
1003
1145
|
info.partSha1Array.push(info.shaParts[i++]);
|
|
1004
1146
|
}
|
|
1005
1147
|
return cb();
|
|
1006
1148
|
});
|
|
1007
|
-
interval = setInterval(function(){
|
|
1008
|
-
if(!data.onUploadProgress || typeof(data.onUploadProgress) !== 'function'){
|
|
1149
|
+
interval = setInterval(function() {
|
|
1150
|
+
if (!data.onUploadProgress || typeof(data.onUploadProgress) !== 'function') {
|
|
1009
1151
|
return;
|
|
1010
1152
|
}
|
|
1011
1153
|
const percent = Math.floor((info.totalCopied / data.size) * 100);
|
|
@@ -1018,13 +1160,13 @@ const b2CloudStorage = class {
|
|
|
1018
1160
|
|
|
1019
1161
|
queue.push(info.chunks);
|
|
1020
1162
|
},
|
|
1021
|
-
function(cb){
|
|
1022
|
-
if(interval){
|
|
1163
|
+
function(cb) {
|
|
1164
|
+
if (interval) {
|
|
1023
1165
|
clearInterval(interval);
|
|
1024
1166
|
}
|
|
1025
1167
|
|
|
1026
1168
|
// cleanup large file upload if error occurred
|
|
1027
|
-
if(!info.error){
|
|
1169
|
+
if (!info.error) {
|
|
1028
1170
|
return cb();
|
|
1029
1171
|
}
|
|
1030
1172
|
|
|
@@ -1036,8 +1178,8 @@ const b2CloudStorage = class {
|
|
|
1036
1178
|
},
|
|
1037
1179
|
}, cb);
|
|
1038
1180
|
},
|
|
1039
|
-
function(cb){
|
|
1040
|
-
if(info.error){
|
|
1181
|
+
function(cb) {
|
|
1182
|
+
if (info.error) {
|
|
1041
1183
|
return cb(info.error);
|
|
1042
1184
|
}
|
|
1043
1185
|
self.request({
|
|
@@ -1047,34 +1189,34 @@ const b2CloudStorage = class {
|
|
|
1047
1189
|
fileId: info.fileId,
|
|
1048
1190
|
partSha1Array: info.partSha1Array,
|
|
1049
1191
|
},
|
|
1050
|
-
}, function(err, results){
|
|
1051
|
-
if(err){
|
|
1192
|
+
}, function(err, results) {
|
|
1193
|
+
if (err) {
|
|
1052
1194
|
return cb(err);
|
|
1053
1195
|
}
|
|
1054
1196
|
info.returnData = results;
|
|
1055
1197
|
return cb();
|
|
1056
1198
|
});
|
|
1057
1199
|
},
|
|
1058
|
-
], function(err){
|
|
1059
|
-
if(interval){
|
|
1200
|
+
], function(err) {
|
|
1201
|
+
if (interval) {
|
|
1060
1202
|
clearInterval(interval);
|
|
1061
1203
|
}
|
|
1062
|
-
if(err || info.error){
|
|
1204
|
+
if (err || info.error) {
|
|
1063
1205
|
return callback(err || info.error);
|
|
1064
1206
|
}
|
|
1065
1207
|
return callback(null, info.returnData);
|
|
1066
1208
|
});
|
|
1067
1209
|
|
|
1068
1210
|
return {
|
|
1069
|
-
cancel: function(){
|
|
1211
|
+
cancel: function() {
|
|
1070
1212
|
info.error = new Error('B2 upload canceled');
|
|
1071
1213
|
// TODO: cancel all concurrent copy part requests
|
|
1072
1214
|
},
|
|
1073
|
-
progress: function(){
|
|
1215
|
+
progress: function() {
|
|
1074
1216
|
return info.progress;
|
|
1075
1217
|
},
|
|
1076
|
-
info: function(){
|
|
1077
|
-
if(info.returnData){
|
|
1218
|
+
info: function() {
|
|
1219
|
+
if (info.returnData) {
|
|
1078
1220
|
return info.returnData;
|
|
1079
1221
|
}
|
|
1080
1222
|
return {
|
|
@@ -1091,7 +1233,7 @@ const b2CloudStorage = class {
|
|
|
1091
1233
|
* @param {Object} data Configuration data passed from the `uploadFile` method.
|
|
1092
1234
|
* @param {Function} [callback]
|
|
1093
1235
|
*/
|
|
1094
|
-
uploadFileSmall(filename, data, callback = function(){}){
|
|
1236
|
+
uploadFileSmall(filename, data, callback = function() {}) {
|
|
1095
1237
|
let req = null;
|
|
1096
1238
|
const info = {};
|
|
1097
1239
|
let attempts = 0;
|
|
@@ -1103,7 +1245,7 @@ const b2CloudStorage = class {
|
|
|
1103
1245
|
bucketId: data.bucketId,
|
|
1104
1246
|
},
|
|
1105
1247
|
}, (err, results) => {
|
|
1106
|
-
if(err){
|
|
1248
|
+
if (err) {
|
|
1107
1249
|
return callback(err);
|
|
1108
1250
|
}
|
|
1109
1251
|
const requestData = {
|
|
@@ -1116,43 +1258,43 @@ const b2CloudStorage = class {
|
|
|
1116
1258
|
'Content-Type': data.contentType,
|
|
1117
1259
|
'Content-Length': data.size,
|
|
1118
1260
|
'X-Bz-File-Name': data.fileName,
|
|
1119
|
-
'X-Bz-Content-Sha1': data.hash,
|
|
1261
|
+
'X-Bz-Content-Sha1': data.hash === false ? 'do_not_verify' : data.hash,
|
|
1120
1262
|
},
|
|
1121
1263
|
body: fs.createReadStream(filename),
|
|
1122
1264
|
};
|
|
1123
|
-
if(data.testMode){
|
|
1265
|
+
if (data.testMode) {
|
|
1124
1266
|
requestData.headers['X-Bz-Test-Mode'] = data.testMode;
|
|
1125
1267
|
}
|
|
1126
|
-
data.info = _.defaults({
|
|
1127
|
-
|
|
1128
|
-
}, data.info, {
|
|
1129
|
-
|
|
1268
|
+
data.info = _.defaults(data.hash ? {
|
|
1269
|
+
hash_sha1: data.hash,
|
|
1270
|
+
} : {}, data.info, {
|
|
1271
|
+
src_last_modified_millis: data.stat.mtime.getTime(),
|
|
1130
1272
|
});
|
|
1131
|
-
_.each(data.info || {}, function(value, key){
|
|
1273
|
+
_.each(data.info || {}, function(value, key) {
|
|
1132
1274
|
requestData.headers['X-Bz-Info-' + key] = value;
|
|
1133
1275
|
});
|
|
1134
1276
|
data.info = _.mapValues(data.info, _.toString);
|
|
1135
1277
|
|
|
1136
1278
|
let interval = null;
|
|
1137
1279
|
callback = _.once(callback);
|
|
1138
|
-
req = this.request(requestData, function(err, results, res){
|
|
1280
|
+
req = this.request(requestData, function(err, results, res) {
|
|
1139
1281
|
attempts++;
|
|
1140
|
-
if(err){
|
|
1141
|
-
if(attempts > data.maxPartAttempts || attempts > data.maxTotalErrors){
|
|
1282
|
+
if (err) {
|
|
1283
|
+
if (attempts > data.maxPartAttempts || attempts > data.maxTotalErrors) {
|
|
1142
1284
|
return callback(new Error('Exceeded max retry attempts for upload'));
|
|
1143
1285
|
}
|
|
1144
1286
|
// handle connection failures that should trigger a retry (https://www.backblaze.com/b2/docs/integration_checklist.html)
|
|
1145
|
-
if(err.code === 'EPIPE' || err.code === 'ETIMEDOUT' || err.code === 'ESOCKETTIMEDOUT'){
|
|
1287
|
+
if (err.code === 'EPIPE' || err.code === 'ETIMEDOUT' || err.code === 'ESOCKETTIMEDOUT') {
|
|
1146
1288
|
return upload();
|
|
1147
1289
|
}
|
|
1148
1290
|
// handle status codes that should trigger a retry (https://www.backblaze.com/b2/docs/integration_checklist.html)
|
|
1149
|
-
if(res && (res.statusCode === 408 || (res.statusCode >= 500 && res.statusCode <= 599))){
|
|
1291
|
+
if (res && (res.statusCode === 408 || (res.statusCode >= 500 && res.statusCode <= 599))) {
|
|
1150
1292
|
return upload();
|
|
1151
1293
|
}
|
|
1152
1294
|
return callback(err);
|
|
1153
1295
|
}
|
|
1154
1296
|
info.returnData = results;
|
|
1155
|
-
if(data.onFileId && typeof(data.onFileId) === 'function'){
|
|
1297
|
+
if (data.onFileId && typeof(data.onFileId) === 'function') {
|
|
1156
1298
|
data.onFileId(results.fileId);
|
|
1157
1299
|
}
|
|
1158
1300
|
return callback(null, results);
|
|
@@ -1160,16 +1302,17 @@ const b2CloudStorage = class {
|
|
|
1160
1302
|
clearInterval(interval);
|
|
1161
1303
|
}).on('error', () => {
|
|
1162
1304
|
clearInterval(interval);
|
|
1163
|
-
})
|
|
1164
|
-
|
|
1165
|
-
|
|
1166
|
-
|
|
1167
|
-
|
|
1168
|
-
|
|
1305
|
+
})
|
|
1306
|
+
.on('abort', () => {
|
|
1307
|
+
clearInterval(interval);
|
|
1308
|
+
return callback(new Error('B2 upload canceled'));
|
|
1309
|
+
});
|
|
1310
|
+
interval = setInterval(function() {
|
|
1311
|
+
if (!data.onUploadProgress || typeof(data.onUploadProgress) !== 'function') {
|
|
1169
1312
|
return;
|
|
1170
1313
|
}
|
|
1171
1314
|
let bytesDispatched = 0;
|
|
1172
|
-
if(req.req && req.req.connection && req.req.connection._bytesDispatched){
|
|
1315
|
+
if (req.req && req.req.connection && req.req.connection._bytesDispatched) {
|
|
1173
1316
|
bytesDispatched = req.req.connection._bytesDispatched;
|
|
1174
1317
|
}
|
|
1175
1318
|
const percent = Math.floor((bytesDispatched / data.size) * 100);
|
|
@@ -1184,15 +1327,15 @@ const b2CloudStorage = class {
|
|
|
1184
1327
|
};
|
|
1185
1328
|
upload();
|
|
1186
1329
|
return {
|
|
1187
|
-
cancel: function(){
|
|
1188
|
-
if(req && req.abort){
|
|
1330
|
+
cancel: function() {
|
|
1331
|
+
if (req && req.abort) {
|
|
1189
1332
|
req.abort();
|
|
1190
1333
|
}
|
|
1191
1334
|
},
|
|
1192
|
-
progress: function(){
|
|
1335
|
+
progress: function() {
|
|
1193
1336
|
return info.progress;
|
|
1194
1337
|
},
|
|
1195
|
-
info: function(){
|
|
1338
|
+
info: function() {
|
|
1196
1339
|
return info.returnData;
|
|
1197
1340
|
},
|
|
1198
1341
|
};
|
|
@@ -1200,15 +1343,16 @@ const b2CloudStorage = class {
|
|
|
1200
1343
|
|
|
1201
1344
|
/**
|
|
1202
1345
|
* Helper method: Uploads a large file as several parts
|
|
1203
|
-
* This method will split the large
|
|
1204
|
-
*
|
|
1346
|
+
* This method will split the large file into several chunks, uploading them in parallel to B2.
|
|
1347
|
+
* Each part's sha1 hash is computed inline during upload via a transform stream and verified against B2's response.
|
|
1348
|
+
* Parts will retry on failure.
|
|
1205
1349
|
* @private
|
|
1206
1350
|
* @param {String} filename Path to filename for upload.
|
|
1207
1351
|
* @param {Object} data Configuration data passed from the `uploadFile` method.
|
|
1208
1352
|
* @param {Function} [callback]
|
|
1209
1353
|
*/
|
|
1210
|
-
uploadFileLarge(filename, data, callback = function(){}){
|
|
1211
|
-
if(!this.authData){
|
|
1354
|
+
uploadFileLarge(filename, data, callback = function() {}) {
|
|
1355
|
+
if (!this.authData) {
|
|
1212
1356
|
return callback(new Error('Not authenticated. Did you forget to call authorize()?'));
|
|
1213
1357
|
}
|
|
1214
1358
|
const self = this;
|
|
@@ -1224,17 +1368,17 @@ const b2CloudStorage = class {
|
|
|
1224
1368
|
};
|
|
1225
1369
|
// TODO: handle update callbacks
|
|
1226
1370
|
|
|
1227
|
-
data.limit = data.limit ||
|
|
1371
|
+
data.limit = data.limit || self.defaultUploadConcurrency;
|
|
1228
1372
|
|
|
1229
|
-
const generateUploadURL = function(num, callback){
|
|
1373
|
+
const generateUploadURL = function(num, callback) {
|
|
1230
1374
|
self.request({
|
|
1231
1375
|
url: 'b2_get_upload_part_url',
|
|
1232
1376
|
method: 'POST',
|
|
1233
1377
|
json: {
|
|
1234
1378
|
fileId: info.fileId,
|
|
1235
1379
|
},
|
|
1236
|
-
}, function(err, results){
|
|
1237
|
-
if(err){
|
|
1380
|
+
}, function(err, results) {
|
|
1381
|
+
if (err) {
|
|
1238
1382
|
return callback(err);
|
|
1239
1383
|
}
|
|
1240
1384
|
info.upload_urls[num] = {
|
|
@@ -1247,28 +1391,28 @@ const b2CloudStorage = class {
|
|
|
1247
1391
|
};
|
|
1248
1392
|
let interval = null;
|
|
1249
1393
|
async.series([
|
|
1250
|
-
function(cb){
|
|
1251
|
-
if(!data.largeFileId){
|
|
1394
|
+
function(cb) {
|
|
1395
|
+
if (!data.largeFileId) {
|
|
1252
1396
|
return cb();
|
|
1253
1397
|
}
|
|
1254
1398
|
// resuming a file upload
|
|
1255
1399
|
const parts = {};
|
|
1256
1400
|
let startPartNumber = 0;
|
|
1257
1401
|
let validFileId = false;
|
|
1258
|
-
async.whilst(function(wcb){
|
|
1402
|
+
async.whilst(function(wcb) {
|
|
1259
1403
|
return wcb(null, startPartNumber !== null);
|
|
1260
|
-
}, function(wcb){
|
|
1404
|
+
}, function(wcb) {
|
|
1261
1405
|
const partsData = {
|
|
1262
1406
|
fileId: data.largeFileId,
|
|
1263
1407
|
maxPartCount: 1000,
|
|
1264
1408
|
};
|
|
1265
|
-
if(startPartNumber){
|
|
1409
|
+
if (startPartNumber) {
|
|
1266
1410
|
partsData.startPartNumber = startPartNumber;
|
|
1267
1411
|
}
|
|
1268
|
-
self.listParts(partsData, function(err, results){
|
|
1269
|
-
if(err){
|
|
1412
|
+
self.listParts(partsData, function(err, results) {
|
|
1413
|
+
if (err) {
|
|
1270
1414
|
// failed to find the fileId or invalid fileId
|
|
1271
|
-
if(results.status === 400 && data.ignoreFileIdError){
|
|
1415
|
+
if (results.status === 400 && data.ignoreFileIdError) {
|
|
1272
1416
|
startPartNumber = null;
|
|
1273
1417
|
return wcb();
|
|
1274
1418
|
}
|
|
@@ -1277,14 +1421,14 @@ const b2CloudStorage = class {
|
|
|
1277
1421
|
validFileId = true;
|
|
1278
1422
|
startPartNumber = results.nextPartNumber; // will return null or the next number
|
|
1279
1423
|
let partTrack = 1;
|
|
1280
|
-
_.each(results.parts, function(part){
|
|
1281
|
-
if(info.lastUploadedPart < part.partNumber){
|
|
1424
|
+
_.each(results.parts, function(part) {
|
|
1425
|
+
if (info.lastUploadedPart < part.partNumber) {
|
|
1282
1426
|
info.lastUploadedPart = part.partNumber;
|
|
1283
1427
|
}
|
|
1284
|
-
if(partTrack !== part.partNumber){
|
|
1428
|
+
if (partTrack !== part.partNumber) {
|
|
1285
1429
|
return;
|
|
1286
1430
|
} // ignore gaps in upload, TODO: check for order?
|
|
1287
|
-
if(info.lastConsecutivePart < part.partNumber){
|
|
1431
|
+
if (info.lastConsecutivePart < part.partNumber) {
|
|
1288
1432
|
info.lastConsecutivePart = part.partNumber;
|
|
1289
1433
|
}
|
|
1290
1434
|
parts[part.partNumber] = part.contentLength;
|
|
@@ -1293,14 +1437,14 @@ const b2CloudStorage = class {
|
|
|
1293
1437
|
});
|
|
1294
1438
|
return wcb();
|
|
1295
1439
|
});
|
|
1296
|
-
}, function(err){
|
|
1297
|
-
if(err){
|
|
1298
|
-
//
|
|
1440
|
+
}, function(err) {
|
|
1441
|
+
if (err) {
|
|
1442
|
+
// invalid file ID is handled above via data.ignoreFileIdError
|
|
1299
1443
|
return cb(err);
|
|
1300
1444
|
}
|
|
1301
|
-
if(validFileId){
|
|
1445
|
+
if (validFileId) {
|
|
1302
1446
|
info.fileId = data.largeFileId;
|
|
1303
|
-
if(data.onFileId && typeof(data.onFileId) === 'function'){
|
|
1447
|
+
if (data.onFileId && typeof(data.onFileId) === 'function') {
|
|
1304
1448
|
data.onFileId(info.fileId);
|
|
1305
1449
|
}
|
|
1306
1450
|
info.uploadedParts = parts;
|
|
@@ -1309,88 +1453,34 @@ const b2CloudStorage = class {
|
|
|
1309
1453
|
return cb();
|
|
1310
1454
|
});
|
|
1311
1455
|
},
|
|
1312
|
-
function(cb){
|
|
1313
|
-
|
|
1314
|
-
|
|
1315
|
-
|
|
1316
|
-
|
|
1317
|
-
|
|
1318
|
-
|
|
1319
|
-
|
|
1320
|
-
|
|
1321
|
-
|
|
1322
|
-
|
|
1323
|
-
|
|
1324
|
-
|
|
1325
|
-
|
|
1326
|
-
|
|
1327
|
-
|
|
1328
|
-
|
|
1329
|
-
partTemplate.part++;
|
|
1330
|
-
|
|
1331
|
-
let currentPartSize = partSize; // default to recommended size
|
|
1332
|
-
// check previously uploaded parts
|
|
1333
|
-
if(info.uploadedParts[partTemplate.part]){
|
|
1334
|
-
currentPartSize = info.uploadedParts[partTemplate.part];
|
|
1335
|
-
}
|
|
1336
|
-
// calculates at least how big each chunk has to be to fit into the chunks previously uploaded
|
|
1337
|
-
// we don't know the start/end of those chunks and they MUST be overwritten
|
|
1338
|
-
if(partTemplate.part > info.lastConsecutivePart && partTemplate.part < info.lastUploadedPart){
|
|
1339
|
-
if(!info.missingPartSize){
|
|
1340
|
-
const accountedForParts = partTemplate.end + 1; // last uploaded part
|
|
1341
|
-
info.missingPartSize = Math.ceil((data.size - accountedForParts) / (info.lastUploadedPart - info.lastConsecutivePart));
|
|
1342
|
-
// if this exceeds the recommended size, we can lower the part size and write more chunks after the
|
|
1343
|
-
// higher number of chunks previously uploaded
|
|
1344
|
-
if(info.missingPartSize > partSize){
|
|
1345
|
-
info.missingPartSize = partSize;
|
|
1346
|
-
}
|
|
1347
|
-
}
|
|
1348
|
-
currentPartSize = info.missingPartSize;
|
|
1349
|
-
}
|
|
1350
|
-
if(currentPartSize <= 0){
|
|
1351
|
-
chunkError = new Error('B2 part size cannot be zero');
|
|
1352
|
-
chunkError.chunk = partTemplate;
|
|
1353
|
-
break;
|
|
1354
|
-
}
|
|
1355
|
-
|
|
1356
|
-
partTemplate.end += currentPartSize; // minus 1 to prevent overlapping chunks
|
|
1357
|
-
// check for end of file, adjust part size
|
|
1358
|
-
if(partTemplate.end + 1 >= data.size){
|
|
1359
|
-
// calculate the part size with the remainder
|
|
1360
|
-
// started with -1, so needs to be padded to prevent off by 1 errors
|
|
1361
|
-
currentPartSize = currentPartSize - (partTemplate.end + 1 - data.size);
|
|
1362
|
-
partTemplate.end = data.size;
|
|
1363
|
-
}
|
|
1364
|
-
partTemplate.start += partTemplate.size; // last part size
|
|
1365
|
-
partTemplate.size = currentPartSize;
|
|
1366
|
-
if(partTemplate.part === 1){
|
|
1367
|
-
partTemplate.start = 0;
|
|
1368
|
-
}
|
|
1369
|
-
if(partTemplate.size > partSize){
|
|
1370
|
-
chunkError = new Error('B2 part size overflows maximum recommended chunk to resume upload.');
|
|
1371
|
-
chunkError.chunk = partTemplate;
|
|
1372
|
-
break;
|
|
1373
|
-
}
|
|
1374
|
-
if(info.lastPart < partTemplate.part){
|
|
1375
|
-
info.lastPart = partTemplate.part;
|
|
1376
|
-
}
|
|
1377
|
-
info.chunks.push(_.clone(partTemplate));
|
|
1456
|
+
function(cb) {
|
|
1457
|
+
try {
|
|
1458
|
+
const chunkResults = b2CloudStorage.buildLargeUploadChunks({
|
|
1459
|
+
size: data.size,
|
|
1460
|
+
partSize: data.partSize || self.authData.recommendedPartSize,
|
|
1461
|
+
uploadedParts: info.uploadedParts,
|
|
1462
|
+
lastConsecutivePart: info.lastConsecutivePart,
|
|
1463
|
+
lastUploadedPart: info.lastUploadedPart,
|
|
1464
|
+
missingPartSize: info.missingPartSize,
|
|
1465
|
+
});
|
|
1466
|
+
info.chunks = chunkResults.chunks;
|
|
1467
|
+
info.lastPart = chunkResults.lastPart;
|
|
1468
|
+
info.missingPartSize = chunkResults.missingPartSize;
|
|
1469
|
+
} catch (err) {
|
|
1470
|
+
return process.nextTick(function() {
|
|
1471
|
+
return cb(err);
|
|
1472
|
+
});
|
|
1378
1473
|
}
|
|
1379
|
-
return process.nextTick(
|
|
1380
|
-
if(chunkError){
|
|
1381
|
-
return cb(chunkError);
|
|
1382
|
-
}
|
|
1383
|
-
return cb();
|
|
1384
|
-
});
|
|
1474
|
+
return process.nextTick(cb);
|
|
1385
1475
|
},
|
|
1386
|
-
function(cb){
|
|
1387
|
-
if(info.fileId){
|
|
1476
|
+
function(cb) {
|
|
1477
|
+
if (info.fileId) {
|
|
1388
1478
|
return cb();
|
|
1389
1479
|
}
|
|
1390
|
-
let fileInfo = _.defaults({
|
|
1480
|
+
let fileInfo = _.defaults(data.hash ? {
|
|
1391
1481
|
large_file_sha1: data.hash,
|
|
1392
1482
|
hash_sha1: data.hash,
|
|
1393
|
-
}, data.info, {
|
|
1483
|
+
} : {}, data.info, {
|
|
1394
1484
|
src_last_modified_millis: data.stat.mtime.getTime(),
|
|
1395
1485
|
});
|
|
1396
1486
|
fileInfo = _.mapValues(fileInfo, _.toString);
|
|
@@ -1404,39 +1494,41 @@ const b2CloudStorage = class {
|
|
|
1404
1494
|
fileInfo: fileInfo,
|
|
1405
1495
|
},
|
|
1406
1496
|
}, (err, results) => {
|
|
1407
|
-
if(err){
|
|
1497
|
+
if (err) {
|
|
1408
1498
|
return cb(err);
|
|
1409
1499
|
}
|
|
1410
1500
|
info.fileId = results.fileId;
|
|
1411
|
-
if(data.onFileId && typeof(data.onFileId) === 'function'){
|
|
1501
|
+
if (data.onFileId && typeof(data.onFileId) === 'function') {
|
|
1412
1502
|
data.onFileId(info.fileId);
|
|
1413
1503
|
}
|
|
1414
1504
|
return cb();
|
|
1415
1505
|
});
|
|
1416
1506
|
},
|
|
1417
|
-
function(cb){
|
|
1418
|
-
async.times(data.limit, function(num, next){
|
|
1507
|
+
function(cb) {
|
|
1508
|
+
async.times(data.limit, function(num, next) {
|
|
1419
1509
|
return generateUploadURL(num, next);
|
|
1420
1510
|
}, cb);
|
|
1421
1511
|
},
|
|
1422
|
-
function(cb){
|
|
1512
|
+
function(cb) {
|
|
1423
1513
|
info.totalUploaded = 0;
|
|
1424
1514
|
|
|
1425
1515
|
let queue = null; // initialise queue to avoid no-use-before-define eslint error
|
|
1426
|
-
const reQueue = function(task, incrementCount = true){
|
|
1427
|
-
if(incrementCount){
|
|
1516
|
+
const reQueue = function(task, incrementCount = true) {
|
|
1517
|
+
if (incrementCount) {
|
|
1428
1518
|
task.attempts++;
|
|
1429
1519
|
}
|
|
1430
1520
|
queue.push(task);
|
|
1431
1521
|
};
|
|
1432
|
-
queue = async.queue(function(task, queueCB){
|
|
1522
|
+
queue = async.queue(function(task, queueCB) {
|
|
1523
|
+
queueCB = _.once(queueCB);
|
|
1524
|
+
|
|
1433
1525
|
// if the queue has already errored, just callback immediately
|
|
1434
|
-
if(info.error){
|
|
1526
|
+
if (info.error) {
|
|
1435
1527
|
return process.nextTick(queueCB);
|
|
1436
1528
|
}
|
|
1437
1529
|
|
|
1438
1530
|
// check for previously uploaded
|
|
1439
|
-
if(info.uploadedParts[task.part]){
|
|
1531
|
+
if (info.uploadedParts[task.part]) {
|
|
1440
1532
|
// already uploaded
|
|
1441
1533
|
info.totalUploaded += task.size;
|
|
1442
1534
|
return process.nextTick(queueCB);
|
|
@@ -1446,122 +1538,170 @@ const b2CloudStorage = class {
|
|
|
1446
1538
|
// re-queue if no url found (shouldn't ever happen)
|
|
1447
1539
|
let url = null;
|
|
1448
1540
|
let urlIndex = null;
|
|
1449
|
-
for(const key in info.upload_urls){
|
|
1450
|
-
if(url){ break; }
|
|
1451
|
-
if(info.upload_urls[key].in_use === false){
|
|
1541
|
+
for (const key in info.upload_urls) {
|
|
1542
|
+
if (url) { break; }
|
|
1543
|
+
if (info.upload_urls[key].in_use === false) {
|
|
1452
1544
|
url = info.upload_urls[key];
|
|
1453
1545
|
urlIndex = key;
|
|
1454
1546
|
}
|
|
1455
1547
|
}
|
|
1456
|
-
if(!urlIndex || !url){
|
|
1548
|
+
if (!urlIndex || !url) {
|
|
1457
1549
|
return reQueue(task, false);
|
|
1458
1550
|
}
|
|
1459
1551
|
url.in_use = true;
|
|
1460
|
-
|
|
1461
|
-
|
|
1462
|
-
|
|
1552
|
+
url.request = null;
|
|
1553
|
+
|
|
1554
|
+
// single-read: hash the part data while streaming the upload, then verify against B2's response
|
|
1555
|
+
const sha1 = crypto.createHash('sha1');
|
|
1556
|
+
const hashTransform = new Transform({
|
|
1557
|
+
transform(chunk, encoding, cb) {
|
|
1558
|
+
sha1.update(chunk);
|
|
1559
|
+
cb(null, chunk);
|
|
1560
|
+
},
|
|
1561
|
+
});
|
|
1562
|
+
const fileStream = fs.createReadStream(filename, {
|
|
1463
1563
|
start: task.start,
|
|
1464
1564
|
end: task.end,
|
|
1465
1565
|
encoding: null,
|
|
1466
1566
|
});
|
|
1467
1567
|
|
|
1468
|
-
|
|
1469
|
-
|
|
1470
|
-
|
|
1471
|
-
|
|
1472
|
-
|
|
1473
|
-
|
|
1474
|
-
|
|
1475
|
-
|
|
1476
|
-
|
|
1477
|
-
|
|
1478
|
-
|
|
1479
|
-
|
|
1568
|
+
let streamErrorHandled = false;
|
|
1569
|
+
const cleanupStreams = function() {
|
|
1570
|
+
fileStream.destroy();
|
|
1571
|
+
hashTransform.destroy();
|
|
1572
|
+
if (url.request && url.request.abort) {
|
|
1573
|
+
url.request.abort();
|
|
1574
|
+
}
|
|
1575
|
+
url.in_use = false;
|
|
1576
|
+
url.request = null;
|
|
1577
|
+
};
|
|
1578
|
+
|
|
1579
|
+
const handleStreamError = function(err) {
|
|
1580
|
+
if (streamErrorHandled) { return; }
|
|
1581
|
+
streamErrorHandled = true;
|
|
1582
|
+
cleanupStreams();
|
|
1583
|
+
info.totalErrors++;
|
|
1584
|
+
if (task.attempts > self.maxPartAttempts || info.totalErrors >= self.maxTotalErrors) {
|
|
1585
|
+
info.error = err;
|
|
1586
|
+
return queueCB(err);
|
|
1480
1587
|
}
|
|
1588
|
+
reQueue(task);
|
|
1589
|
+
return queueCB();
|
|
1590
|
+
};
|
|
1481
1591
|
|
|
1482
|
-
|
|
1483
|
-
|
|
1484
|
-
|
|
1485
|
-
|
|
1486
|
-
|
|
1487
|
-
|
|
1488
|
-
|
|
1489
|
-
|
|
1490
|
-
|
|
1491
|
-
|
|
1492
|
-
|
|
1493
|
-
|
|
1494
|
-
|
|
1495
|
-
|
|
1496
|
-
|
|
1497
|
-
|
|
1498
|
-
|
|
1499
|
-
|
|
1500
|
-
|
|
1592
|
+
fileStream.on('error', handleStreamError);
|
|
1593
|
+
hashTransform.on('error', handleStreamError);
|
|
1594
|
+
fileStream.pipe(hashTransform);
|
|
1595
|
+
|
|
1596
|
+
const reqOptions = {
|
|
1597
|
+
apiUrl: url.uploadUrl,
|
|
1598
|
+
appendPath: false,
|
|
1599
|
+
method: 'POST',
|
|
1600
|
+
json: false,
|
|
1601
|
+
headers: {
|
|
1602
|
+
'Authorization': url.authorizationToken,
|
|
1603
|
+
'X-Bz-Part-Number': task.part,
|
|
1604
|
+
'X-Bz-Content-Sha1': 'do_not_verify',
|
|
1605
|
+
'Content-Length': task.size,
|
|
1606
|
+
},
|
|
1607
|
+
body: hashTransform,
|
|
1608
|
+
};
|
|
1609
|
+
if (data.testMode) {
|
|
1610
|
+
reqOptions.headers['X-Bz-Test-Mode'] = data.testMode;
|
|
1611
|
+
}
|
|
1612
|
+
url.request = self.request(reqOptions, function(err, body, res) {
|
|
1613
|
+
// release upload url
|
|
1614
|
+
url.in_use = false;
|
|
1615
|
+
url.request = null;
|
|
1616
|
+
|
|
1617
|
+
const retry = function() {
|
|
1618
|
+
return generateUploadURL(urlIndex, function(err) {
|
|
1619
|
+
// if we're unable to get an upload URL from B2, we can't attempt to retry
|
|
1620
|
+
if (err) { return queueCB(err); }
|
|
1621
|
+
reQueue(task);
|
|
1622
|
+
return queueCB();
|
|
1623
|
+
});
|
|
1501
1624
|
};
|
|
1502
|
-
if
|
|
1503
|
-
|
|
1625
|
+
// if upload fails, error if exceeded max attempts, else requeue
|
|
1626
|
+
if (err) {
|
|
1627
|
+
if (!streamErrorHandled) {
|
|
1628
|
+
info.totalErrors++;
|
|
1629
|
+
}
|
|
1630
|
+
// fail immediately if max errors exceeded
|
|
1631
|
+
if (task.attempts > self.maxPartAttempts || info.totalErrors >= self.maxTotalErrors) {
|
|
1632
|
+
info.error = err;
|
|
1633
|
+
return queueCB(err);
|
|
1634
|
+
}
|
|
1635
|
+
// handle connection failures that should trigger a retry (https://www.backblaze.com/b2/docs/integration_checklist.html)
|
|
1636
|
+
if (err.code === 'EPIPE' || err.code === 'ETIMEDOUT' || err.code === 'ESOCKETTIMEDOUT') {
|
|
1637
|
+
return retry();
|
|
1638
|
+
}
|
|
1639
|
+
// handle status codes that should trigger a retry (https://www.backblaze.com/b2/docs/integration_checklist.html)
|
|
1640
|
+
if (res && (res.statusCode === 408 || (res.statusCode >= 500 && res.statusCode <= 599))) {
|
|
1641
|
+
return retry();
|
|
1642
|
+
}
|
|
1643
|
+
return queueCB(err);
|
|
1504
1644
|
}
|
|
1505
|
-
|
|
1506
|
-
|
|
1507
|
-
|
|
1508
|
-
|
|
1509
|
-
|
|
1510
|
-
const retry = function(){
|
|
1511
|
-
return generateUploadURL(urlIndex, function(err){
|
|
1512
|
-
// if we're unable to get an upload URL from B2, we can't attempt to retry
|
|
1513
|
-
if(err){ return queueCB(err); }
|
|
1514
|
-
reQueue(task);
|
|
1515
|
-
return queueCB();
|
|
1516
|
-
});
|
|
1517
|
-
};
|
|
1518
|
-
// if upload fails, error if exceeded max attempts, else requeue
|
|
1519
|
-
if(err){
|
|
1520
|
-
// handle connection failures that should trigger a retry (https://www.backblaze.com/b2/docs/integration_checklist.html)
|
|
1645
|
+
// verify locally computed hash matches B2's response
|
|
1646
|
+
if (typeof body === 'string') {
|
|
1647
|
+
try {
|
|
1648
|
+
body = JSON.parse(body);
|
|
1649
|
+
} catch {
|
|
1521
1650
|
info.totalErrors++;
|
|
1522
|
-
|
|
1523
|
-
|
|
1524
|
-
|
|
1525
|
-
|
|
1526
|
-
if(res && (res.statusCode === 408 || (res.statusCode >= 500 && res.statusCode <= 599))){
|
|
1527
|
-
return retry();
|
|
1651
|
+
const parseErr = new Error('Failed to parse B2 upload response as JSON');
|
|
1652
|
+
if (task.attempts > self.maxPartAttempts || info.totalErrors >= self.maxTotalErrors) {
|
|
1653
|
+
info.error = parseErr;
|
|
1654
|
+
return queueCB(parseErr);
|
|
1528
1655
|
}
|
|
1529
|
-
|
|
1530
|
-
|
|
1531
|
-
|
|
1532
|
-
|
|
1533
|
-
|
|
1534
|
-
|
|
1656
|
+
reQueue(task);
|
|
1657
|
+
return queueCB();
|
|
1658
|
+
}
|
|
1659
|
+
}
|
|
1660
|
+
const localHash = sha1.digest('hex');
|
|
1661
|
+
// B2 returns "unverified:<hash>" when X-Bz-Content-Sha1 is "do_not_verify"
|
|
1662
|
+
const remoteHash = body && body.contentSha1;
|
|
1663
|
+
const normalizedRemoteHash = typeof remoteHash === 'string' ? remoteHash.replace(/^unverified:/, '') : remoteHash;
|
|
1664
|
+
if (!normalizedRemoteHash || normalizedRemoteHash !== localHash) {
|
|
1665
|
+
info.totalErrors++;
|
|
1666
|
+
if (task.attempts > self.maxPartAttempts || info.totalErrors >= self.maxTotalErrors) {
|
|
1667
|
+
const hashErr = !normalizedRemoteHash
|
|
1668
|
+
? new Error('B2 response missing contentSha1 for hash verification')
|
|
1669
|
+
: new Error('SHA1 mismatch: local ' + localHash + ' != remote ' + normalizedRemoteHash);
|
|
1670
|
+
info.error = hashErr;
|
|
1671
|
+
return queueCB(hashErr);
|
|
1535
1672
|
}
|
|
1536
|
-
|
|
1537
|
-
info.totalUploaded += task.size;
|
|
1673
|
+
reQueue(task);
|
|
1538
1674
|
return queueCB();
|
|
1539
|
-
}
|
|
1540
|
-
|
|
1541
|
-
|
|
1542
|
-
|
|
1675
|
+
}
|
|
1676
|
+
info.shaParts[task.part] = localHash;
|
|
1677
|
+
info.totalUploaded += task.size;
|
|
1678
|
+
return queueCB();
|
|
1679
|
+
}).on('error', () => {
|
|
1680
|
+
// Error is handled by the request callback with proper retry logic.
|
|
1681
|
+
// This handler only prevents unhandled 'error' event crashes.
|
|
1682
|
+
}).on('abort', () => queueCB());
|
|
1543
1683
|
}, _.size(info.upload_urls));
|
|
1544
1684
|
|
|
1545
1685
|
// callback when queue has completed
|
|
1546
|
-
queue.drain(function(){
|
|
1686
|
+
queue.drain(function() {
|
|
1547
1687
|
clearInterval(interval);
|
|
1548
|
-
if(info.error){
|
|
1688
|
+
if (info.error) {
|
|
1549
1689
|
return cb();
|
|
1550
1690
|
}
|
|
1551
1691
|
info.partSha1Array = [];
|
|
1552
1692
|
let i = 1;
|
|
1553
|
-
while(i <= info.lastPart){
|
|
1693
|
+
while (i <= info.lastPart) {
|
|
1554
1694
|
info.partSha1Array.push(info.shaParts[i++]);
|
|
1555
1695
|
}
|
|
1556
1696
|
return cb();
|
|
1557
1697
|
});
|
|
1558
|
-
interval = setInterval(function(){
|
|
1559
|
-
if(!data.onUploadProgress || typeof(data.onUploadProgress) !== 'function'){
|
|
1698
|
+
interval = setInterval(function() {
|
|
1699
|
+
if (!data.onUploadProgress || typeof(data.onUploadProgress) !== 'function') {
|
|
1560
1700
|
return;
|
|
1561
1701
|
}
|
|
1562
1702
|
let bytesDispatched = 0;
|
|
1563
|
-
bytesDispatched = _.sumBy(Object.values(info.upload_urls), function(url){
|
|
1564
|
-
if(url && url.request && url.request.req && url.request.req.connection && url.request.req.connection._bytesDispatched){
|
|
1703
|
+
bytesDispatched = _.sumBy(Object.values(info.upload_urls), function(url) {
|
|
1704
|
+
if (url && url.request && url.request.req && url.request.req.connection && url.request.req.connection._bytesDispatched) {
|
|
1565
1705
|
return url.request.req.connection._bytesDispatched;
|
|
1566
1706
|
}
|
|
1567
1707
|
return 0;
|
|
@@ -1577,13 +1717,13 @@ const b2CloudStorage = class {
|
|
|
1577
1717
|
|
|
1578
1718
|
queue.push(info.chunks);
|
|
1579
1719
|
},
|
|
1580
|
-
function(cb){
|
|
1581
|
-
if(interval){
|
|
1720
|
+
function(cb) {
|
|
1721
|
+
if (interval) {
|
|
1582
1722
|
clearInterval(interval);
|
|
1583
1723
|
}
|
|
1584
1724
|
|
|
1585
1725
|
// cleanup large file upload if error occurred
|
|
1586
|
-
if(!info.error){
|
|
1726
|
+
if (!info.error) {
|
|
1587
1727
|
return cb();
|
|
1588
1728
|
}
|
|
1589
1729
|
|
|
@@ -1595,8 +1735,8 @@ const b2CloudStorage = class {
|
|
|
1595
1735
|
},
|
|
1596
1736
|
}, cb);
|
|
1597
1737
|
},
|
|
1598
|
-
function(cb){
|
|
1599
|
-
if(info.error){
|
|
1738
|
+
function(cb) {
|
|
1739
|
+
if (info.error) {
|
|
1600
1740
|
return cb(info.error);
|
|
1601
1741
|
}
|
|
1602
1742
|
self.request({
|
|
@@ -1606,37 +1746,37 @@ const b2CloudStorage = class {
|
|
|
1606
1746
|
fileId: info.fileId,
|
|
1607
1747
|
partSha1Array: info.partSha1Array,
|
|
1608
1748
|
},
|
|
1609
|
-
}, function(err, results){
|
|
1610
|
-
if(err){
|
|
1749
|
+
}, function(err, results) {
|
|
1750
|
+
if (err) {
|
|
1611
1751
|
return cb(err);
|
|
1612
1752
|
}
|
|
1613
1753
|
info.returnData = results;
|
|
1614
1754
|
return cb();
|
|
1615
1755
|
});
|
|
1616
1756
|
},
|
|
1617
|
-
], function(err){
|
|
1618
|
-
if(interval){
|
|
1757
|
+
], function(err) {
|
|
1758
|
+
if (interval) {
|
|
1619
1759
|
clearInterval(interval);
|
|
1620
1760
|
}
|
|
1621
|
-
if(err || info.error){
|
|
1761
|
+
if (err || info.error) {
|
|
1622
1762
|
return callback(err || info.error);
|
|
1623
1763
|
}
|
|
1624
1764
|
return callback(null, info.returnData);
|
|
1625
1765
|
});
|
|
1626
1766
|
return {
|
|
1627
|
-
cancel: function(){
|
|
1767
|
+
cancel: function() {
|
|
1628
1768
|
info.error = new Error('B2 upload canceled');
|
|
1629
|
-
_.each(info.upload_urls, function(url){
|
|
1630
|
-
if(url.request && url.request.abort){
|
|
1769
|
+
_.each(info.upload_urls, function(url) {
|
|
1770
|
+
if (url.request && url.request.abort) {
|
|
1631
1771
|
url.request.abort();
|
|
1632
1772
|
}
|
|
1633
1773
|
});
|
|
1634
1774
|
},
|
|
1635
|
-
progress: function(){
|
|
1775
|
+
progress: function() {
|
|
1636
1776
|
return info.progress;
|
|
1637
1777
|
},
|
|
1638
|
-
info: function(){
|
|
1639
|
-
if(info.returnData){
|
|
1778
|
+
info: function() {
|
|
1779
|
+
if (info.returnData) {
|
|
1640
1780
|
return info.returnData;
|
|
1641
1781
|
}
|
|
1642
1782
|
return {
|
|
@@ -1647,4 +1787,4 @@ const b2CloudStorage = class {
|
|
|
1647
1787
|
}
|
|
1648
1788
|
};
|
|
1649
1789
|
|
|
1650
|
-
module.exports = b2CloudStorage;
|
|
1790
|
+
module.exports = b2CloudStorage;
|