@automattic/vip 2.22.0 → 2.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/automattic-vip-2.23.0.tgz +0 -0
- package/dist/bin/vip-app-list.js +0 -12
- package/dist/bin/vip-app.js +6 -13
- package/dist/bin/vip-cache-purge-url.js +4 -19
- package/dist/bin/vip-cache.js +0 -2
- package/dist/bin/vip-config-envvar-delete.js +4 -19
- package/dist/bin/vip-config-envvar-get-all.js +6 -19
- package/dist/bin/vip-config-envvar-get.js +4 -15
- package/dist/bin/vip-config-envvar-list.js +8 -20
- package/dist/bin/vip-config-envvar-set.js +4 -23
- package/dist/bin/vip-config-envvar.js +0 -2
- package/dist/bin/vip-config-software-get.js +0 -17
- package/dist/bin/vip-config-software-update.js +6 -22
- package/dist/bin/vip-config-software.js +0 -2
- package/dist/bin/vip-config.js +0 -2
- package/dist/bin/vip-dev-env-create.js +0 -18
- package/dist/bin/vip-dev-env-destroy.js +0 -9
- package/dist/bin/vip-dev-env-exec.js +0 -12
- package/dist/bin/vip-dev-env-import-media.js +0 -7
- package/dist/bin/vip-dev-env-import-sql.js +2 -17
- package/dist/bin/vip-dev-env-import.js +0 -3
- package/dist/bin/vip-dev-env-info.js +0 -10
- package/dist/bin/vip-dev-env-list.js +0 -7
- package/dist/bin/vip-dev-env-start.js +4 -14
- package/dist/bin/vip-dev-env-stop.js +0 -9
- package/dist/bin/vip-dev-env-update.js +0 -11
- package/dist/bin/vip-dev-env.js +0 -2
- package/dist/bin/vip-import-media-abort.js +0 -18
- package/dist/bin/vip-import-media-status.js +0 -12
- package/dist/bin/vip-import-media.js +6 -23
- package/dist/bin/vip-import-sql-status.js +0 -12
- package/dist/bin/vip-import-sql.js +33 -99
- package/dist/bin/vip-import-validate-files.js +21 -42
- package/dist/bin/vip-import-validate-sql.js +0 -8
- package/dist/bin/vip-import.js +0 -3
- package/dist/bin/vip-logs.js +20 -50
- package/dist/bin/vip-search-replace.js +8 -14
- package/dist/bin/vip-sync.js +2 -25
- package/dist/bin/vip-validate-preflight.js +427 -0
- package/dist/bin/vip-validate.js +19 -0
- package/dist/bin/vip-whoami.js +2 -14
- package/dist/bin/vip-wp.js +39 -89
- package/dist/bin/vip.js +5 -35
- package/dist/lib/analytics/clients/pendo.js +9 -18
- package/dist/lib/analytics/clients/stub.js +1 -3
- package/dist/lib/analytics/clients/tracks.js +11 -20
- package/dist/lib/analytics/index.js +4 -11
- package/dist/lib/api/app.js +1 -11
- package/dist/lib/api/cache-purge.js +4 -7
- package/dist/lib/api/feature-flags.js +1 -4
- package/dist/lib/api/http.js +9 -15
- package/dist/lib/api/user.js +1 -7
- package/dist/lib/api.js +7 -18
- package/dist/lib/app-logs/app-logs.js +2 -9
- package/dist/lib/app.js +2 -5
- package/dist/lib/cli/apiConfig.js +4 -19
- package/dist/lib/cli/command.js +43 -133
- package/dist/lib/cli/config.js +1 -5
- package/dist/lib/cli/envAlias.js +14 -15
- package/dist/lib/cli/exit.js +4 -6
- package/dist/lib/cli/format.js +8 -50
- package/dist/lib/cli/progress.js +13 -42
- package/dist/lib/cli/prompt.js +1 -5
- package/dist/lib/cli/repo.js +7 -20
- package/dist/lib/client-file-uploader.js +44 -97
- package/dist/lib/config/software.js +2 -52
- package/dist/lib/constants/dev-environment.js +1 -2
- package/dist/lib/constants/file-size.js +1 -1
- package/dist/lib/constants/vipgo.js +1 -1
- package/dist/lib/dev-environment/dev-environment-cli.js +31 -134
- package/dist/lib/dev-environment/dev-environment-core.js +63 -171
- package/dist/lib/dev-environment/dev-environment-lando.js +16 -71
- package/dist/lib/env.js +1 -4
- package/dist/lib/envvar/api-delete.js +1 -4
- package/dist/lib/envvar/api-get-all.js +1 -4
- package/dist/lib/envvar/api-get.js +1 -2
- package/dist/lib/envvar/api-list.js +3 -4
- package/dist/lib/envvar/api-set.js +1 -4
- package/dist/lib/envvar/api.js +5 -16
- package/dist/lib/envvar/input.js +1 -8
- package/dist/lib/envvar/logging.js +2 -6
- package/dist/lib/envvar/read-file.js +1 -3
- package/dist/lib/http/proxy-agent.js +17 -22
- package/dist/lib/keychain/browser.js +1 -4
- package/dist/lib/keychain/insecure.js +1 -10
- package/dist/lib/keychain/secure.js +1 -8
- package/dist/lib/keychain.js +4 -8
- package/dist/lib/logout.js +0 -6
- package/dist/lib/media-import/media-file-import.js +3 -7
- package/dist/lib/media-import/progress.js +6 -17
- package/dist/lib/media-import/status.js +14 -65
- package/dist/lib/read-file.js +1 -6
- package/dist/lib/rollbar.js +1 -7
- package/dist/lib/search-and-replace.js +9 -41
- package/dist/lib/site-import/db-file-import.js +3 -9
- package/dist/lib/site-import/status.js +17 -74
- package/dist/lib/token.js +1 -33
- package/dist/lib/tracker.js +4 -20
- package/dist/lib/user-error.js +0 -2
- package/dist/lib/validations/is-multi-site-sql-dump.js +4 -12
- package/dist/lib/validations/is-multi-site.js +5 -21
- package/dist/lib/validations/is-multisite-domain-mapped.js +5 -31
- package/dist/lib/validations/line-by-line.js +4 -16
- package/dist/lib/validations/site-type.js +10 -19
- package/dist/lib/validations/sql.js +11 -76
- package/dist/lib/validations/utils.js +1 -6
- package/dist/lib/vip-import-validate-files.js +82 -109
- package/npm-shrinkwrap.json +837 -67
- package/package.json +4 -1
package/dist/lib/client-file-uploader.js

@@ -3,52 +3,38 @@
 Object.defineProperty(exports, "__esModule", {
 value: true
 });
-exports.
-exports.uploadImportSqlFileToS3 = uploadImportSqlFileToS3;
-exports.uploadUsingPutObject = uploadUsingPutObject;
-exports.uploadUsingMultipart = uploadUsingMultipart;
-exports.getSignedUploadRequestData = getSignedUploadRequestData;
+exports.MULTIPART_THRESHOLD = exports.COMPRESS_THRESHOLD = void 0;
 exports.checkFileAccess = checkFileAccess;
-exports.
-exports.isFile = isFile;
-exports.getFileSize = getFileSize;
+exports.completeMultipartUpload = completeMultipartUpload;
 exports.detectCompressedMimeType = detectCompressedMimeType;
+exports.getFileMD5Hash = void 0;
+exports.getFileMeta = getFileMeta;
+exports.getFileSize = getFileSize;
+exports.getFileStats = getFileStats;
 exports.getPartBoundaries = getPartBoundaries;
-exports.
+exports.getSignedUploadRequestData = getSignedUploadRequestData;
+exports.gzipFile = exports.getWorkingTempDir = void 0;
+exports.isFile = isFile;
+exports.uploadImportSqlFileToS3 = uploadImportSqlFileToS3;
 exports.uploadPart = uploadPart;
-exports.
-exports.
-
+exports.uploadParts = uploadParts;
+exports.uploadUsingMultipart = uploadUsingMultipart;
+exports.uploadUsingPutObject = uploadUsingPutObject;
 var _fs = _interopRequireWildcard(require("fs"));
-
 var _os = _interopRequireDefault(require("os"));
-
 var _path = _interopRequireDefault(require("path"));
-
 var _nodeFetch = _interopRequireDefault(require("node-fetch"));
-
 var _chalk = _interopRequireDefault(require("chalk"));
-
 var _zlib = require("zlib");
-
 var _crypto = require("crypto");
-
 var _stream = require("stream");
-
 var _xml2js = require("xml2js");
-
 var _debug = _interopRequireDefault(require("debug"));
-
 var _http = _interopRequireDefault(require("./api/http"));
-
 var _fileSize = require("./constants/file-size");
-
 function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
-
 function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
-
 function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
-
 /**
 *
 * @format

@@ -61,48 +47,45 @@ function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj &&
 /**
 * Internal dependencies
 */
-const debug = (0, _debug.default)('vip:lib/client-file-uploader'); // Files smaller than COMPRESS_THRESHOLD will not be compressed before upload

-const
+const debug = (0, _debug.default)('vip:lib/client-file-uploader');
+
+// Files smaller than COMPRESS_THRESHOLD will not be compressed before upload
+const COMPRESS_THRESHOLD = 16 * _fileSize.MB_IN_BYTES;

+// Files smaller than MULTIPART_THRESHOLD will use `PutObject` vs Multipart Uploads
 exports.COMPRESS_THRESHOLD = COMPRESS_THRESHOLD;
-const MULTIPART_THRESHOLD = 32 * _fileSize.MB_IN_BYTES;
+const MULTIPART_THRESHOLD = 32 * _fileSize.MB_IN_BYTES;

+// This is how big each part of a Multipart Upload is (except the last / remainder)
 exports.MULTIPART_THRESHOLD = MULTIPART_THRESHOLD;
-const UPLOAD_PART_SIZE = 16 * _fileSize.MB_IN_BYTES;
+const UPLOAD_PART_SIZE = 16 * _fileSize.MB_IN_BYTES;

+// How many parts will upload at the same time
 const MAX_CONCURRENT_PART_UPLOADS = 5;
-
 const getWorkingTempDir = async () => new Promise((resolve, reject) => {
 _fs.default.mkdtemp(_path.default.join(_os.default.tmpdir(), 'vip-client-file-uploader'), (err, dir) => {
 if (err) {
 return reject(err);
 }
-
 resolve(dir);
 });
 });
-
 exports.getWorkingTempDir = getWorkingTempDir;
-
 const getFileMD5Hash = async fileName => new Promise((resolve, reject) => _fs.default.createReadStream(fileName).pipe((0, _crypto.createHash)('md5').setEncoding('hex')).on('finish', function () {
 resolve(this.read());
 }).on('error', error => reject(`could not generate file hash: ${error}`)));
-
 exports.getFileMD5Hash = getFileMD5Hash;
-
 const gzipFile = async (uncompressedFileName, compressedFileName) => new Promise((resolve, reject) => _fs.default.createReadStream(uncompressedFileName).pipe((0, _zlib.createGzip)()).pipe(_fs.default.createWriteStream(compressedFileName)).on('finish', resolve).on('error', error => reject(`could not compress file: ${error}`)));
-
 exports.gzipFile = gzipFile;
-
 async function getFileMeta(fileName) {
 return new Promise(async resolve => {
 const fileSize = await getFileSize(fileName);
+const basename = _path.default.basename(fileName);
+// TODO Validate File basename... encodeURIComponent, maybe...?

-const
-
-
-const mimeType = await detectCompressedMimeType(fileName); // TODO Only allow a subset of Mime Types...?
+const mimeType = await detectCompressedMimeType(fileName);
+// TODO Only allow a subset of Mime Types...?

 const isCompressed = ['application/zip', 'application/gzip'].includes(mimeType);
 resolve({

@@ -113,7 +96,6 @@ async function getFileMeta(fileName) {
 });
 });
 }
-
 async function uploadImportSqlFileToS3({
 app,
 env,

@@ -121,14 +103,14 @@ async function uploadImportSqlFileToS3({
 progressCallback
 }) {
 let tmpDir;
-
 try {
 tmpDir = await getWorkingTempDir();
 } catch (err) {
 throw `Unable to create temporary working directory: ${err}`;
 }
+debug(`File ${_chalk.default.cyan(fileMeta.basename)} is ~ ${Math.floor(fileMeta.fileSize / _fileSize.MB_IN_BYTES)} MB\n`);

-
+// TODO Compression will probably fail over a certain file size... break into pieces...?
 // TODO if needed add a flag to bypass auto-compression

 if (!fileMeta.isCompressed && fileMeta.fileSize >= COMPRESS_THRESHOLD) {

@@ -146,7 +128,6 @@ async function uploadImportSqlFileToS3({
 const calculation = `${(fewerBytes / _fileSize.MB_IN_BYTES).toFixed(2)}MB (${Math.floor(100 * fewerBytes / uncompressedFileSize)}%)`;
 debug(`** Compression resulted in a ${calculation} smaller file 📦 **\n`);
 }
-
 debug('Calculating file md5 checksum...');
 const md5 = await getFileMD5Hash(fileMeta.fileName);
 debug(`Calculated file md5 checksum: ${md5}\n`);

@@ -167,7 +148,6 @@ async function uploadImportSqlFileToS3({
 result
 };
 }
-
 async function uploadUsingPutObject({
 app,
 env,

@@ -187,43 +167,41 @@ async function uploadUsingPutObject({
 action: 'PutObject'
 });
 const fetchOptions = presignedRequest.options;
-fetchOptions.headers = {
+fetchOptions.headers = {
+...fetchOptions.headers,
 'Content-Length': `${fileSize}` // This has to be a string
-
 };
+
 let readBytes = 0;
 const progressPassThrough = new _stream.PassThrough();
 progressPassThrough.on('data', data => {
 readBytes += data.length;
 const percentage = Math.floor(100 * readBytes / fileSize) + '%';
 debug(percentage);
-
 if (typeof progressCallback === 'function') {
 progressCallback(percentage);
 }
 });
-const response = await (0, _nodeFetch.default)(presignedRequest.url, {
+const response = await (0, _nodeFetch.default)(presignedRequest.url, {
+...fetchOptions,
 body: fileContent ? fileContent : _fs.default.createReadStream(fileName).pipe(progressPassThrough)
 });
-
 if (response.status === 200) {
 return 'ok';
 }
+const result = await response.text();

-
-
+// TODO is any additional hardening needed here?
 const parser = new _xml2js.Parser({
 explicitArray: false,
 ignoreAttrs: true
 });
 let parsedResponse;
-
 try {
 parsedResponse = await parser.parseStringPromise(result);
 } catch (err) {
 throw `Invalid response from cloud service. ${err}`;
 }
-
 const {
 Code,
 Message

@@ -233,7 +211,6 @@ async function uploadUsingPutObject({
 Message
 })}`;
 }
-
 async function uploadUsingMultipart({
 app,
 env,

@@ -251,14 +228,14 @@ async function uploadUsingMultipart({
 action: 'CreateMultipartUpload'
 });
 const multipartUploadResponse = await (0, _nodeFetch.default)(presignedCreateMultipartUpload.url, presignedCreateMultipartUpload.options);
-const multipartUploadResult = await multipartUploadResponse.text();
+const multipartUploadResult = await multipartUploadResponse.text();

+// TODO is any hardening needed here?
 const parser = new _xml2js.Parser({
 explicitArray: false,
 ignoreAttrs: true
 });
 const parsedResponse = await parser.parseStringPromise(multipartUploadResult);
-
 if (parsedResponse.Error) {
 const {
 Code,

@@ -269,11 +246,9 @@ async function uploadUsingMultipart({
 Message
 })}`;
 }
-
 if (!parsedResponse && parsedResponse.InitiateMultipartUploadResult && parsedResponse.InitiateMultipartUploadResult.UploadId) {
 throw `Unable to get Upload ID from cloud storage. Error: ${multipartUploadResult}`;
 }
-
 const uploadId = parsedResponse.InitiateMultipartUploadResult.UploadId;
 debug({
 uploadId

@@ -298,7 +273,6 @@ async function uploadUsingMultipart({
 etagResults
 });
 }
-
 async function getSignedUploadRequestData({
 action,
 appId,

@@ -320,22 +294,17 @@ async function getSignedUploadRequestData({
 uploadId
 }
 });
-
 if (response.status !== 200) {
 throw (await response.text()) || response.statusText;
 }
-
 return response.json();
 }
-
 async function checkFileAccess(fileName) {
 return _fs.default.promises.access(fileName, _fs.default.R_OK);
 }
-
 async function getFileStats(fileName) {
 return _fs.default.promises.stat(fileName);
 }
-
 async function isFile(fileName) {
 try {
 const stats = await getFileStats(fileName);

@@ -345,12 +314,10 @@ async function isFile(fileName) {
 return false;
 }
 }
-
 async function getFileSize(fileName) {
 const stats = await getFileStats(fileName);
 return stats.size;
 }
-
 async function detectCompressedMimeType(fileName) {
 const ZIP_MAGIC_NUMBER = '504b0304';
 const GZ_MAGIC_NUMBER = '1f8b';

@@ -366,21 +333,17 @@ async function detectCompressedMimeType(fileName) {
 if (ZIP_MAGIC_NUMBER === fileHeader.slice(0, ZIP_MAGIC_NUMBER.length)) {
 return resolve('application/zip');
 }
-
 if (GZ_MAGIC_NUMBER === fileHeader.slice(0, GZ_MAGIC_NUMBER.length)) {
 return resolve('application/gzip');
 }
-
 resolve();
 });
 });
 }
-
 function getPartBoundaries(fileSize) {
 if (fileSize < 1) {
 throw 'fileSize must be greater than zero';
 }
-
 const numParts = Math.ceil(fileSize / UPLOAD_PART_SIZE);
 return new Array(numParts).fill(undefined).map((_numPart, index) => {
 const start = index * UPLOAD_PART_SIZE;

@@ -395,7 +358,6 @@ function getPartBoundaries(fileSize) {
 };
 });
 }
-
 async function uploadParts({
 app,
 env,

@@ -407,7 +369,6 @@ async function uploadParts({
 let uploadsInProgress = 0;
 let totalBytesRead = 0;
 const partPercentages = new Array(parts.length).fill(0);
-
 const readyForPartUpload = () => new Promise(resolve => {
 const canDoInterval = setInterval(() => {
 if (uploadsInProgress < MAX_CONCURRENT_PART_UPLOADS) {

@@ -417,14 +378,11 @@ async function uploadParts({
 }
 }, 300);
 });
-
 const updateProgress = () => {
 const percentage = Math.floor(100 * totalBytesRead / fileMeta.fileSize) + '%';
-
 if (typeof progressCallback === 'function') {
 progressCallback(percentage);
 }
-
 debug(partPercentages.map((partPercentage, index) => {
 const {
 partSize

@@ -432,7 +390,6 @@ async function uploadParts({
 return `Part # ${index}: ${partPercentage}% of ${(partSize / _fileSize.MB_IN_BYTES).toFixed(2)}MB`;
 }).join('\n') + `\n\nOverall Progress: ${percentage}% of ${(fileMeta.fileSize / _fileSize.MB_IN_BYTES).toFixed(2)}MB`);
 };
-
 const updateProgressInterval = setInterval(updateProgress, 500);
 const allDone = await Promise.all(parts.map(async part => {
 const {

@@ -462,7 +419,6 @@ async function uploadParts({
 updateProgress();
 return allDone;
 }
-
 async function uploadPart({
 app,
 env,

@@ -481,8 +437,8 @@ async function uploadPart({
 start
 } = part;
 const s3PartNumber = index + 1; // S3 multipart is indexed from 1
-// TODO: handle failures / retries, etc.

+// TODO: handle failures / retries, etc.
 const doUpload = async () => {
 // Get the signed request data from Parker
 const partUploadRequestData = await getSignedUploadRequestData({

@@ -494,37 +450,35 @@ async function uploadPart({
 uploadId
 });
 const fetchOptions = partUploadRequestData.options;
-fetchOptions.headers = {
+fetchOptions.headers = {
+...fetchOptions.headers,
 'Content-Length': `${partSize}` // This has to be a string
-
 /**
 * TODO? 'Content-MD5': Buffer.from( ... ).toString( 'base64' ),
 * Content-MD5 has to be base64 encoded.
 * It's the hash of the entire request object & has to be included in the signature,
 * ...so it may not be feasible to include with presigned requests.
 */
-
 };
+
 fetchOptions.body = _fs.default.createReadStream(fileName, {
 start,
 end
 }).pipe(progressPassThrough);
 const fetchResponse = await (0, _nodeFetch.default)(partUploadRequestData.url, fetchOptions);
-
 if (fetchResponse.status === 200) {
 const responseHeaders = fetchResponse.headers.raw();
 const [etag] = responseHeaders.etag;
 return JSON.parse(etag);
 }
+const result = await fetchResponse.text();

-
-
+// TODO is any hardening needed here?
 const parser = new _xml2js.Parser({
 explicitArray: false,
 ignoreAttrs: true
 });
 const parsed = await parser.parseStringPromise(result);
-
 if (parsed.Error) {
 const {
 Code,

@@ -535,16 +489,13 @@ async function uploadPart({
 Message
 })}`;
 }
-
 return parsed;
 };
-
 return {
 ETag: await doUpload(),
 PartNumber: s3PartNumber
 };
 }
-
 async function completeMultipartUpload({
 app,
 env,

@@ -561,10 +512,10 @@ async function completeMultipartUpload({
 etagResults
 });
 const completeMultipartUploadResponse = await (0, _nodeFetch.default)(completeMultipartUploadRequestData.url, completeMultipartUploadRequestData.options);
-
 if (completeMultipartUploadResponse.status !== 200) {
 throw await completeMultipartUploadResponse.text();
 }
+
 /**
 * Processing of a Complete Multipart Upload request could take several minutes to complete.
 * After Amazon S3 begins processing the request, it sends an HTTP response header that specifies a 200 OK response.

@@ -575,15 +526,12 @@ async function completeMultipartUpload({
 *
 * https://docs.aws.amazon.com/AmazonS3/latest/API/API_CompleteMultipartUpload.html
 */
-
-
 const result = await completeMultipartUploadResponse.text();
 const parser = new _xml2js.Parser({
 explicitArray: false,
 ignoreAttrs: true
 });
 const parsed = await parser.parseStringPromise(result);
-
 if (parsed.Error) {
 const {
 Code,

@@ -594,6 +542,5 @@ async function completeMultipartUpload({
 Message
 })}`;
 }
-
 return parsed;
 }