@sanity/export 2.30.2-shopify.2 → 3.0.0-dev-preview.7
This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
- package/lib/AssetHandler.js +150 -202
- package/lib/export.js +151 -178
- package/lib/filterDocumentTypes.js +2 -2
- package/lib/filterDrafts.js +2 -2
- package/lib/filterSystemDocuments.js +3 -3
- package/lib/getDocumentsStream.js +10 -17
- package/lib/logFirstChunk.js +4 -4
- package/lib/rejectOnApiError.js +1 -1
- package/lib/requestStream.js +40 -61
- package/lib/stringifyStream.js +1 -1
- package/lib/tryParseJson.js +4 -4
- package/lib/util/rimraf.js +9 -0
- package/lib/validateOptions.js +7 -7
- package/package.json +11 -11
- package/src/AssetHandler.js +6 -5
- package/src/export.js +4 -3
- package/src/util/rimraf.js +4 -0
- package/jest.config.js +0 -6
package/lib/export.js
CHANGED
@@ -1,65 +1,63 @@
 "use strict";

-…
+const os = require('os');

-…
+const path = require('path');

-…
+const zlib = require('zlib');

-…
+const fs = require('fs');

-…
+const miss = require('mississippi');

-…
+const split = require('split2');

-…
+const archiver = require('archiver');

-…
+const rimraf = require('./util/rimraf');

-…
+const debug = require('./debug');

-…
+const AssetHandler = require('./AssetHandler');

-…
+const stringifyStream = require('./stringifyStream');

-…
+const validateOptions = require('./validateOptions');

-…
+const rejectOnApiError = require('./rejectOnApiError');

-…
+const getDocumentsStream = require('./getDocumentsStream');

-…
+const filterSystemDocuments = require('./filterSystemDocuments');

-…
+const filterDocumentTypes = require('./filterDocumentTypes');

-…
+const filterDrafts = require('./filterDrafts');

-…
+const logFirstChunk = require('./logFirstChunk');

-…
+const tryParseJson = require('./tryParseJson');

-… (2 lines)
-var noop = () => null;
+const noop = () => null;

 function exportDataset(opts) {
-… (3 lines)
+  const options = validateOptions(opts);
+  const onProgress = options.onProgress || noop;
+  const archive = archiver('tar', {
     gzip: true,
     gzipOptions: {
       level: options.compress ? zlib.Z_DEFAULT_COMPRESSION : zlib.Z_NO_COMPRESSION
     }
   });
-… (3 lines)
+  const slugDate = new Date().toISOString().replace(/[^a-z0-9]/gi, '-').toLowerCase();
+  const prefix = "".concat(opts.dataset, "-export-").concat(slugDate);
+  const tmpDir = path.join(os.tmpdir(), prefix);

-…
+  const cleanup = () => rimraf(tmpDir).catch(err => {
     debug("Error while cleaning up temporary files: ".concat(err.message));
   });

-…
+  const assetHandler = new AssetHandler({
     client: options.client,
     tmpDir,
     prefix,
@@ -67,179 +65,154 @@ function exportDataset(opts) {
   });
   debug('Outputting assets (temporarily) to %s', tmpDir);
   debug('Outputting to %s', options.outputPath === '-' ? 'stdout' : options.outputPath);
-…
+  let outputStream;

   if (isWritableStream(options.outputPath)) {
     outputStream = options.outputPath;
   } else {
-    outputStream = options.outputPath === '-' ? process.stdout :
+    outputStream = options.outputPath === '-' ? process.stdout : fs.createWriteStream(options.outputPath);
   }

-…
+  let assetStreamHandler = assetHandler.noop;

   if (!options.raw) {
     assetStreamHandler = options.assets ? assetHandler.rewriteAssets : assetHandler.stripAssets;
   }

-  return new Promise(
-… (11 lines)
+  return new Promise(async (resolve, reject) => {
+    miss.finished(archive, async archiveErr => {
+      if (archiveErr) {
+        debug('Archiving errored! %s', archiveErr.stack);
+        await cleanup();
+        reject(archiveErr);
+        return;
+      }
+
+      debug('Archive finished!');
+    });
+    debug('Getting dataset export stream');
+    onProgress({
+      step: 'Exporting documents...'
+    });
+    let documentCount = 0;
+    let lastReported = Date.now();
+
+    const reportDocumentCount = (chunk, enc, cb) => {
+      ++documentCount;
+      const now = Date.now();
+
+      if (now - lastReported > 50) {
+        onProgress({
+          step: 'Exporting documents...',
+          current: documentCount,
+          total: '?',
+          update: true
         });
+        lastReported = now;
+      }
+
+      cb(null, chunk);
+    };
+
+    const inputStream = await getDocumentsStream(options.client, options.dataset);
+    debug('Got HTTP %d', inputStream.statusCode);
+    debug('Response headers: %o', inputStream.headers);
+    const jsonStream = miss.pipeline(inputStream, logFirstChunk(), split(tryParseJson), rejectOnApiError(), filterSystemDocuments(), assetStreamHandler, filterDocumentTypes(options.types), options.drafts ? miss.through.obj() : filterDrafts(), stringifyStream(), miss.through(reportDocumentCount));
+    miss.finished(jsonStream, async err => {
+      if (err) {
+        return;
+      }

-      return function (_x3) {
-        return _ref2.apply(this, arguments);
-      };
-    }());
-    debug('Getting dataset export stream');
       onProgress({
-        step: 'Exporting documents...'
+        step: 'Exporting documents...',
+        current: documentCount,
+        total: documentCount,
+        update: true
       });
-… (13 lines)
-      });
-      lastReported = now;
+
+      if (!options.raw && options.assets) {
+        onProgress({
+          step: 'Downloading assets...'
+        });
+      }
+
+      let prevCompleted = 0;
+      const progressInterval = setInterval(() => {
+        const completed = assetHandler.queueSize - assetHandler.queue.size - assetHandler.queue.pending;
+
+        if (prevCompleted === completed) {
+          return;
         }

-… (6 lines)
-    var jsonStream = miss.pipeline(inputStream, logFirstChunk(), split(tryParseJson), rejectOnApiError(), filterSystemDocuments(), assetStreamHandler, filterDocumentTypes(options.types), options.drafts ? miss.through.obj() : filterDrafts(), stringifyStream(), miss.through(reportDocumentCount));
-    miss.finished(jsonStream, /*#__PURE__*/function () {
-      var _ref3 = _asyncToGenerator(function* (err) {
-        if (err) {
-          return;
-        }
-
-        onProgress({
-          step: 'Exporting documents...',
-          current: documentCount,
-          total: documentCount,
-          update: true
-        });
-
-        if (!options.raw && options.assets) {
-          onProgress({
-            step: 'Downloading assets...'
-          });
-        }
-
-        var prevCompleted = 0;
-        var progressInterval = setInterval(() => {
-          var completed = assetHandler.queueSize - assetHandler.queue.size - assetHandler.queue.pending;
-
-          if (prevCompleted === completed) {
-            return;
-          }
-
-          prevCompleted = completed;
-          onProgress({
-            step: 'Downloading assets...',
-            current: completed,
-            total: assetHandler.queueSize,
-            update: true
-          });
-        }, 500);
-        debug('Waiting for asset handler to complete downloads');
-
-        try {
-          var assetMap = yield assetHandler.finish(); // Make sure we mark the progress as done (eg 100/100 instead of 99/100)
-
-          onProgress({
-            step: 'Downloading assets...',
-            current: assetHandler.queueSize,
-            total: assetHandler.queueSize,
-            update: true
-          });
-          archive.append(JSON.stringify(assetMap), {
-            name: 'assets.json',
-            prefix
-          });
-          clearInterval(progressInterval);
-        } catch (assetErr) {
-          clearInterval(progressInterval);
-          yield cleanup();
-          reject(assetErr);
-          return;
-        } // Add all downloaded assets to archive
-
-
-        archive.directory(path.join(tmpDir, 'files'), "".concat(prefix, "/files"), {
-          store: true
-        });
-        archive.directory(path.join(tmpDir, 'images'), "".concat(prefix, "/images"), {
-          store: true
-        });
-        debug('Finalizing archive, flushing streams');
-        onProgress({
-          step: 'Adding assets to archive...'
-        });
-        archive.finalize();
+        prevCompleted = completed;
+        onProgress({
+          step: 'Downloading assets...',
+          current: completed,
+          total: assetHandler.queueSize,
+          update: true
        });
+      }, 500);
+      debug('Waiting for asset handler to complete downloads');

-… (6 lines)
+      try {
+        const assetMap = await assetHandler.finish(); // Make sure we mark the progress as done (eg 100/100 instead of 99/100)
+
+        onProgress({
+          step: 'Downloading assets...',
+          current: assetHandler.queueSize,
+          total: assetHandler.queueSize,
+          update: true
+        });
+        archive.append(JSON.stringify(assetMap), {
+          name: 'assets.json',
+          prefix
+        });
+        clearInterval(progressInterval);
+      } catch (assetErr) {
+        clearInterval(progressInterval);
+        await cleanup();
+        reject(assetErr);
+        return;
+      } // Add all downloaded assets to archive
+
+
+      archive.directory(path.join(tmpDir, 'files'), "".concat(prefix, "/files"), {
+        store: true
+      });
+      archive.directory(path.join(tmpDir, 'images'), "".concat(prefix, "/images"), {
+        store: true
      });
-    archive
-… (2 lines)
+      debug('Finalizing archive, flushing streams');
+      onProgress({
+        step: 'Adding assets to archive...'
      });
-…
+      archive.finalize();
+    });
+    archive.on('warning', err => {
+      debug('Archive warning: %s', err.message);
+    });
+    archive.append(jsonStream, {
+      name: 'data.ndjson',
+      prefix
+    });
+    miss.pipe(archive, outputStream, onComplete);

-… (3 lines)
+    async function onComplete(err) {
+      onProgress({
+        step: 'Clearing temporary files...'
+      });
+      await cleanup();

-… (3 lines)
-      step: 'Clearing temporary files...'
-    });
-    yield cleanup();
-
-    if (!err) {
-      resolve();
-      return;
-    }
-
-    debug('Error during streaming: %s', err.stack);
-    assetHandler.clear();
-    reject(err);
-  });
-  return _onComplete.apply(this, arguments);
+      if (!err) {
+        resolve();
+        return;
      }
-  });

-… (4 lines)
+      debug('Error during streaming: %s', err.stack);
+      assetHandler.clear();
+      reject(err);
+    }
+  });
 }

 function isWritableStream(val) {
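The rewrite above replaces the Babel generator machinery (_asyncToGenerator, yield-based executors, _ref helpers) with native async/await while keeping the export flow itself intact: validate options, stream documents through the filter pipeline into data.ndjson, optionally download assets, then finalize a gzipped tar. A minimal caller sketch, inferred only from the option names this file references — the authoritative option handling lives in lib/validateOptions.js, which this diff does not show, and the client construction below is an assumption:

const sanityClient = require('@sanity/client'); // assumed: any client exposing getUrl() and config().token
const exportDataset = require('@sanity/export');

const client = sanityClient({
  projectId: 'abc123', // hypothetical project
  dataset: 'production',
  token: process.env.SANITY_TOKEN,
  useCdn: false
});

exportDataset({
  client,                // used by getDocumentsStream for the URL and token
  dataset: 'production', // also used in the "<dataset>-export-<date>" prefix
  outputPath: '-',       // '-' streams the tar.gz to stdout; a path or writable stream also works
  assets: true,          // rewriteAssets vs stripAssets (ignored when raw is true)
  drafts: false,         // false routes documents through filterDrafts()
  compress: true,        // Z_DEFAULT_COMPRESSION vs Z_NO_COMPRESSION for the gzip layer
  onProgress: ({step, current, total, update}) =>
    console.error(update ? `${step} ${current}/${total}` : step)
}).then(() => console.error('export finished'));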
package/lib/filterDocumentTypes.js
CHANGED

@@ -1,9 +1,9 @@
 "use strict";

-…
+const miss = require('mississippi');

 module.exports = allowedTypes => allowedTypes ? miss.through.obj((doc, enc, callback) => {
-…
+  const type = doc && doc._type;

   if (allowedTypes.includes(type)) {
     callback(null, doc);
package/lib/filterDrafts.js
CHANGED
@@ -1,8 +1,8 @@
 "use strict";

-…
+const miss = require('mississippi');

-…
+const isDraft = doc => doc && doc._id && doc._id.indexOf('drafts.') === 0;

 module.exports = () => miss.through.obj((doc, enc, callback) => {
   if (isDraft(doc)) {
package/lib/filterSystemDocuments.js
CHANGED

@@ -1,10 +1,10 @@
 "use strict";

-…
+const miss = require('mississippi');

-…
+const debug = require('./debug');

-…
+const isSystemDocument = doc => doc && doc._id && doc._id.indexOf('_.') === 0;

 module.exports = () => miss.through.obj((doc, enc, callback) => {
   if (isSystemDocument(doc)) {
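All three filter modules share the same shape: an object-mode through stream that forwards a document with callback(null, doc) or silently drops it by calling the callback without a value. A sketch of how they compose, mirroring the stages export.js passes to its miss.pipeline call above (the ['post'] argument is illustrative; filterDocumentTypes' no-types fallback branch sits outside the hunk shown):

const miss = require('mississippi');
const filterSystemDocuments = require('./filterSystemDocuments');
const filterDocumentTypes = require('./filterDocumentTypes');
const filterDrafts = require('./filterDrafts');

// Object-mode composition: documents flow top to bottom, and any stage
// may drop a document without ending the stream.
const filters = miss.pipeline.obj(
  filterSystemDocuments(),        // drops documents whose _id begins with "_."
  filterDocumentTypes(['post']),  // keeps only the listed _type values
  filterDrafts()                  // drops documents whose _id begins with "drafts."
);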
package/lib/getDocumentsStream.js
CHANGED

@@ -1,27 +1,20 @@
 "use strict";

-…
+const pkg = require('../package.json');

-… (2 lines)
-function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-…
-var pkg = require('../package.json');
-…
-var requestStream = require('./requestStream');
+const requestStream = require('./requestStream');

 module.exports = (client, dataset) => {
   // Sanity client doesn't handle streams natively since we want to support node/browser
   // with same API. We're just using it here to get hold of URLs and tokens.
-… (7 lines)
-  }
-…
+  const url = client.getUrl("/data/export/".concat(dataset));
+  const token = client.config().token;
+  const headers = {
+    'User-Agent': "".concat(pkg.name, "@").concat(pkg.version),
+    ...(token ? {
+      Authorization: "Bearer ".concat(token)
+    } : {})
+  };
   return requestStream({
     url,
     headers
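The rewritten module inlines what the transpiled _defineProperty/spread helpers used to do: resolve the export URL from the client, add a User-Agent header, and attach a Bearer token only when one is configured. A usage sketch (client is assumed to be an @sanity/client instance; the exact host in the resulting URL depends on its configuration):

const getDocumentsStream = require('./getDocumentsStream');

getDocumentsStream(client, 'production').then(response => {
  // response is a streaming HTTP response of newline-delimited JSON;
  // export.js inspects response.statusCode and response.headers before
  // piping it into the document pipeline.
  response.pipe(process.stdout);
});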
package/lib/logFirstChunk.js
CHANGED
@@ -1,14 +1,14 @@
 "use strict";

-…
+const miss = require('mississippi');

-…
+const debug = require('./debug');

 module.exports = () => {
-…
+  let firstChunk = true;
   return miss.through((chunk, enc, callback) => {
     if (firstChunk) {
-…
+      const string = chunk.toString('utf8').split('\n')[0];
       debug('First chunk received: %s', string.slice(0, 300));
       firstChunk = false;
     }
package/lib/rejectOnApiError.js
CHANGED
package/lib/requestStream.js
CHANGED
@@ -1,32 +1,23 @@
 "use strict";

-…
+const getIt = require('get-it');

-…
+const {
+  keepAlive,
+  promise
+} = require('get-it/middleware');

-…
+const debug = require('./debug');

-… (2 lines)
-function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
-…
-var getIt = require('get-it');
-…
-var _require = require('get-it/middleware'),
-    keepAlive = _require.keepAlive,
-    promise = _require.promise;
-…
-var debug = require('./debug');
-…
-var request = getIt([keepAlive(), promise({
+const request = getIt([keepAlive(), promise({
   onlyBody: true
 })]);
-… (2 lines)
+const socketsWithTimeout = new WeakSet();
+const CONNECTION_TIMEOUT = 15 * 1000; // 15 seconds

-…
+const READ_TIMEOUT = 3 * 60 * 1000; // 3 minutes

-…
+const MAX_RETRIES = 5;

 function delay(ms) {
   return new Promise(resolve => setTimeout(resolve, ms));
@@ -34,51 +25,39 @@ function delay(ms) {
 /* eslint-disable no-await-in-loop, max-depth */


-module.exports =
-…
-  var error;
-…
-  for (var i = 0; i < MAX_RETRIES; i++) {
-    try {
-      var _ret = yield* function* () {
-        var response = yield request(_objectSpread(_objectSpread({}, options), {}, {
-          stream: true,
-          maxRedirects: 0,
-          timeout: {
-            connect: CONNECTION_TIMEOUT,
-            socket: READ_TIMEOUT
-          }
-        }));
+module.exports = async options => {
+  let error;

-… (10 lines)
-      }();
+  for (let i = 0; i < MAX_RETRIES; i++) {
+    try {
+      const response = await request({ ...options,
+        stream: true,
+        maxRedirects: 0,
+        timeout: {
+          connect: CONNECTION_TIMEOUT,
+          socket: READ_TIMEOUT
+        }
+      });

-… (3 lines)
+      if (response.connection && typeof response.connection.setTimeout === 'function' && !socketsWithTimeout.has(response.connection)) {
+        socketsWithTimeout.add(response.connection);
+        response.connection.setTimeout(READ_TIMEOUT, () => {
+          response.destroy(new Error("Read timeout: No data received on socket for ".concat(READ_TIMEOUT, " ms")));
+        });
+      }

-… (3 lines)
+      return response;
+    } catch (err) {
+      error = err;

-… (2 lines)
+      if (err.response && err.response.statusCode && err.response.statusCode < 500) {
+        break;
       }
-      }

-… (2 lines)
+      debug('Error, retrying after 1500ms: %s', err.message);
+      await delay(1500);
+    }
+  }

-… (2 lines)
-  };
-}();
+  throw error;
+};
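Behaviorally the module is unchanged by the rewrite: up to MAX_RETRIES (5) attempts, a fixed 1500 ms pause between them, an immediate bail-out when the response status is below 500 (client errors will not improve on retry), an idle-socket watchdog armed once per connection via the WeakSet, and the last error rethrown when all attempts fail. The same retry policy, distilled into a standalone sketch (withRetries is a hypothetical helper, not part of this package):

async function withRetries(attempt, {retries = 5, pauseMs = 1500} = {}) {
  let lastError;
  for (let i = 0; i < retries; i++) {
    try {
      return await attempt(); // e.g. () => request({url, stream: true, ...})
    } catch (err) {
      lastError = err;
      const status = err.response && err.response.statusCode;
      if (status && status < 500) break; // 4xx: give up immediately
      await new Promise(resolve => setTimeout(resolve, pauseMs));
    }
  }
  throw lastError;
}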
package/lib/stringifyStream.js
CHANGED
package/lib/tryParseJson.js
CHANGED
@@ -5,16 +5,16 @@ module.exports = line => {
     return JSON.parse(line);
   } catch (err) {
     // Catch half-done lines with an error at the end
-…
+    const errorPosition = line.lastIndexOf('{"error":');

     if (errorPosition === -1) {
       err.message = "".concat(err.message, " (").concat(line, ")");
       throw err;
     }

-… (3 lines)
+    const errorJson = line.slice(errorPosition);
+    const errorLine = JSON.parse(errorJson);
+    const error = errorLine && errorLine.error;

     if (error && error.description) {
       throw new Error("Error streaming dataset: ".concat(error.description, "\n\n").concat(errorJson, "\n"));
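export.js feeds this function to split2 as its mapper (split(tryParseJson)), so every newline-delimited line of the export passes through it. The change is const-for-var only; the recovery path for a line the backend cut short and terminated with an error payload is untouched. Illustrative inputs (hypothetical data):

const tryParseJson = require('./tryParseJson');

// A well-formed line parses into a document object:
tryParseJson('{"_id":"movie_1","_type":"movie"}');

// A truncated line ending in an API error object throws with the
// extracted description rather than a bare JSON.parse error:
tryParseJson('{"_id":"movie_1","_ty{"error":{"description":"stream interrupted"}}');
// => Error: Error streaming dataset: stream interrupted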