@sanity/export 2.29.5-purple-unicorn-remix.873 → 2.30.2-shopify.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/export.js CHANGED
@@ -1,63 +1,65 @@
  "use strict";

- const os = require('os');
+ function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }

- const path = require('path');
+ function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }

- const zlib = require('zlib');
+ var os = require('os');

- const fs = require('fs');
+ var path = require('path');

- const miss = require('mississippi');
+ var zlib = require('zlib');

- const split = require('split2');
+ var fse = require('fs-extra');

- const archiver = require('archiver');
+ var miss = require('mississippi');

- const rimraf = require('./util/rimraf');
+ var split = require('split2');

- const debug = require('./debug');
+ var archiver = require('archiver');

- const AssetHandler = require('./AssetHandler');
+ var debug = require('./debug');

- const stringifyStream = require('./stringifyStream');
+ var AssetHandler = require('./AssetHandler');

- const validateOptions = require('./validateOptions');
+ var stringifyStream = require('./stringifyStream');

- const rejectOnApiError = require('./rejectOnApiError');
+ var validateOptions = require('./validateOptions');

- const getDocumentsStream = require('./getDocumentsStream');
+ var rejectOnApiError = require('./rejectOnApiError');

- const filterSystemDocuments = require('./filterSystemDocuments');
+ var getDocumentsStream = require('./getDocumentsStream');

- const filterDocumentTypes = require('./filterDocumentTypes');
+ var filterSystemDocuments = require('./filterSystemDocuments');

- const filterDrafts = require('./filterDrafts');
+ var filterDocumentTypes = require('./filterDocumentTypes');

- const logFirstChunk = require('./logFirstChunk');
+ var filterDrafts = require('./filterDrafts');

- const tryParseJson = require('./tryParseJson');
+ var logFirstChunk = require('./logFirstChunk');

- const noop = () => null;
+ var tryParseJson = require('./tryParseJson');
+
+ var noop = () => null;

  function exportDataset(opts) {
-   const options = validateOptions(opts);
-   const onProgress = options.onProgress || noop;
-   const archive = archiver('tar', {
+   var options = validateOptions(opts);
+   var onProgress = options.onProgress || noop;
+   var archive = archiver('tar', {
      gzip: true,
      gzipOptions: {
        level: options.compress ? zlib.Z_DEFAULT_COMPRESSION : zlib.Z_NO_COMPRESSION
      }
    });
-   const slugDate = new Date().toISOString().replace(/[^a-z0-9]/gi, '-').toLowerCase();
-   const prefix = "".concat(opts.dataset, "-export-").concat(slugDate);
-   const tmpDir = path.join(os.tmpdir(), prefix);
+   var slugDate = new Date().toISOString().replace(/[^a-z0-9]/gi, '-').toLowerCase();
+   var prefix = "".concat(opts.dataset, "-export-").concat(slugDate);
+   var tmpDir = path.join(os.tmpdir(), prefix);

-   const cleanup = () => rimraf(tmpDir).catch(err => {
+   var cleanup = () => fse.remove(tmpDir).catch(err => {
      debug("Error while cleaning up temporary files: ".concat(err.message));
    });

-   const assetHandler = new AssetHandler({
+   var assetHandler = new AssetHandler({
      client: options.client,
      tmpDir,
      prefix,
@@ -65,154 +67,179 @@ function exportDataset(opts) {
    });
    debug('Outputting assets (temporarily) to %s', tmpDir);
    debug('Outputting to %s', options.outputPath === '-' ? 'stdout' : options.outputPath);
-   let outputStream;
+   var outputStream;

    if (isWritableStream(options.outputPath)) {
      outputStream = options.outputPath;
    } else {
-     outputStream = options.outputPath === '-' ? process.stdout : fs.createWriteStream(options.outputPath);
+     outputStream = options.outputPath === '-' ? process.stdout : fse.createWriteStream(options.outputPath);
    }

-   let assetStreamHandler = assetHandler.noop;
+   var assetStreamHandler = assetHandler.noop;

    if (!options.raw) {
      assetStreamHandler = options.assets ? assetHandler.rewriteAssets : assetHandler.stripAssets;
    }

-   return new Promise(async (resolve, reject) => {
-     miss.finished(archive, async archiveErr => {
-       if (archiveErr) {
-         debug('Archiving errored! %s', archiveErr.stack);
-         await cleanup();
-         reject(archiveErr);
-         return;
-       }
-
-       debug('Archive finished!');
-     });
-     debug('Getting dataset export stream');
-     onProgress({
-       step: 'Exporting documents...'
-     });
-     let documentCount = 0;
-     let lastReported = Date.now();
-
-     const reportDocumentCount = (chunk, enc, cb) => {
-       ++documentCount;
-       const now = Date.now();
-
-       if (now - lastReported > 50) {
-         onProgress({
-           step: 'Exporting documents...',
-           current: documentCount,
-           total: '?',
-           update: true
+   return new Promise( /*#__PURE__*/function () {
+     var _ref = _asyncToGenerator(function* (resolve, reject) {
+       miss.finished(archive, /*#__PURE__*/function () {
+         var _ref2 = _asyncToGenerator(function* (archiveErr) {
+           if (archiveErr) {
+             debug('Archiving errored! %s', archiveErr.stack);
+             yield cleanup();
+             reject(archiveErr);
+             return;
+           }
+
+           debug('Archive finished!');
          });
-         lastReported = now;
-       }
-
-       cb(null, chunk);
-     };
-
-     const inputStream = await getDocumentsStream(options.client, options.dataset);
-     debug('Got HTTP %d', inputStream.statusCode);
-     debug('Response headers: %o', inputStream.headers);
-     const jsonStream = miss.pipeline(inputStream, logFirstChunk(), split(tryParseJson), rejectOnApiError(), filterSystemDocuments(), assetStreamHandler, filterDocumentTypes(options.types), options.drafts ? miss.through.obj() : filterDrafts(), stringifyStream(), miss.through(reportDocumentCount));
-     miss.finished(jsonStream, async err => {
-       if (err) {
-         return;
-       }

+         return function (_x3) {
+           return _ref2.apply(this, arguments);
+         };
+       }());
+       debug('Getting dataset export stream');
        onProgress({
-         step: 'Exporting documents...',
-         current: documentCount,
-         total: documentCount,
-         update: true
+         step: 'Exporting documents...'
        });
-
-     if (!options.raw && options.assets) {
-       onProgress({
-         step: 'Downloading assets...'
-       });
-     }
-
-     let prevCompleted = 0;
-     const progressInterval = setInterval(() => {
-       const completed = assetHandler.queueSize - assetHandler.queue.size - assetHandler.queue.pending;
-
-       if (prevCompleted === completed) {
-         return;
+       var documentCount = 0;
+       var lastReported = Date.now();
+
+       var reportDocumentCount = (chunk, enc, cb) => {
+         ++documentCount;
+         var now = Date.now();
+
+         if (now - lastReported > 50) {
+           onProgress({
+             step: 'Exporting documents...',
+             current: documentCount,
+             total: '?',
+             update: true
+           });
+           lastReported = now;
          }

-       prevCompleted = completed;
-       onProgress({
-         step: 'Downloading assets...',
-         current: completed,
-         total: assetHandler.queueSize,
-         update: true
+         cb(null, chunk);
+       };
+
+       var inputStream = yield getDocumentsStream(options.client, options.dataset);
+       debug('Got HTTP %d', inputStream.statusCode);
+       debug('Response headers: %o', inputStream.headers);
+       var jsonStream = miss.pipeline(inputStream, logFirstChunk(), split(tryParseJson), rejectOnApiError(), filterSystemDocuments(), assetStreamHandler, filterDocumentTypes(options.types), options.drafts ? miss.through.obj() : filterDrafts(), stringifyStream(), miss.through(reportDocumentCount));
+       miss.finished(jsonStream, /*#__PURE__*/function () {
+         var _ref3 = _asyncToGenerator(function* (err) {
+           if (err) {
+             return;
+           }
+
+           onProgress({
+             step: 'Exporting documents...',
+             current: documentCount,
+             total: documentCount,
+             update: true
+           });
+
+           if (!options.raw && options.assets) {
+             onProgress({
+               step: 'Downloading assets...'
+             });
+           }
+
+           var prevCompleted = 0;
+           var progressInterval = setInterval(() => {
+             var completed = assetHandler.queueSize - assetHandler.queue.size - assetHandler.queue.pending;
+
+             if (prevCompleted === completed) {
+               return;
+             }
+
+             prevCompleted = completed;
+             onProgress({
+               step: 'Downloading assets...',
+               current: completed,
+               total: assetHandler.queueSize,
+               update: true
+             });
+           }, 500);
+           debug('Waiting for asset handler to complete downloads');
+
+           try {
+             var assetMap = yield assetHandler.finish(); // Make sure we mark the progress as done (eg 100/100 instead of 99/100)
+
+             onProgress({
+               step: 'Downloading assets...',
+               current: assetHandler.queueSize,
+               total: assetHandler.queueSize,
+               update: true
+             });
+             archive.append(JSON.stringify(assetMap), {
+               name: 'assets.json',
+               prefix
+             });
+             clearInterval(progressInterval);
+           } catch (assetErr) {
+             clearInterval(progressInterval);
+             yield cleanup();
+             reject(assetErr);
+             return;
+           } // Add all downloaded assets to archive
+
+
+           archive.directory(path.join(tmpDir, 'files'), "".concat(prefix, "/files"), {
+             store: true
+           });
+           archive.directory(path.join(tmpDir, 'images'), "".concat(prefix, "/images"), {
+             store: true
+           });
+           debug('Finalizing archive, flushing streams');
+           onProgress({
+             step: 'Adding assets to archive...'
+           });
+           archive.finalize();
          });
-     }, 500);
-     debug('Waiting for asset handler to complete downloads');

-     try {
-       const assetMap = await assetHandler.finish(); // Make sure we mark the progress as done (eg 100/100 instead of 99/100)
-
-       onProgress({
-         step: 'Downloading assets...',
-         current: assetHandler.queueSize,
-         total: assetHandler.queueSize,
-         update: true
-       });
-       archive.append(JSON.stringify(assetMap), {
-         name: 'assets.json',
-         prefix
-       });
-       clearInterval(progressInterval);
-     } catch (assetErr) {
-       clearInterval(progressInterval);
-       await cleanup();
-       reject(assetErr);
-       return;
-     } // Add all downloaded assets to archive
-
-
-     archive.directory(path.join(tmpDir, 'files'), "".concat(prefix, "/files"), {
-       store: true
-     });
-     archive.directory(path.join(tmpDir, 'images'), "".concat(prefix, "/images"), {
-       store: true
+         return function (_x4) {
+           return _ref3.apply(this, arguments);
+         };
+       }());
+       archive.on('warning', err => {
+         debug('Archive warning: %s', err.message);
        });
-     debug('Finalizing archive, flushing streams');
-     onProgress({
-       step: 'Adding assets to archive...'
+       archive.append(jsonStream, {
+         name: 'data.ndjson',
+         prefix
        });
-     archive.finalize();
-   });
-   archive.on('warning', err => {
-     debug('Archive warning: %s', err.message);
-   });
-   archive.append(jsonStream, {
-     name: 'data.ndjson',
-     prefix
-   });
-   miss.pipe(archive, outputStream, onComplete);
+       miss.pipe(archive, outputStream, onComplete);

-   async function onComplete(err) {
-     onProgress({
-       step: 'Clearing temporary files...'
-     });
-     await cleanup();
+       function onComplete(_x5) {
+         return _onComplete.apply(this, arguments);
+       }

-     if (!err) {
-       resolve();
-       return;
+       function _onComplete() {
+         _onComplete = _asyncToGenerator(function* (err) {
+           onProgress({
+             step: 'Clearing temporary files...'
+           });
+           yield cleanup();
+
+           if (!err) {
+             resolve();
+             return;
+           }
+
+           debug('Error during streaming: %s', err.stack);
+           assetHandler.clear();
+           reject(err);
+         });
+         return _onComplete.apply(this, arguments);
        }
+     });

-     debug('Error during streaming: %s', err.stack);
-     assetHandler.clear();
-     reject(err);
-   }
-   });
+     return function (_x, _x2) {
+       return _ref.apply(this, arguments);
+     };
+   }());
  }

  function isWritableStream(val) {

package/lib/filterDocumentTypes.js CHANGED
@@ -1,9 +1,9 @@
  "use strict";

- const miss = require('mississippi');
+ var miss = require('mississippi');

  module.exports = allowedTypes => allowedTypes ? miss.through.obj((doc, enc, callback) => {
-   const type = doc && doc._type;
+   var type = doc && doc._type;

    if (allowedTypes.includes(type)) {
      callback(null, doc);

package/lib/filterDrafts.js CHANGED
@@ -1,8 +1,8 @@
  "use strict";

- const miss = require('mississippi');
+ var miss = require('mississippi');

- const isDraft = doc => doc && doc._id && doc._id.indexOf('drafts.') === 0;
+ var isDraft = doc => doc && doc._id && doc._id.indexOf('drafts.') === 0;

  module.exports = () => miss.through.obj((doc, enc, callback) => {
    if (isDraft(doc)) {

package/lib/filterSystemDocuments.js CHANGED
@@ -1,10 +1,10 @@
  "use strict";

- const miss = require('mississippi');
+ var miss = require('mississippi');

- const debug = require('./debug');
+ var debug = require('./debug');

- const isSystemDocument = doc => doc && doc._id && doc._id.indexOf('_.') === 0;
+ var isSystemDocument = doc => doc && doc._id && doc._id.indexOf('_.') === 0;

  module.exports = () => miss.through.obj((doc, enc, callback) => {
    if (isSystemDocument(doc)) {

package/lib/getDocumentsStream.js CHANGED
@@ -1,20 +1,27 @@
  "use strict";

- const pkg = require('../package.json');
+ function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }

- const requestStream = require('./requestStream');
+ function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
+
+ function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
+
+ var pkg = require('../package.json');
+
+ var requestStream = require('./requestStream');

  module.exports = (client, dataset) => {
    // Sanity client doesn't handle streams natively since we want to support node/browser
    // with same API. We're just using it here to get hold of URLs and tokens.
-   const url = client.getUrl("/data/export/".concat(dataset));
-   const token = client.config().token;
-   const headers = {
-     'User-Agent': "".concat(pkg.name, "@").concat(pkg.version),
-     ...(token ? {
-       Authorization: "Bearer ".concat(token)
-     } : {})
-   };
+   var url = client.getUrl("/data/export/".concat(dataset));
+   var token = client.config().token;
+
+   var headers = _objectSpread({
+     'User-Agent': "".concat(pkg.name, "@").concat(pkg.version)
+   }, token ? {
+     Authorization: "Bearer ".concat(token)
+   } : {});
+
    return requestStream({
      url,
      headers

package/lib/logFirstChunk.js CHANGED
@@ -1,14 +1,14 @@
  "use strict";

- const miss = require('mississippi');
+ var miss = require('mississippi');

- const debug = require('./debug');
+ var debug = require('./debug');

  module.exports = () => {
-   let firstChunk = true;
+   var firstChunk = true;
    return miss.through((chunk, enc, callback) => {
      if (firstChunk) {
-       const string = chunk.toString('utf8').split('\n')[0];
+       var string = chunk.toString('utf8').split('\n')[0];
        debug('First chunk received: %s', string.slice(0, 300));
        firstChunk = false;
      }

package/lib/rejectOnApiError.js CHANGED
@@ -1,6 +1,6 @@
  "use strict";

- const miss = require('mississippi');
+ var miss = require('mississippi');

  module.exports = () => miss.through.obj((doc, enc, callback) => {
    if (doc.error && doc.statusCode) {

package/lib/requestStream.js CHANGED
@@ -1,23 +1,32 @@
  "use strict";

- const getIt = require('get-it');
+ function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }

- const {
-   keepAlive,
-   promise
- } = require('get-it/middleware');
+ function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }

- const debug = require('./debug');
+ function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }

- const request = getIt([keepAlive(), promise({
+ function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
+
+ function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
+
+ var getIt = require('get-it');
+
+ var _require = require('get-it/middleware'),
+     keepAlive = _require.keepAlive,
+     promise = _require.promise;
+
+ var debug = require('./debug');
+
+ var request = getIt([keepAlive(), promise({
    onlyBody: true
  })]);
- const socketsWithTimeout = new WeakSet();
- const CONNECTION_TIMEOUT = 15 * 1000; // 15 seconds
+ var socketsWithTimeout = new WeakSet();
+ var CONNECTION_TIMEOUT = 15 * 1000; // 15 seconds

- const READ_TIMEOUT = 3 * 60 * 1000; // 3 minutes
+ var READ_TIMEOUT = 3 * 60 * 1000; // 3 minutes

- const MAX_RETRIES = 5;
+ var MAX_RETRIES = 5;

  function delay(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
@@ -25,39 +34,51 @@ function delay(ms) {
  /* eslint-disable no-await-in-loop, max-depth */


- module.exports = async options => {
-   let error;
+ module.exports = /*#__PURE__*/function () {
+   var _ref = _asyncToGenerator(function* (options) {
+     var error;

-   for (let i = 0; i < MAX_RETRIES; i++) {
-     try {
-       const response = await request({ ...options,
-         stream: true,
-         maxRedirects: 0,
-         timeout: {
-           connect: CONNECTION_TIMEOUT,
-           socket: READ_TIMEOUT
-         }
-       });
+     for (var i = 0; i < MAX_RETRIES; i++) {
+       try {
+         var _ret = yield* function* () {
+           var response = yield request(_objectSpread(_objectSpread({}, options), {}, {
+             stream: true,
+             maxRedirects: 0,
+             timeout: {
+               connect: CONNECTION_TIMEOUT,
+               socket: READ_TIMEOUT
+             }
+           }));

-       if (response.connection && typeof response.connection.setTimeout === 'function' && !socketsWithTimeout.has(response.connection)) {
-         socketsWithTimeout.add(response.connection);
-         response.connection.setTimeout(READ_TIMEOUT, () => {
-           response.destroy(new Error("Read timeout: No data received on socket for ".concat(READ_TIMEOUT, " ms")));
-         });
-       }
+           if (response.connection && typeof response.connection.setTimeout === 'function' && !socketsWithTimeout.has(response.connection)) {
+             socketsWithTimeout.add(response.connection);
+             response.connection.setTimeout(READ_TIMEOUT, () => {
+               response.destroy(new Error("Read timeout: No data received on socket for ".concat(READ_TIMEOUT, " ms")));
+             });
+           }

-       return response;
-     } catch (err) {
-       error = err;
+           return {
+             v: response
+           };
+         }();

-       if (err.response && err.response.statusCode && err.response.statusCode < 500) {
-         break;
-       }
+         if (typeof _ret === "object") return _ret.v;
+       } catch (err) {
+         error = err;
+
+         if (err.response && err.response.statusCode && err.response.statusCode < 500) {
+           break;
+         }

-       debug('Error, retrying after 1500ms: %s', err.message);
-       await delay(1500);
+         debug('Error, retrying after 1500ms: %s', err.message);
+         yield delay(1500);
+       }
      }
-   }

-   throw error;
- };
+     throw error;
+   });
+
+   return function (_x) {
+     return _ref.apply(this, arguments);
+   };
+ }();

package/lib/stringifyStream.js CHANGED
@@ -1,5 +1,5 @@
  "use strict";

- const miss = require('mississippi');
+ var miss = require('mississippi');

  module.exports = () => miss.through.obj((doc, enc, callback) => callback(null, "".concat(JSON.stringify(doc), "\n")));

package/lib/tryParseJson.js CHANGED
@@ -5,16 +5,16 @@ module.exports = line => {
      return JSON.parse(line);
    } catch (err) {
      // Catch half-done lines with an error at the end
-     const errorPosition = line.lastIndexOf('{"error":');
+     var errorPosition = line.lastIndexOf('{"error":');

      if (errorPosition === -1) {
        err.message = "".concat(err.message, " (").concat(line, ")");
        throw err;
      }

-     const errorJson = line.slice(errorPosition);
-     const errorLine = JSON.parse(errorJson);
-     const error = errorLine && errorLine.error;
+     var errorJson = line.slice(errorPosition);
+     var errorLine = JSON.parse(errorJson);
+     var error = errorLine && errorLine.error;

      if (error && error.description) {
        throw new Error("Error streaming dataset: ".concat(error.description, "\n\n").concat(errorJson, "\n"));