@sanity/export 2.28.1 → 2.29.4-purple-unicorn.648

package/LICENSE CHANGED
@@ -1,6 +1,6 @@
1
1
  MIT License
2
2
 
3
- Copyright (c) 2016 - 2021 Sanity.io
3
+ Copyright (c) 2016 - 2022 Sanity.io
4
4
 
5
5
  Permission is hereby granted, free of charge, to any person obtaining a copy
6
6
  of this software and associated documentation files (the "Software"), to deal
package/lib/AssetHandler.js CHANGED
@@ -1,105 +1,64 @@
1
1
  "use strict";
2
2
 
3
- var _excluded = ["asset"];
4
-
5
- function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); }
6
-
7
- function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
8
-
9
- function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
10
-
11
- function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
12
-
13
- function _iterableToArrayLimit(arr, i) { var _i = arr == null ? null : typeof Symbol !== "undefined" && arr[Symbol.iterator] || arr["@@iterator"]; if (_i == null) return; var _arr = []; var _n = true; var _d = false; var _s, _e; try { for (_i = _i.call(arr); !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
14
-
15
- function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
16
-
17
- function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
18
-
19
- function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
20
-
21
- function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
22
-
23
- function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
24
-
25
- function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
26
-
27
- function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
28
-
29
3
  function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
30
4
 
31
- var path = require('path');
5
+ const path = require('path');
32
6
 
33
- var crypto = require('crypto');
7
+ const crypto = require('crypto');
34
8
 
35
- var _require = require('url'),
36
- parseUrl = _require.parse,
37
- formatUrl = _require.format;
9
+ const {
10
+ parse: parseUrl,
11
+ format: formatUrl
12
+ } = require('url');
38
13
 
39
- var fse = require('fs-extra');
14
+ const fse = require('fs-extra');
40
15
 
41
- var miss = require('mississippi');
16
+ const miss = require('mississippi');
42
17
 
43
- var PQueue = require('p-queue');
18
+ const PQueue = require('p-queue');
44
19
 
45
- var _require2 = require('lodash'),
46
- omit = _require2.omit,
47
- noop = _require2.noop;
20
+ const {
21
+ omit,
22
+ noop
23
+ } = require('lodash');
48
24
 
49
- var pkg = require('../package.json');
25
+ const pkg = require('../package.json');
50
26
 
51
- var requestStream = require('./requestStream');
27
+ const requestStream = require('./requestStream');
52
28
 
53
- var debug = require('./debug');
29
+ const debug = require('./debug');
54
30
 
55
- var EXCLUDE_PROPS = ['_id', '_type', 'assetId', 'extension', 'mimeType', 'path', 'url'];
56
- var ACTION_REMOVE = 'remove';
57
- var ACTION_REWRITE = 'rewrite';
58
- var ASSET_DOWNLOAD_CONCURRENCY = 8;
31
+ const EXCLUDE_PROPS = ['_id', '_type', 'assetId', 'extension', 'mimeType', 'path', 'url'];
32
+ const ACTION_REMOVE = 'remove';
33
+ const ACTION_REWRITE = 'rewrite';
34
+ const ASSET_DOWNLOAD_CONCURRENCY = 8;
59
35
 
60
36
  class AssetHandler {
61
37
  constructor(options) {
62
- var _this = this;
63
-
64
- _defineProperty(this, "rewriteAssets", miss.through.obj( /*#__PURE__*/function () {
65
- var _ref = _asyncToGenerator(function* (doc, enc, callback) {
66
- if (['sanity.imageAsset', 'sanity.fileAsset'].includes(doc._type)) {
67
- var type = doc._type === 'sanity.imageAsset' ? 'image' : 'file';
68
- var filePath = "".concat(type, "s/").concat(generateFilename(doc._id));
69
-
70
- _this.assetsSeen.set(doc._id, type);
71
-
72
- _this.queueAssetDownload(doc, filePath, type);
73
-
74
- callback();
75
- return;
76
- }
77
-
78
- callback(null, _this.findAndModify(doc, ACTION_REWRITE));
79
- });
80
-
81
- return function (_x, _x2, _x3) {
82
- return _ref.apply(this, arguments);
83
- };
84
- }()));
38
+ _defineProperty(this, "rewriteAssets", miss.through.obj(async (doc, enc, callback) => {
39
+ if (['sanity.imageAsset', 'sanity.fileAsset'].includes(doc._type)) {
40
+ const type = doc._type === 'sanity.imageAsset' ? 'image' : 'file';
41
+ const filePath = "".concat(type, "s/").concat(generateFilename(doc._id));
42
+ this.assetsSeen.set(doc._id, type);
43
+ this.queueAssetDownload(doc, filePath, type);
44
+ callback();
45
+ return;
46
+ }
85
47
 
86
- _defineProperty(this, "stripAssets", miss.through.obj( /*#__PURE__*/function () {
87
- var _ref2 = _asyncToGenerator(function* (doc, enc, callback) {
88
- if (['sanity.imageAsset', 'sanity.fileAsset'].includes(doc._type)) {
89
- callback();
90
- return;
91
- }
48
+ callback(null, this.findAndModify(doc, ACTION_REWRITE));
49
+ }));
92
50
 
93
- callback(null, _this.findAndModify(doc, ACTION_REMOVE));
94
- });
51
+ _defineProperty(this, "stripAssets", miss.through.obj(async (doc, enc, callback) => {
52
+ if (['sanity.imageAsset', 'sanity.fileAsset'].includes(doc._type)) {
53
+ callback();
54
+ return;
55
+ }
95
56
 
96
- return function (_x4, _x5, _x6) {
97
- return _ref2.apply(this, arguments);
98
- };
99
- }()));
57
+ callback(null, this.findAndModify(doc, ACTION_REMOVE));
58
+ }));
100
59
 
101
60
  _defineProperty(this, "skipAssets", miss.through.obj((doc, enc, callback) => {
102
- var isAsset = ['sanity.imageAsset', 'sanity.fileAsset'].includes(doc._type);
61
+ const isAsset = ['sanity.imageAsset', 'sanity.fileAsset'].includes(doc._type);
103
62
 
104
63
  if (isAsset) {
105
64
  callback();
@@ -113,7 +72,7 @@ class AssetHandler {
113
72
 
114
73
  _defineProperty(this, "findAndModify", (item, action) => {
115
74
  if (Array.isArray(item)) {
116
- var children = item.map(child => this.findAndModify(child, action));
75
+ const children = item.map(child => this.findAndModify(child, action));
117
76
  return children.filter(Boolean);
118
77
  }
119
78
 
@@ -121,30 +80,32 @@ class AssetHandler {
121
80
  return item;
122
81
  }
123
82
 
124
- var isAsset = isAssetField(item);
83
+ const isAsset = isAssetField(item);
125
84
 
126
85
  if (isAsset && action === ACTION_REMOVE) {
127
86
  return undefined;
128
87
  }
129
88
 
130
89
  if (isAsset && action === ACTION_REWRITE) {
131
- var asset = item.asset,
132
- other = _objectWithoutProperties(item, _excluded);
133
-
134
- var assetId = asset._ref;
135
- var assetType = getAssetType(item);
136
- var filePath = "".concat(assetType, "s/").concat(generateFilename(assetId));
137
- return _objectSpread({
138
- _sanityAsset: "".concat(assetType, "@file://./").concat(filePath)
139
- }, this.findAndModify(other, action));
90
+ const {
91
+ asset,
92
+ ...other
93
+ } = item;
94
+ const assetId = asset._ref;
95
+ const assetType = getAssetType(item);
96
+ const filePath = "".concat(assetType, "s/").concat(generateFilename(assetId));
97
+ return {
98
+ _sanityAsset: "".concat(assetType, "@file://./").concat(filePath),
99
+ ...this.findAndModify(other, action)
100
+ };
140
101
  }
141
102
 
142
- var newItem = {};
143
- var keys = Object.keys(item);
103
+ const newItem = {};
104
+ const keys = Object.keys(item);
144
105
 
145
- for (var i = 0; i < keys.length; i++) {
146
- var key = keys[i];
147
- var value = item[key];
106
+ for (let i = 0; i < keys.length; i++) {
107
+ const key = keys[i];
108
+ const value = item[key];
148
109
  newItem[key] = this.findAndModify(value, action);
149
110
 
150
111
  if (typeof newItem[key] === 'undefined') {
@@ -155,7 +116,7 @@ class AssetHandler {
155
116
  return newItem;
156
117
  });
157
118
 
158
- var concurrency = options.concurrency || ASSET_DOWNLOAD_CONCURRENCY;
119
+ const concurrency = options.concurrency || ASSET_DOWNLOAD_CONCURRENCY;
159
120
  debug('Using asset download concurrency of %d', concurrency);
160
121
  this.client = options.client;
161
122
  this.tmpDir = options.tmpDir;
@@ -222,18 +183,18 @@ class AssetHandler {
222
183
  }
223
184
 
224
185
  getAssetRequestOptions(assetDoc) {
225
- var token = this.client.config().token;
226
- var headers = {
186
+ const token = this.client.config().token;
187
+ const headers = {
227
188
  'User-Agent': "".concat(pkg.name, "@").concat(pkg.version)
228
189
  };
229
- var isImage = assetDoc._type === 'sanity.imageAsset';
230
- var url = parseUrl(assetDoc.url, true);
190
+ const isImage = assetDoc._type === 'sanity.imageAsset';
191
+ const url = parseUrl(assetDoc.url, true);
231
192
 
232
193
  if (isImage && ['cdn.sanity.io', 'cdn.sanity.work'].includes(url.hostname)) {
233
194
  headers.Authorization = "Bearer ".concat(token);
234
- url.query = _objectSpread(_objectSpread({}, url.query || {}), {}, {
195
+ url.query = { ...(url.query || {}),
235
196
  dlRaw: 'true'
236
- });
197
+ };
237
198
  }
238
199
 
239
200
  return {
@@ -242,96 +203,81 @@ class AssetHandler {
242
203
  };
243
204
  }
244
205
 
245
- downloadAsset(assetDoc, dstPath) {
246
- var _arguments = arguments,
247
- _this2 = this;
248
-
249
- return _asyncToGenerator(function* () {
250
- var attemptNum = _arguments.length > 2 && _arguments[2] !== undefined ? _arguments[2] : 0;
251
- var url = assetDoc.url;
252
-
253
- var options = _this2.getAssetRequestOptions(assetDoc);
254
-
255
- var stream;
256
-
257
- try {
258
- stream = yield requestStream(options);
259
- } catch (err) {
260
- _this2.reject(err);
261
-
262
- return false;
263
- }
264
-
265
- if (stream.statusCode !== 200) {
266
- _this2.queue.clear();
267
-
268
- var err = yield tryGetErrorFromStream(stream);
269
- var errMsg = "Referenced asset URL \"".concat(url, "\" returned HTTP ").concat(stream.statusCode);
270
-
271
- if (err) {
272
- errMsg = "".concat(errMsg, ":\n\n").concat(err);
273
- }
274
-
275
- _this2.reject(new Error(errMsg));
276
-
277
- return false;
278
- }
279
-
280
- _this2.maybeCreateAssetDirs();
281
-
282
- debug('Asset stream ready, writing to filesystem at %s', dstPath);
283
- var tmpPath = path.join(_this2.tmpDir, dstPath);
284
-
285
- var _yield$writeHashedStr = yield writeHashedStream(tmpPath, stream),
286
- sha1 = _yield$writeHashedStr.sha1,
287
- md5 = _yield$writeHashedStr.md5,
288
- size = _yield$writeHashedStr.size; // Verify it against our downloaded stream to make sure we have the same copy
289
-
206
+ async downloadAsset(assetDoc, dstPath) {
207
+ let attemptNum = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
208
+ const {
209
+ url
210
+ } = assetDoc;
211
+ const options = this.getAssetRequestOptions(assetDoc);
212
+ let stream;
213
+
214
+ try {
215
+ stream = await requestStream(options);
216
+ } catch (err) {
217
+ this.reject(err);
218
+ return false;
219
+ }
290
220
 
291
- var contentLength = stream.headers['content-length'];
292
- var remoteSha1 = stream.headers['x-sanity-sha1'];
293
- var remoteMd5 = stream.headers['x-sanity-md5'];
294
- var hasHash = Boolean(remoteSha1 || remoteMd5);
295
- var method = md5 ? 'md5' : 'sha1';
296
- var differs = false;
221
+ if (stream.statusCode !== 200) {
222
+ this.queue.clear();
223
+ const err = await tryGetErrorFromStream(stream);
224
+ let errMsg = "Referenced asset URL \"".concat(url, "\" returned HTTP ").concat(stream.statusCode);
297
225
 
298
- if (remoteMd5 && md5) {
299
- differs = remoteMd5 !== md5;
300
- } else if (remoteSha1 && sha1) {
301
- differs = remoteSha1 !== sha1;
226
+ if (err) {
227
+ errMsg = "".concat(errMsg, ":\n\n").concat(err);
302
228
  }
303
229
 
304
- if (differs && attemptNum < 3) {
305
- debug('%s does not match downloaded asset, retrying (#%d) [%s]', method, attemptNum + 1, url);
306
- return _this2.downloadAsset(assetDoc, dstPath, attemptNum + 1);
307
- } else if (differs) {
308
- var details = [hasHash && (method === 'md5' ? "md5 should be ".concat(remoteMd5, ", got ").concat(md5) : "sha1 should be ".concat(remoteSha1, ", got ").concat(sha1)), contentLength && parseInt(contentLength, 10) !== size && "Asset should be ".concat(contentLength, " bytes, got ").concat(size), "Did not succeed after ".concat(attemptNum, " attempts.")];
309
- var detailsString = "Details:\n - ".concat(details.filter(Boolean).join('\n - '));
310
- yield fse.unlink(tmpPath);
311
-
312
- _this2.queue.clear();
313
-
314
- var error = new Error("Failed to download asset at ".concat(assetDoc.url, ", giving up. ").concat(detailsString));
315
-
316
- _this2.reject(error);
230
+ this.reject(new Error(errMsg));
231
+ return false;
232
+ }
317
233
 
318
- return false;
319
- }
234
+ this.maybeCreateAssetDirs();
235
+ debug('Asset stream ready, writing to filesystem at %s', dstPath);
236
+ const tmpPath = path.join(this.tmpDir, dstPath);
237
+ const {
238
+ sha1,
239
+ md5,
240
+ size
241
+ } = await writeHashedStream(tmpPath, stream); // Verify it against our downloaded stream to make sure we have the same copy
242
+
243
+ const contentLength = stream.headers['content-length'];
244
+ const remoteSha1 = stream.headers['x-sanity-sha1'];
245
+ const remoteMd5 = stream.headers['x-sanity-md5'];
246
+ const hasHash = Boolean(remoteSha1 || remoteMd5);
247
+ const method = md5 ? 'md5' : 'sha1';
248
+ let differs = false;
249
+
250
+ if (remoteMd5 && md5) {
251
+ differs = remoteMd5 !== md5;
252
+ } else if (remoteSha1 && sha1) {
253
+ differs = remoteSha1 !== sha1;
254
+ }
320
255
 
321
- var isImage = assetDoc._type === 'sanity.imageAsset';
322
- var type = isImage ? 'image' : 'file';
323
- var id = "".concat(type, "-").concat(sha1);
324
- var metaProps = omit(assetDoc, EXCLUDE_PROPS);
256
+ if (differs && attemptNum < 3) {
257
+ debug('%s does not match downloaded asset, retrying (#%d) [%s]', method, attemptNum + 1, url);
258
+ return this.downloadAsset(assetDoc, dstPath, attemptNum + 1);
259
+ } else if (differs) {
260
+ const details = [hasHash && (method === 'md5' ? "md5 should be ".concat(remoteMd5, ", got ").concat(md5) : "sha1 should be ".concat(remoteSha1, ", got ").concat(sha1)), contentLength && parseInt(contentLength, 10) !== size && "Asset should be ".concat(contentLength, " bytes, got ").concat(size), "Did not succeed after ".concat(attemptNum, " attempts.")];
261
+ const detailsString = "Details:\n - ".concat(details.filter(Boolean).join('\n - '));
262
+ await fse.unlink(tmpPath);
263
+ this.queue.clear();
264
+ const error = new Error("Failed to download asset at ".concat(assetDoc.url, ", giving up. ").concat(detailsString));
265
+ this.reject(error);
266
+ return false;
267
+ }
325
268
 
326
- if (Object.keys(metaProps).length > 0) {
327
- _this2.assetMap[id] = metaProps;
328
- }
269
+ const isImage = assetDoc._type === 'sanity.imageAsset';
270
+ const type = isImage ? 'image' : 'file';
271
+ const id = "".concat(type, "-").concat(sha1);
272
+ const metaProps = omit(assetDoc, EXCLUDE_PROPS);
329
273
 
330
- _this2.downloading.splice(_this2.downloading.findIndex(datUrl => datUrl === url), 1);
274
+ if (Object.keys(metaProps).length > 0) {
275
+ this.assetMap[id] = metaProps;
276
+ }
331
277
 
332
- _this2.filesWritten++;
333
- return true;
334
- })();
278
+ this.downloading.splice(this.downloading.findIndex(datUrl => datUrl === url), 1);
279
+ this.filesWritten++;
280
+ return true;
335
281
  }
336
282
 
337
283
  }
@@ -345,10 +291,7 @@ function getAssetType(item) {
345
291
  return null;
346
292
  }
347
293
 
348
- var _ref3 = item.asset._ref.match(/^(image|file)-/) || [],
349
- _ref4 = _slicedToArray(_ref3, 2),
350
- type = _ref4[1];
351
-
294
+ const [, type] = item.asset._ref.match(/^(image|file)-/) || [];
352
295
  return type || null;
353
296
  }
354
297
 
@@ -357,20 +300,16 @@ function isSanityAsset(assetId) {
357
300
  }
358
301
 
359
302
  function generateFilename(assetId) {
360
- var _ref5 = assetId.match(/^(image|file)-(.*?)(-[a-z]+)?$/) || [],
361
- _ref6 = _slicedToArray(_ref5, 4),
362
- asset = _ref6[2],
363
- ext = _ref6[3];
364
-
365
- var extension = (ext || 'bin').replace(/^-/, '');
303
+ const [,, asset, ext] = assetId.match(/^(image|file)-(.*?)(-[a-z]+)?$/) || [];
304
+ const extension = (ext || 'bin').replace(/^-/, '');
366
305
  return asset ? "".concat(asset, ".").concat(extension) : "".concat(assetId, ".bin");
367
306
  }
368
307
 
369
308
  function writeHashedStream(filePath, stream) {
370
- var size = 0;
371
- var md5 = crypto.createHash('md5');
372
- var sha1 = crypto.createHash('sha1');
373
- var hasher = miss.through((chunk, enc, cb) => {
309
+ let size = 0;
310
+ const md5 = crypto.createHash('md5');
311
+ const sha1 = crypto.createHash('sha1');
312
+ const hasher = miss.through((chunk, enc, cb) => {
374
313
  size += chunk.length;
375
314
  md5.update(chunk);
376
315
  sha1.update(chunk);
@@ -396,7 +335,7 @@ function tryGetErrorFromStream(stream) {
396
335
 
397
336
  function parse(body) {
398
337
  try {
399
- var parsed = JSON.parse(body.toString('utf8'));
338
+ const parsed = JSON.parse(body.toString('utf8'));
400
339
  resolve(parsed.message || parsed.error || null);
401
340
  } catch (err) {
402
341
  resolve(body.toString('utf8').slice(0, 16000));
package/lib/export.js CHANGED
@@ -1,65 +1,61 @@
1
1
  "use strict";
2
2
 
3
- function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
3
+ const os = require('os');
4
4
 
5
- function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
5
+ const path = require('path');
6
6
 
7
- var os = require('os');
7
+ const zlib = require('zlib');
8
8
 
9
- var path = require('path');
9
+ const fse = require('fs-extra');
10
10
 
11
- var zlib = require('zlib');
11
+ const miss = require('mississippi');
12
12
 
13
- var fse = require('fs-extra');
13
+ const split = require('split2');
14
14
 
15
- var miss = require('mississippi');
15
+ const archiver = require('archiver');
16
16
 
17
- var split = require('split2');
17
+ const debug = require('./debug');
18
18
 
19
- var archiver = require('archiver');
19
+ const AssetHandler = require('./AssetHandler');
20
20
 
21
- var debug = require('./debug');
21
+ const stringifyStream = require('./stringifyStream');
22
22
 
23
- var AssetHandler = require('./AssetHandler');
23
+ const validateOptions = require('./validateOptions');
24
24
 
25
- var stringifyStream = require('./stringifyStream');
25
+ const rejectOnApiError = require('./rejectOnApiError');
26
26
 
27
- var validateOptions = require('./validateOptions');
27
+ const getDocumentsStream = require('./getDocumentsStream');
28
28
 
29
- var rejectOnApiError = require('./rejectOnApiError');
29
+ const filterSystemDocuments = require('./filterSystemDocuments');
30
30
 
31
- var getDocumentsStream = require('./getDocumentsStream');
31
+ const filterDocumentTypes = require('./filterDocumentTypes');
32
32
 
33
- var filterSystemDocuments = require('./filterSystemDocuments');
33
+ const filterDrafts = require('./filterDrafts');
34
34
 
35
- var filterDocumentTypes = require('./filterDocumentTypes');
35
+ const logFirstChunk = require('./logFirstChunk');
36
36
 
37
- var filterDrafts = require('./filterDrafts');
37
+ const tryParseJson = require('./tryParseJson');
38
38
 
39
- var logFirstChunk = require('./logFirstChunk');
40
-
41
- var tryParseJson = require('./tryParseJson');
42
-
43
- var noop = () => null;
39
+ const noop = () => null;
44
40
 
45
41
  function exportDataset(opts) {
46
- var options = validateOptions(opts);
47
- var onProgress = options.onProgress || noop;
48
- var archive = archiver('tar', {
42
+ const options = validateOptions(opts);
43
+ const onProgress = options.onProgress || noop;
44
+ const archive = archiver('tar', {
49
45
  gzip: true,
50
46
  gzipOptions: {
51
47
  level: options.compress ? zlib.Z_DEFAULT_COMPRESSION : zlib.Z_NO_COMPRESSION
52
48
  }
53
49
  });
54
- var slugDate = new Date().toISOString().replace(/[^a-z0-9]/gi, '-').toLowerCase();
55
- var prefix = "".concat(opts.dataset, "-export-").concat(slugDate);
56
- var tmpDir = path.join(os.tmpdir(), prefix);
50
+ const slugDate = new Date().toISOString().replace(/[^a-z0-9]/gi, '-').toLowerCase();
51
+ const prefix = "".concat(opts.dataset, "-export-").concat(slugDate);
52
+ const tmpDir = path.join(os.tmpdir(), prefix);
57
53
 
58
- var cleanup = () => fse.remove(tmpDir).catch(err => {
54
+ const cleanup = () => fse.remove(tmpDir).catch(err => {
59
55
  debug("Error while cleaning up temporary files: ".concat(err.message));
60
56
  });
61
57
 
62
- var assetHandler = new AssetHandler({
58
+ const assetHandler = new AssetHandler({
63
59
  client: options.client,
64
60
  tmpDir,
65
61
  prefix,
@@ -67,7 +63,7 @@ function exportDataset(opts) {
67
63
  });
68
64
  debug('Outputting assets (temporarily) to %s', tmpDir);
69
65
  debug('Outputting to %s', options.outputPath === '-' ? 'stdout' : options.outputPath);
70
- var outputStream;
66
+ let outputStream;
71
67
 
72
68
  if (isWritableStream(options.outputPath)) {
73
69
  outputStream = options.outputPath;
@@ -75,171 +71,146 @@ function exportDataset(opts) {
75
71
  outputStream = options.outputPath === '-' ? process.stdout : fse.createWriteStream(options.outputPath);
76
72
  }
77
73
 
78
- var assetStreamHandler = assetHandler.noop;
74
+ let assetStreamHandler = assetHandler.noop;
79
75
 
80
76
  if (!options.raw) {
81
77
  assetStreamHandler = options.assets ? assetHandler.rewriteAssets : assetHandler.stripAssets;
82
78
  }
83
79
 
84
- return new Promise( /*#__PURE__*/function () {
85
- var _ref = _asyncToGenerator(function* (resolve, reject) {
86
- miss.finished(archive, /*#__PURE__*/function () {
87
- var _ref2 = _asyncToGenerator(function* (archiveErr) {
88
- if (archiveErr) {
89
- debug('Archiving errored! %s', archiveErr.stack);
90
- yield cleanup();
91
- reject(archiveErr);
92
- return;
93
- }
94
-
95
- debug('Archive finished!');
80
+ return new Promise(async (resolve, reject) => {
81
+ miss.finished(archive, async archiveErr => {
82
+ if (archiveErr) {
83
+ debug('Archiving errored! %s', archiveErr.stack);
84
+ await cleanup();
85
+ reject(archiveErr);
86
+ return;
87
+ }
88
+
89
+ debug('Archive finished!');
90
+ });
91
+ debug('Getting dataset export stream');
92
+ onProgress({
93
+ step: 'Exporting documents...'
94
+ });
95
+ let documentCount = 0;
96
+ let lastReported = Date.now();
97
+
98
+ const reportDocumentCount = (chunk, enc, cb) => {
99
+ ++documentCount;
100
+ const now = Date.now();
101
+
102
+ if (now - lastReported > 50) {
103
+ onProgress({
104
+ step: 'Exporting documents...',
105
+ current: documentCount,
106
+ total: '?',
107
+ update: true
96
108
  });
109
+ lastReported = now;
110
+ }
111
+
112
+ cb(null, chunk);
113
+ };
114
+
115
+ const inputStream = await getDocumentsStream(options.client, options.dataset);
116
+ debug('Got HTTP %d', inputStream.statusCode);
117
+ debug('Response headers: %o', inputStream.headers);
118
+ const jsonStream = miss.pipeline(inputStream, logFirstChunk(), split(tryParseJson), rejectOnApiError(), filterSystemDocuments(), assetStreamHandler, filterDocumentTypes(options.types), options.drafts ? miss.through.obj() : filterDrafts(), stringifyStream(), miss.through(reportDocumentCount));
119
+ miss.finished(jsonStream, async err => {
120
+ if (err) {
121
+ return;
122
+ }
97
123
 
98
- return function (_x3) {
99
- return _ref2.apply(this, arguments);
100
- };
101
- }());
102
- debug('Getting dataset export stream');
103
124
  onProgress({
104
- step: 'Exporting documents...'
125
+ step: 'Exporting documents...',
126
+ current: documentCount,
127
+ total: documentCount,
128
+ update: true
105
129
  });
106
- var documentCount = 0;
107
- var lastReported = Date.now();
108
-
109
- var reportDocumentCount = (chunk, enc, cb) => {
110
- ++documentCount;
111
- var now = Date.now();
112
-
113
- if (now - lastReported > 50) {
114
- onProgress({
115
- step: 'Exporting documents...',
116
- current: documentCount,
117
- total: '?',
118
- update: true
119
- });
120
- lastReported = now;
130
+
131
+ if (!options.raw && options.assets) {
132
+ onProgress({
133
+ step: 'Downloading assets...'
134
+ });
135
+ }
136
+
137
+ let prevCompleted = 0;
138
+ const progressInterval = setInterval(() => {
139
+ const completed = assetHandler.queueSize - assetHandler.queue.size - assetHandler.queue.pending;
140
+
141
+ if (prevCompleted === completed) {
142
+ return;
121
143
  }
122
144
 
123
- cb(null, chunk);
124
- };
125
-
126
- var inputStream = yield getDocumentsStream(options.client, options.dataset);
127
- debug('Got HTTP %d', inputStream.statusCode);
128
- debug('Response headers: %o', inputStream.headers);
129
- var jsonStream = miss.pipeline(inputStream, logFirstChunk(), split(tryParseJson), rejectOnApiError(), filterSystemDocuments(), assetStreamHandler, filterDocumentTypes(options.types), options.drafts ? miss.through.obj() : filterDrafts(), stringifyStream(), miss.through(reportDocumentCount));
130
- miss.finished(jsonStream, /*#__PURE__*/function () {
131
- var _ref3 = _asyncToGenerator(function* (err) {
132
- if (err) {
133
- return;
134
- }
135
-
136
- onProgress({
137
- step: 'Exporting documents...',
138
- current: documentCount,
139
- total: documentCount,
140
- update: true
141
- });
142
-
143
- if (!options.raw && options.assets) {
144
- onProgress({
145
- step: 'Downloading assets...'
146
- });
147
- }
148
-
149
- var prevCompleted = 0;
150
- var progressInterval = setInterval(() => {
151
- var completed = assetHandler.queueSize - assetHandler.queue.size - assetHandler.queue.pending;
152
-
153
- if (prevCompleted === completed) {
154
- return;
155
- }
156
-
157
- prevCompleted = completed;
158
- onProgress({
159
- step: 'Downloading assets...',
160
- current: completed,
161
- total: assetHandler.queueSize,
162
- update: true
163
- });
164
- }, 500);
165
- debug('Waiting for asset handler to complete downloads');
166
-
167
- try {
168
- var assetMap = yield assetHandler.finish(); // Make sure we mark the progress as done (eg 100/100 instead of 99/100)
169
-
170
- onProgress({
171
- step: 'Downloading assets...',
172
- current: assetHandler.queueSize,
173
- total: assetHandler.queueSize,
174
- update: true
175
- });
176
- archive.append(JSON.stringify(assetMap), {
177
- name: 'assets.json',
178
- prefix
179
- });
180
- clearInterval(progressInterval);
181
- } catch (assetErr) {
182
- clearInterval(progressInterval);
183
- yield cleanup();
184
- reject(assetErr);
185
- return;
186
- } // Add all downloaded assets to archive
187
-
188
-
189
- archive.directory(path.join(tmpDir, 'files'), "".concat(prefix, "/files"), {
190
- store: true
191
- });
192
- archive.directory(path.join(tmpDir, 'images'), "".concat(prefix, "/images"), {
193
- store: true
194
- });
195
- debug('Finalizing archive, flushing streams');
196
- onProgress({
197
- step: 'Adding assets to archive...'
198
- });
199
- archive.finalize();
145
+ prevCompleted = completed;
146
+ onProgress({
147
+ step: 'Downloading assets...',
148
+ current: completed,
149
+ total: assetHandler.queueSize,
150
+ update: true
151
+ });
152
+ }, 500);
153
+ debug('Waiting for asset handler to complete downloads');
154
+
155
+ try {
156
+ const assetMap = await assetHandler.finish(); // Make sure we mark the progress as done (eg 100/100 instead of 99/100)
157
+
158
+ onProgress({
159
+ step: 'Downloading assets...',
160
+ current: assetHandler.queueSize,
161
+ total: assetHandler.queueSize,
162
+ update: true
163
+ });
164
+ archive.append(JSON.stringify(assetMap), {
165
+ name: 'assets.json',
166
+ prefix
200
167
  });
168
+ clearInterval(progressInterval);
169
+ } catch (assetErr) {
170
+ clearInterval(progressInterval);
171
+ await cleanup();
172
+ reject(assetErr);
173
+ return;
174
+ } // Add all downloaded assets to archive
201
175
 
202
- return function (_x4) {
203
- return _ref3.apply(this, arguments);
204
- };
205
- }());
206
- archive.on('warning', err => {
207
- debug('Archive warning: %s', err.message);
176
+
177
+ archive.directory(path.join(tmpDir, 'files'), "".concat(prefix, "/files"), {
178
+ store: true
208
179
  });
209
- archive.append(jsonStream, {
210
- name: 'data.ndjson',
211
- prefix
180
+ archive.directory(path.join(tmpDir, 'images'), "".concat(prefix, "/images"), {
181
+ store: true
212
182
  });
213
- miss.pipe(archive, outputStream, onComplete);
183
+ debug('Finalizing archive, flushing streams');
184
+ onProgress({
185
+ step: 'Adding assets to archive...'
186
+ });
187
+ archive.finalize();
188
+ });
189
+ archive.on('warning', err => {
190
+ debug('Archive warning: %s', err.message);
191
+ });
192
+ archive.append(jsonStream, {
193
+ name: 'data.ndjson',
194
+ prefix
195
+ });
196
+ miss.pipe(archive, outputStream, onComplete);
214
197
 
215
- function onComplete(_x5) {
216
- return _onComplete.apply(this, arguments);
217
- }
198
+ async function onComplete(err) {
199
+ onProgress({
200
+ step: 'Clearing temporary files...'
201
+ });
202
+ await cleanup();
218
203
 
219
- function _onComplete() {
220
- _onComplete = _asyncToGenerator(function* (err) {
221
- onProgress({
222
- step: 'Clearing temporary files...'
223
- });
224
- yield cleanup();
225
-
226
- if (!err) {
227
- resolve();
228
- return;
229
- }
230
-
231
- debug('Error during streaming: %s', err.stack);
232
- assetHandler.clear();
233
- reject(err);
234
- });
235
- return _onComplete.apply(this, arguments);
204
+ if (!err) {
205
+ resolve();
206
+ return;
236
207
  }
237
- });
238
208
 
239
- return function (_x, _x2) {
240
- return _ref.apply(this, arguments);
241
- };
242
- }());
209
+ debug('Error during streaming: %s', err.stack);
210
+ assetHandler.clear();
211
+ reject(err);
212
+ }
213
+ });
243
214
  }
244
215
 
245
216
  function isWritableStream(val) {
package/lib/filterDocumentTypes.js CHANGED
@@ -1,9 +1,9 @@
1
1
  "use strict";
2
2
 
3
- var miss = require('mississippi');
3
+ const miss = require('mississippi');
4
4
 
5
5
  module.exports = allowedTypes => allowedTypes ? miss.through.obj((doc, enc, callback) => {
6
- var type = doc && doc._type;
6
+ const type = doc && doc._type;
7
7
 
8
8
  if (allowedTypes.includes(type)) {
9
9
  callback(null, doc);
package/lib/filterDrafts.js CHANGED
@@ -1,8 +1,8 @@
1
1
  "use strict";
2
2
 
3
- var miss = require('mississippi');
3
+ const miss = require('mississippi');
4
4
 
5
- var isDraft = doc => doc && doc._id && doc._id.indexOf('drafts.') === 0;
5
+ const isDraft = doc => doc && doc._id && doc._id.indexOf('drafts.') === 0;
6
6
 
7
7
  module.exports = () => miss.through.obj((doc, enc, callback) => {
8
8
  if (isDraft(doc)) {
package/lib/filterSystemDocuments.js CHANGED
@@ -1,10 +1,10 @@
1
1
  "use strict";
2
2
 
3
- var miss = require('mississippi');
3
+ const miss = require('mississippi');
4
4
 
5
- var debug = require('./debug');
5
+ const debug = require('./debug');
6
6
 
7
- var isSystemDocument = doc => doc && doc._id && doc._id.indexOf('_.') === 0;
7
+ const isSystemDocument = doc => doc && doc._id && doc._id.indexOf('_.') === 0;
8
8
 
9
9
  module.exports = () => miss.through.obj((doc, enc, callback) => {
10
10
  if (isSystemDocument(doc)) {
package/lib/getDocumentsStream.js CHANGED
@@ -1,27 +1,20 @@
1
1
  "use strict";
2
2
 
3
- function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
3
+ const pkg = require('../package.json');
4
4
 
5
- function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
6
-
7
- function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
8
-
9
- var pkg = require('../package.json');
10
-
11
- var requestStream = require('./requestStream');
5
+ const requestStream = require('./requestStream');
12
6
 
13
7
  module.exports = (client, dataset) => {
14
8
  // Sanity client doesn't handle streams natively since we want to support node/browser
15
9
  // with same API. We're just using it here to get hold of URLs and tokens.
16
- var url = client.getUrl("/data/export/".concat(dataset));
17
- var token = client.config().token;
18
-
19
- var headers = _objectSpread({
20
- 'User-Agent': "".concat(pkg.name, "@").concat(pkg.version)
21
- }, token ? {
22
- Authorization: "Bearer ".concat(token)
23
- } : {});
24
-
10
+ const url = client.getUrl("/data/export/".concat(dataset));
11
+ const token = client.config().token;
12
+ const headers = {
13
+ 'User-Agent': "".concat(pkg.name, "@").concat(pkg.version),
14
+ ...(token ? {
15
+ Authorization: "Bearer ".concat(token)
16
+ } : {})
17
+ };
25
18
  return requestStream({
26
19
  url,
27
20
  headers
package/lib/logFirstChunk.js CHANGED
@@ -1,14 +1,14 @@
1
1
  "use strict";
2
2
 
3
- var miss = require('mississippi');
3
+ const miss = require('mississippi');
4
4
 
5
- var debug = require('./debug');
5
+ const debug = require('./debug');
6
6
 
7
7
  module.exports = () => {
8
- var firstChunk = true;
8
+ let firstChunk = true;
9
9
  return miss.through((chunk, enc, callback) => {
10
10
  if (firstChunk) {
11
- var string = chunk.toString('utf8').split('\n')[0];
11
+ const string = chunk.toString('utf8').split('\n')[0];
12
12
  debug('First chunk received: %s', string.slice(0, 300));
13
13
  firstChunk = false;
14
14
  }
package/lib/rejectOnApiError.js CHANGED
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
 
3
- var miss = require('mississippi');
3
+ const miss = require('mississippi');
4
4
 
5
5
  module.exports = () => miss.through.obj((doc, enc, callback) => {
6
6
  if (doc.error && doc.statusCode) {
package/lib/requestStream.js CHANGED
@@ -1,32 +1,23 @@
1
1
  "use strict";
2
2
 
3
- function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
3
+ const getIt = require('get-it');
4
4
 
5
- function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
5
+ const {
6
+ keepAlive,
7
+ promise
8
+ } = require('get-it/middleware');
6
9
 
7
- function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
10
+ const debug = require('./debug');
8
11
 
9
- function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
10
-
11
- function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
12
-
13
- var getIt = require('get-it');
14
-
15
- var _require = require('get-it/middleware'),
16
- keepAlive = _require.keepAlive,
17
- promise = _require.promise;
18
-
19
- var debug = require('./debug');
20
-
21
- var request = getIt([keepAlive(), promise({
12
+ const request = getIt([keepAlive(), promise({
22
13
  onlyBody: true
23
14
  })]);
24
- var socketsWithTimeout = new WeakSet();
25
- var CONNECTION_TIMEOUT = 15 * 1000; // 15 seconds
15
+ const socketsWithTimeout = new WeakSet();
16
+ const CONNECTION_TIMEOUT = 15 * 1000; // 15 seconds
26
17
 
27
- var READ_TIMEOUT = 3 * 60 * 1000; // 3 minutes
18
+ const READ_TIMEOUT = 3 * 60 * 1000; // 3 minutes
28
19
 
29
- var MAX_RETRIES = 5;
20
+ const MAX_RETRIES = 5;
30
21
 
31
22
  function delay(ms) {
32
23
  return new Promise(resolve => setTimeout(resolve, ms));
@@ -34,51 +25,39 @@ function delay(ms) {
34
25
  /* eslint-disable no-await-in-loop, max-depth */
35
26
 
36
27
 
37
- module.exports = /*#__PURE__*/function () {
38
- var _ref = _asyncToGenerator(function* (options) {
39
- var error;
40
-
41
- for (var i = 0; i < MAX_RETRIES; i++) {
42
- try {
43
- var _ret = yield* function* () {
44
- var response = yield request(_objectSpread(_objectSpread({}, options), {}, {
45
- stream: true,
46
- maxRedirects: 0,
47
- timeout: {
48
- connect: CONNECTION_TIMEOUT,
49
- socket: READ_TIMEOUT
50
- }
51
- }));
28
+ module.exports = async options => {
29
+ let error;
52
30
 
53
- if (response.connection && typeof response.connection.setTimeout === 'function' && !socketsWithTimeout.has(response.connection)) {
54
- socketsWithTimeout.add(response.connection);
55
- response.connection.setTimeout(READ_TIMEOUT, () => {
56
- response.destroy(new Error("Read timeout: No data received on socket for ".concat(READ_TIMEOUT, " ms")));
57
- });
58
- }
59
-
60
- return {
61
- v: response
62
- };
63
- }();
31
+ for (let i = 0; i < MAX_RETRIES; i++) {
32
+ try {
33
+ const response = await request({ ...options,
34
+ stream: true,
35
+ maxRedirects: 0,
36
+ timeout: {
37
+ connect: CONNECTION_TIMEOUT,
38
+ socket: READ_TIMEOUT
39
+ }
40
+ });
64
41
 
65
- if (typeof _ret === "object") return _ret.v;
66
- } catch (err) {
67
- error = err;
42
+ if (response.connection && typeof response.connection.setTimeout === 'function' && !socketsWithTimeout.has(response.connection)) {
43
+ socketsWithTimeout.add(response.connection);
44
+ response.connection.setTimeout(READ_TIMEOUT, () => {
45
+ response.destroy(new Error("Read timeout: No data received on socket for ".concat(READ_TIMEOUT, " ms")));
46
+ });
47
+ }
68
48
 
69
- if (err.response && err.response.statusCode && err.response.statusCode < 500) {
70
- break;
71
- }
49
+ return response;
50
+ } catch (err) {
51
+ error = err;
72
52
 
73
- debug('Error, retrying after 1500ms: %s', err.message);
74
- yield delay(1500);
53
+ if (err.response && err.response.statusCode && err.response.statusCode < 500) {
54
+ break;
75
55
  }
76
- }
77
56
 
78
- throw error;
79
- });
57
+ debug('Error, retrying after 1500ms: %s', err.message);
58
+ await delay(1500);
59
+ }
60
+ }
80
61
 
81
- return function (_x) {
82
- return _ref.apply(this, arguments);
83
- };
84
- }();
62
+ throw error;
63
+ };
package/lib/stringifyStream.js CHANGED
@@ -1,5 +1,5 @@
1
1
  "use strict";
2
2
 
3
- var miss = require('mississippi');
3
+ const miss = require('mississippi');
4
4
 
5
5
  module.exports = () => miss.through.obj((doc, enc, callback) => callback(null, "".concat(JSON.stringify(doc), "\n")));
package/lib/tryParseJson.js CHANGED
@@ -5,16 +5,16 @@ module.exports = line => {
5
5
  return JSON.parse(line);
6
6
  } catch (err) {
7
7
  // Catch half-done lines with an error at the end
8
- var errorPosition = line.lastIndexOf('{"error":');
8
+ const errorPosition = line.lastIndexOf('{"error":');
9
9
 
10
10
  if (errorPosition === -1) {
11
11
  err.message = "".concat(err.message, " (").concat(line, ")");
12
12
  throw err;
13
13
  }
14
14
 
15
- var errorJson = line.slice(errorPosition);
16
- var errorLine = JSON.parse(errorJson);
17
- var error = errorLine && errorLine.error;
15
+ const errorJson = line.slice(errorPosition);
16
+ const errorLine = JSON.parse(errorJson);
17
+ const error = errorLine && errorLine.error;
18
18
 
19
19
  if (error && error.description) {
20
20
  throw new Error("Error streaming dataset: ".concat(error.description, "\n\n").concat(errorJson, "\n"));
package/lib/validateOptions.js CHANGED
@@ -1,10 +1,10 @@
1
1
  "use strict";
2
2
 
3
- var defaults = require('lodash/defaults');
3
+ const defaults = require('lodash/defaults');
4
4
 
5
- var clientMethods = ['getUrl', 'config'];
6
- var booleanFlags = ['assets', 'raw', 'compress', 'drafts'];
7
- var exportDefaults = {
5
+ const clientMethods = ['getUrl', 'config'];
6
+ const booleanFlags = ['assets', 'raw', 'compress', 'drafts'];
7
+ const exportDefaults = {
8
8
  compress: true,
9
9
  drafts: true,
10
10
  assets: true,
@@ -12,7 +12,7 @@ var exportDefaults = {
12
12
  };
13
13
 
14
14
  function validateOptions(opts) {
15
- var options = defaults({}, opts, exportDefaults);
15
+ const options = defaults({}, opts, exportDefaults);
16
16
 
17
17
  if (typeof options.dataset !== 'string' || options.dataset.length < 1) {
18
18
  throw new Error("options.dataset must be a valid dataset name");
@@ -26,13 +26,13 @@ function validateOptions(opts) {
26
26
  throw new Error('`options.client` must be set to an instance of @sanity/client');
27
27
  }
28
28
 
29
- var missing = clientMethods.find(key => typeof options.client[key] !== 'function');
29
+ const missing = clientMethods.find(key => typeof options.client[key] !== 'function');
30
30
 
31
31
  if (missing) {
32
32
  throw new Error("`options.client` is not a valid @sanity/client instance - no \"".concat(missing, "\" method found"));
33
33
  }
34
34
 
35
- var clientConfig = options.client.config();
35
+ const clientConfig = options.client.config();
36
36
 
37
37
  if (!clientConfig.token) {
38
38
  throw new Error('Client is not instantiated with a `token`');
package/package.json CHANGED
@@ -1,18 +1,19 @@
1
1
  {
2
2
  "name": "@sanity/export",
3
- "version": "2.28.1",
3
+ "version": "2.29.4-purple-unicorn.648+c8cbef48aa",
4
4
  "description": "Export Sanity documents and assets",
5
- "main": "lib/export.js",
5
+ "main": "./lib/export.js",
6
+ "types": "./lib/dts/src/export.d.ts",
6
7
  "engines": {
7
8
  "node": ">=12.0.0"
8
9
  },
9
10
  "author": "Sanity.io <hello@sanity.io>",
10
11
  "license": "MIT",
11
12
  "scripts": {
12
- "build": "babel src --copy-files --out-dir lib",
13
- "clean": "rimraf lib dest",
14
- "prebuild": "npm run clean",
15
- "test": "jest"
13
+ "build": "../../../bin/pkg-utils transpile --target node",
14
+ "clean": "rimraf lib",
15
+ "test": "jest",
16
+ "watch": "../../../bin/pkg-utils transpile --target node --watch"
16
17
  },
17
18
  "keywords": [
18
19
  "sanity",
@@ -28,13 +29,13 @@
28
29
  "debug": "^3.2.7",
29
30
  "fs-extra": "^7.0.0",
30
31
  "get-it": "^5.2.1",
31
- "lodash": "^4.17.15",
32
+ "lodash": "^4.17.21",
32
33
  "mississippi": "^4.0.0",
33
34
  "p-queue": "^2.3.0",
34
35
  "split2": "^3.2.2"
35
36
  },
36
37
  "devDependencies": {
37
- "rimraf": "^2.7.1",
38
+ "rimraf": "^3.0.2",
38
39
  "string-to-stream": "^1.1.0"
39
40
  },
40
41
  "publishConfig": {
@@ -49,5 +50,5 @@
49
50
  "url": "https://github.com/sanity-io/sanity/issues"
50
51
  },
51
52
  "homepage": "https://www.sanity.io/",
52
- "gitHead": "b91a9b3c23ab70f861e3bfd5e13d50d49b6c3cb2"
53
+ "gitHead": "c8cbef48aaef4d7d1471d74f2f8146631e22aa0a"
53
54
  }
package/jest.config.js DELETED
@@ -1,6 +0,0 @@
1
- const createConfig = require('../../../createJestConfig')
2
-
3
- module.exports = createConfig({
4
- displayName: require('./package.json').name,
5
- testEnvironment: 'node',
6
- })
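
For reference, below is a minimal usage sketch of the API whose implementation is diffed above. It assumes the package's main entry (lib/export.js) exports the exportDataset(options) function directly; the option names used (client, dataset, outputPath, assets, raw, drafts, compress, onProgress) are the ones read by lib/export.js and lib/validateOptions.js in this diff, while the @sanity/client configuration itself is illustrative and not part of this package.

const exportDataset = require('@sanity/export')
const sanityClient = require('@sanity/client')

// lib/validateOptions.js only requires a client exposing getUrl() and config(),
// configured with a token; the values below are placeholders.
const client = sanityClient({
  projectId: 'myprojectid',
  dataset: 'production',
  token: process.env.SANITY_TOKEN,
  useCdn: false,
})

exportDataset({
  client,
  dataset: 'production',
  outputPath: './production-export.tar.gz', // '-' streams the tarball to stdout
  assets: true,   // download assets and rewrite references (default)
  raw: false,     // when true, documents pass through without asset rewriting/stripping
  drafts: true,   // include drafts.* documents (default)
  compress: true, // gzip the tar archive (default)
  onProgress: (progress) => console.log(progress.step, progress.current || ''),
})
  .then(() => console.log('Export complete'))
  .catch((err) => console.error('Export failed:', err.message))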