@sanity/export 4.0.0 → 5.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -11,7 +11,7 @@ npm install --save @sanity/export
 ## Usage
 
 ```js
-const exportDataset = require('@sanity/export')
+import {exportDataset} from '@sanity/export'
 
 exportDataset({
   // Instance of @sanity/client configured to correct project ID and dataset
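
For orientation, a minimal migration sketch of the new ESM entry point (this block is not part of the published README; the client values are placeholders, and `outputPath`/`onProgress` are options that appear in `src/export.js` later in this diff):

```js
import {createClient} from '@sanity/client'
import {exportDataset} from '@sanity/export'

// Placeholder project values - substitute your own
const client = createClient({
  projectId: 'abc123',
  dataset: 'production',
  apiVersion: '2025-02-19',
  useCdn: false,
})

await exportDataset({
  client,
  dataset: 'production',
  outputPath: '/tmp/production.tar.gz', // '-' writes the archive to stdout
  onProgress: ({step, current, total}) => console.log(step, current ?? '', total ?? ''),
})
```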
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@sanity/export",
-  "version": "4.0.0",
+  "version": "5.0.0",
   "description": "Export Sanity documents and assets",
   "keywords": [
     "sanity",
@@ -21,7 +21,7 @@
   },
   "license": "MIT",
   "author": "Sanity.io <hello@sanity.io>",
-  "type": "commonjs",
+  "type": "module",
   "exports": "./src/export.js",
   "main": "./src/export.js",
   "files": [
@@ -29,39 +29,32 @@
   ],
   "scripts": {
     "lint": "eslint .",
-    "test": "jest --verbose"
+    "test": "vitest"
   },
   "dependencies": {
-    "@sanity/client": "^7.8.2",
-    "@sanity/util": "^4.3.0",
-    "archiver": "^7.0.0",
+    "archiver": "^7.0.1",
     "debug": "^4.3.4",
     "get-it": "^8.6.10",
-    "json-stream-stringify": "^2.0.2",
-    "lodash": "^4.17.21",
-    "mississippi": "^4.0.0",
-    "p-queue": "^2.3.0",
-    "rimraf": "^6.0.1",
-    "split2": "^4.2.0",
-    "tar": "^7.0.1",
-    "yaml": "^2.4.2"
+    "json-stream-stringify": "^3.1.6",
+    "p-queue": "^9.0.1",
+    "rimraf": "^6.1.2"
   },
   "devDependencies": {
-    "@jest/globals": "^29.7.0",
-    "@sanity/semantic-release-preset": "^4.1.7",
+    "@sanity/client": "^7.13.1",
+    "@sanity/semantic-release-preset": "^5.0.0",
+    "@vitest/coverage-v8": "^4.0.15",
     "eslint": "^8.57.0",
     "eslint-config-prettier": "^9.1.0",
-    "eslint-config-sanity": "^7.1.2",
-    "eslint-plugin-prettier": "^5.1.3",
-    "jest": "^29.7.0",
-    "nock": "^13.5.4",
-    "prettier": "^3.6.2",
-    "prettier-plugin-packagejson": "^2.5.19",
-    "string-to-stream": "^1.1.0",
-    "tar": "^7.0.1"
+    "eslint-config-sanity": "^7.1.4",
+    "eslint-plugin-prettier": "^5.5.4",
+    "nock": "^14.0.10",
+    "prettier": "^3.7.4",
+    "prettier-plugin-packagejson": "^2.5.20",
+    "tar": "^7.5.2",
+    "vitest": "^4.0.15"
  },
   "engines": {
-    "node": ">=20.19 >=22.12.0"
+    "node": ">=20.19 <22 || >=22.12"
   },
   "publishConfig": {
     "access": "public",
package/src/AssetHandler.js CHANGED
@@ -1,21 +1,27 @@
-const crypto = require('crypto')
-const {mkdirSync, createWriteStream} = require('fs')
-const path = require('path')
-const {parse: parseUrl, format: formatUrl} = require('url')
-const {omit} = require('lodash')
-const miss = require('mississippi')
-const PQueue = require('p-queue')
-const pkg = require('../package.json')
-const debug = require('./debug')
-const requestStream = require('./requestStream')
-const rimraf = require('./util/rimraf')
-const {ASSET_DOWNLOAD_MAX_RETRIES, ASSET_DOWNLOAD_CONCURRENCY} = require('./constants')
+import {createHash} from 'node:crypto'
+import {createWriteStream, mkdirSync} from 'node:fs'
+import {join as joinPath} from 'node:path'
+import {pipeline} from 'node:stream/promises'
+
+import PQueue from 'p-queue'
+import {rimraf} from 'rimraf'
+
+import {
+  ASSET_DOWNLOAD_CONCURRENCY,
+  ASSET_DOWNLOAD_MAX_RETRIES,
+  DEFAULT_RETRY_DELAY,
+} from './constants.js'
+import {debug} from './debug.js'
+import {getUserAgent} from './getUserAgent.js'
+import {requestStream} from './requestStream.js'
+import {delay} from './util/delay.js'
+import {through, throughObj} from './util/streamHelpers.js'
 
 const EXCLUDE_PROPS = ['_id', '_type', 'assetId', 'extension', 'mimeType', 'path', 'url']
 const ACTION_REMOVE = 'remove'
 const ACTION_REWRITE = 'rewrite'
 
-class AssetHandler {
+export class AssetHandler {
   constructor(options) {
     const concurrency = options.concurrency || ASSET_DOWNLOAD_CONCURRENCY
     debug('Using asset download concurrency of %d', concurrency)
@@ -30,6 +36,7 @@ class AssetHandler {
     this.filesWritten = 0
     this.queueSize = 0
     this.maxRetries = options.maxRetries || ASSET_DOWNLOAD_MAX_RETRIES
+    this.retryDelayMs = options.retryDelayMs
     this.queue = options.queue || new PQueue({concurrency})
 
     this.rejectedError = null
@@ -58,7 +65,7 @@ class AssetHandler {
 
   // Called when we want to download all assets to local filesystem and rewrite documents to hold
   // placeholder asset references (_sanityAsset: 'image@file:///local/path')
-  rewriteAssets = miss.through.obj(async (doc, enc, callback) => {
+  rewriteAssets = throughObj(async (doc, enc, callback) => {
     if (['sanity.imageAsset', 'sanity.fileAsset'].includes(doc._type)) {
       const type = doc._type === 'sanity.imageAsset' ? 'image' : 'file'
       const filePath = `${type}s/${generateFilename(doc._id)}`
@@ -73,7 +80,7 @@
 
   // Called in the case where we don't _want_ assets, so basically just remove all asset documents
   // as well as references to assets (*.asset._ref ^= (image|file)-)
-  stripAssets = miss.through.obj(async (doc, enc, callback) => {
+  stripAssets = throughObj(async (doc, enc, callback) => {
     if (['sanity.imageAsset', 'sanity.fileAsset'].includes(doc._type)) {
       callback()
       return
@@ -84,7 +91,7 @@
 
   // Called when we are using raw export mode along with `assets: false`, where we simply
   // want to skip asset documents but retain asset references (useful for data mangling)
-  skipAssets = miss.through.obj((doc, enc, callback) => {
+  skipAssets = throughObj((doc, enc, callback) => {
     const isAsset = ['sanity.imageAsset', 'sanity.fileAsset'].includes(doc._type)
     if (isAsset) {
       callback()
@@ -94,9 +101,9 @@
     callback(null, doc)
   })
 
-  noop = miss.through.obj((doc, enc, callback) => callback(null, doc))
+  noop = throughObj((doc, enc, callback) => callback(null, doc))
 
-  queueAssetDownload(assetDoc, dstPath, type) {
+  queueAssetDownload(assetDoc, dstPath) {
    if (!assetDoc.url) {
      debug('Asset document "%s" does not have a URL property, skipping', assetDoc._id)
      return
@@ -140,6 +147,8 @@
          // Don't retry on client errors
          break
        }
+
+        await delay(this.retryDelayMs || DEFAULT_RETRY_DELAY)
      }
    }
    throw dlError
@@ -164,18 +173,23 @@
    }
 
    /* eslint-disable no-sync */
-    mkdirSync(path.join(this.tmpDir, 'files'), {recursive: true})
-    mkdirSync(path.join(this.tmpDir, 'images'), {recursive: true})
+    mkdirSync(joinPath(this.tmpDir, 'files'), {recursive: true})
+    mkdirSync(joinPath(this.tmpDir, 'images'), {recursive: true})
    /* eslint-enable no-sync */
    this.assetDirsCreated = true
  }
 
  getAssetRequestOptions(assetDoc) {
    const token = this.client.config().token
-    const headers = {'User-Agent': `${pkg.name}@${pkg.version}`}
+    const headers = {'User-Agent': getUserAgent()}
    const isImage = assetDoc._type === 'sanity.imageAsset'
 
-    const url = parseUrl(assetDoc.url, true)
+    const url = URL.parse(assetDoc.url)
+    // If we can't parse it, return as-is
+    if (!url) {
+      return {url: assetDoc.url, headers}
+    }
+
    if (
      isImage &&
      token &&
@@ -184,10 +198,10 @@
        url.host === 'localhost:43216')
    ) {
      headers.Authorization = `Bearer ${token}`
-      url.query = {...(url.query || {}), dlRaw: 'true'}
+      url.searchParams.set('dlRaw', 'true')
    }
 
-    return {url: formatUrl(url), headers}
+    return {url: url.toString(), headers}
  }
 
  // eslint-disable-next-line max-statements
@@ -200,7 +214,10 @@
 
    let stream
    try {
-      stream = await requestStream(options)
+      stream = await requestStream({
+        maxRetries: 0, // We handle retries ourselves in queueAssetDownload
+        ...options,
+      })
    } catch (err) {
      const message = 'Failed to create asset stream'
      if (typeof err.message === 'string') {
@@ -239,7 +256,7 @@
    this.maybeCreateAssetDirs()
 
    debug('Asset stream ready, writing to filesystem at %s', dstPath)
-    const tmpPath = path.join(this.tmpDir, dstPath)
+    const tmpPath = joinPath(this.tmpDir, dstPath)
    let sha1 = ''
    let md5 = ''
    let size = 0
@@ -381,58 +398,61 @@ function generateFilename(assetId) {
  return asset ? `${asset}.${extension}` : `${assetId}.bin`
 }
 
-function writeHashedStream(filePath, stream) {
+async function writeHashedStream(filePath, stream) {
  let size = 0
-  const md5 = crypto.createHash('md5')
-  const sha1 = crypto.createHash('sha1')
+  const md5 = createHash('md5')
+  const sha1 = createHash('sha1')
 
-  const hasher = miss.through((chunk, enc, cb) => {
+  const hasher = through((chunk, enc, cb) => {
    size += chunk.length
    md5.update(chunk)
    sha1.update(chunk)
    cb(null, chunk)
  })
 
-  return new Promise((resolve, reject) =>
-    miss.pipe(stream, hasher, createWriteStream(filePath), (err) => {
-      if (err) {
-        reject(err)
-        return
-      }
-
-      resolve({
-        size,
-        sha1: sha1.digest('hex'),
-        md5: md5.digest('hex'),
-      })
-    }),
-  )
+  await pipeline(stream, hasher, createWriteStream(filePath))
+  return {
+    size,
+    sha1: sha1.digest('hex'),
+    md5: md5.digest('hex'),
+  }
 }
 
 function tryGetErrorFromStream(stream) {
  return new Promise((resolve, reject) => {
+    const chunks = []
    let receivedData = false
 
-    miss.pipe(stream, miss.concat(parse), (err) => {
-      if (err) {
-        reject(err)
-      } else if (!receivedData) {
-        // Resolve with null if no data was received, to let the caller
-        // know we couldn't parse the error.
+    stream.on('data', (chunk) => {
+      receivedData = true
+      chunks.push(chunk)
+    })
+
+    stream.on('end', () => {
+      if (!receivedData) {
        resolve(null)
+        return
      }
-    })
 
-    function parse(body) {
-      receivedData = true
+      const body = Buffer.concat(chunks)
      try {
        const parsed = JSON.parse(body.toString('utf8'))
        resolve(parsed.message || parsed.error || null)
-      } catch (err) {
+      } catch {
        resolve(body.toString('utf8').slice(0, 16000))
      }
-    }
+    })
+
+    stream.on('error', reject)
  })
 }
 
-module.exports = AssetHandler
+function omit(obj, keys) {
+  const copy = {}
+  Object.entries(obj).forEach(([key, value]) => {
+    if (!keys.includes(key)) {
+      copy[key] = value
+    }
+  })
+  return copy
+}
package/src/constants.js CHANGED
@@ -5,39 +5,46 @@
  * Note: Only for initial connection - if download fails while streaming, we cannot easily resume.
  * @internal
  */
-exports.DOCUMENT_STREAM_MAX_RETRIES = 5
+export const DOCUMENT_STREAM_MAX_RETRIES = 5
 
 /**
  * How many retries to attempt when downloading an asset.
  * User overridable as `options.maxAssetRetries`.
  * @internal
  */
-exports.ASSET_DOWNLOAD_MAX_RETRIES = 10
+export const ASSET_DOWNLOAD_MAX_RETRIES = 10
+
+/**
+ * Default delay between retries when retrieving assets or document stream.
+ * User overridable as `options.retryDelayMs`.
+ * @internal
+ */
+export const DEFAULT_RETRY_DELAY = 1500
 
 /**
  * How many concurrent asset downloads to allow.
  * User overridable as `options.assetConcurrency`.
  * @internal
  */
-exports.ASSET_DOWNLOAD_CONCURRENCY = 8
+export const ASSET_DOWNLOAD_CONCURRENCY = 8
 
 /**
  * How frequently we will `debug` log while streaming the documents.
  * @internal
  */
-exports.DOCUMENT_STREAM_DEBUG_INTERVAL = 10000
+export const DOCUMENT_STREAM_DEBUG_INTERVAL = 10000
 
 /**
  * How long to wait before timing out the read of a request due to inactivity.
  * User overridable as `options.readTimeout`.
  * @internal
  */
-exports.REQUEST_READ_TIMEOUT = 3 * 60 * 1000 // 3 minutes
+export const REQUEST_READ_TIMEOUT = 3 * 60 * 1000 // 3 minutes
 
 /**
 What mode to use when exporting documents.
 stream: Export all documents in the dataset in one request, this will be consistent but might be slow on large datasets.
 cursor: Export documents using a cursor, this might lead to inconsistent results if a mutation is performed while exporting.
 */
-exports.MODE_STREAM = 'stream'
-exports.MODE_CURSOR = 'cursor'
+export const MODE_STREAM = 'stream'
+export const MODE_CURSOR = 'cursor'
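
The `options.*` names called out in the comments above are plain `exportDataset` options; a hedged sketch (values illustrative, `client` as in the README example) of overriding the documented defaults:

```js
await exportDataset({
  client,
  dataset: 'production',
  outputPath: '-',
  mode: 'cursor', // MODE_CURSOR; MODE_STREAM ('stream') is the consistent single-request mode
  maxAssetRetries: 5, // instead of ASSET_DOWNLOAD_MAX_RETRIES (10)
  retryDelayMs: 3000, // instead of DEFAULT_RETRY_DELAY (1500)
  assetConcurrency: 4, // instead of ASSET_DOWNLOAD_CONCURRENCY (8)
  readTimeout: 60 * 1000, // instead of REQUEST_READ_TIMEOUT (3 minutes)
})
```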
package/src/debug.js CHANGED
@@ -1 +1,3 @@
-module.exports = require('debug')('sanity:export')
+import debugIt from 'debug'
+
+export const debug = debugIt('sanity:export')
package/src/export.js CHANGED
@@ -1,37 +1,38 @@
-const fs = require('fs')
-const os = require('os')
-const path = require('path')
-const zlib = require('zlib')
-const archiver = require('archiver')
-const miss = require('mississippi')
-const split = require('split2')
-const JsonStreamStringify = require('json-stream-stringify')
-const AssetHandler = require('./AssetHandler')
-const debug = require('./debug')
-const pipeAsync = require('./util/pipeAsync')
-const filterDocuments = require('./filterDocuments')
-const filterDocumentTypes = require('./filterDocumentTypes')
-const getDocumentsStream = require('./getDocumentsStream')
-const getDocumentCursorStream = require('./getDocumentCursorStream')
-const logFirstChunk = require('./logFirstChunk')
-const rejectOnApiError = require('./rejectOnApiError')
-const stringifyStream = require('./stringifyStream')
-const tryParseJson = require('./tryParseJson')
-const rimraf = require('./util/rimraf')
-const validateOptions = require('./validateOptions')
-const {DOCUMENT_STREAM_DEBUG_INTERVAL, MODE_CURSOR, MODE_STREAM} = require('./constants')
+import {createWriteStream} from 'node:fs'
+import {mkdir} from 'node:fs/promises'
+import {tmpdir} from 'node:os'
+import {join as joinPath} from 'node:path'
+import {PassThrough} from 'node:stream'
+import {finished, pipeline} from 'node:stream/promises'
+import {constants as zlib} from 'node:zlib'
+
+import archiver from 'archiver'
+import {JsonStreamStringify} from 'json-stream-stringify'
+import {rimraf} from 'rimraf'
+
+import {AssetHandler} from './AssetHandler.js'
+import {DOCUMENT_STREAM_DEBUG_INTERVAL, MODE_CURSOR, MODE_STREAM} from './constants.js'
+import {debug} from './debug.js'
+import {filterDocuments} from './filterDocuments.js'
+import {filterDocumentTypes} from './filterDocumentTypes.js'
+import {getDocumentCursorStream} from './getDocumentCursorStream.js'
+import {getDocumentsStream} from './getDocumentsStream.js'
+import {logFirstChunk} from './logFirstChunk.js'
+import {rejectOnApiError} from './rejectOnApiError.js'
+import {stringifyStream} from './stringifyStream.js'
+import {tryParseJson} from './tryParseJson.js'
+import {isWritableStream, split, throughObj} from './util/streamHelpers.js'
+import {validateOptions} from './validateOptions.js'
 
 const noop = () => null
 
-async function exportDataset(opts) {
+export async function exportDataset(opts) {
   const options = validateOptions(opts)
   const onProgress = options.onProgress || noop
   const archive = archiver('tar', {
     gzip: true,
     gzipOptions: {
-      level: options.compress
-        ? zlib.constants.Z_DEFAULT_COMPRESSION
-        : zlib.constants.Z_NO_COMPRESSION,
+      level: options.compress ? zlib.Z_DEFAULT_COMPRESSION : zlib.Z_NO_COMPRESSION,
     },
   })
   archive.on('warning', (err) => {
@@ -47,10 +48,10 @@ async function exportDataset(opts) {
     .toLowerCase()
 
   const prefix = `${opts.dataset ?? opts.mediaLibraryId}-export-${slugDate}`
-  const tmpDir = path.join(os.tmpdir(), prefix)
-  fs.mkdirSync(tmpDir, {recursive: true})
-  const dataPath = path.join(tmpDir, 'data.ndjson')
-  const assetsPath = path.join(tmpDir, 'assets.json')
+  const tmpDir = joinPath(tmpdir(), prefix)
+  await mkdir(tmpDir, {recursive: true})
+  const dataPath = joinPath(tmpDir, 'data.ndjson')
+  const assetsPath = joinPath(tmpDir, 'assets.json')
 
   const cleanup = () =>
     rimraf(tmpDir).catch((err) => {
@@ -63,6 +64,7 @@ async function exportDataset(opts) {
     prefix,
     concurrency: options.assetConcurrency,
     maxRetries: options.maxAssetRetries,
+    retryDelayMs: options.retryDelayMs,
   })
 
   debug('Downloading assets (temporarily) to %s', tmpDir)
@@ -73,7 +75,7 @@ async function exportDataset(opts) {
     outputStream = options.outputPath
   } else {
     outputStream =
-      options.outputPath === '-' ? process.stdout : fs.createWriteStream(options.outputPath)
+      options.outputPath === '-' ? process.stdout : createWriteStream(options.outputPath)
   }
 
   let assetStreamHandler = assetHandler.noop
@@ -88,16 +90,15 @@ async function exportDataset(opts) {
     reject = rej
   })
 
-  miss.finished(archive, async (archiveErr) => {
-    if (archiveErr) {
+  finished(archive)
+    .then(async () => {
+      debug('Archive finished')
+    })
+    .catch(async (archiveErr) => {
      debug('Archiving errored: %s', archiveErr.stack)
      await cleanup()
      reject(archiveErr)
-      return
-    }
-
-    debug('Archive finished')
-  })
+    })
 
   debug('Getting dataset export stream, mode: "%s"', options.mode)
   onProgress({step: 'Exporting documents...'})
@@ -150,7 +151,40 @@ async function exportDataset(opts) {
 
   scheduleDebugTimer()
 
-  const jsonStream = miss.pipeline(
+  const filterTransform = throughObj((doc, _enc, callback) => {
+    if (!options.filterDocument) {
+      return callback(null, doc)
+    }
+
+    try {
+      const include = options.filterDocument(doc)
+      return include ? callback(null, doc) : callback()
+    } catch (err) {
+      return callback(err)
+    }
+  })
+
+  const transformTransform = throughObj((doc, _enc, callback) => {
+    if (!options.transformDocument) {
+      return callback(null, doc)
+    }
+
+    try {
+      return callback(null, options.transformDocument(doc))
+    } catch (err) {
+      return callback(err)
+    }
+  })
+
+  const reportTransform = throughObj(reportDocumentCount)
+
+  // Use pipeline to chain streams with proper error handling
+  const jsonStream = new PassThrough()
+  finished(jsonStream)
+    .then(() => debug('JSON stream finished'))
+    .catch((err) => reject(err))
+
+  pipeline(
     inputStream,
     logFirstChunk(),
     split(tryParseJson),
@@ -158,98 +192,98 @@ async function exportDataset(opts) {
     filterDocuments(options.drafts),
     filterDocumentTypes(options.types),
     assetStreamHandler,
-    miss.through.obj((doc, _enc, callback) => {
-      if (options.filterDocument(doc)) {
-        return callback(null, doc)
-      }
-      return callback()
-    }),
-    miss.through.obj((doc, _enc, callback) => {
-      callback(null, options.transformDocument(doc))
-    }),
-    miss.through.obj(reportDocumentCount),
+    filterTransform,
+    transformTransform,
+    reportTransform,
     stringifyStream(),
-  )
-
-  miss.pipe(jsonStream, fs.createWriteStream(dataPath), async (err) => {
+    jsonStream,
+  ).catch((err) => {
    if (debugTimer !== null) clearTimeout(debugTimer)
+    debug(`Export stream error @ ${lastDocumentID}/${documentCount}: `, err)
+    reject(err)
+  })
 
-    if (err) {
-      debug(`Export stream error @ ${lastDocumentID}/${documentCount}: `, err)
-      reject(err)
-      return
-    }
-
-    debug('Export stream completed')
-    onProgress({
-      step: 'Exporting documents...',
-      current: documentCount,
-      total: documentCount,
-      update: true,
-    })
-
-    debug('Adding data.ndjson to archive')
-    archive.file(dataPath, {name: 'data.ndjson', prefix})
-
-    if (!options.raw && options.assets) {
-      onProgress({step: 'Downloading assets...'})
-    }
-
-    let prevCompleted = 0
-    const progressInterval = setInterval(() => {
-      const completed =
-        assetHandler.queueSize - assetHandler.queue.size - assetHandler.queue.pending
-
-      if (prevCompleted === completed) {
-        return
-      }
-
-      prevCompleted = completed
-      onProgress({
-        step: 'Downloading assets...',
-        current: completed,
-        total: assetHandler.queueSize,
-        update: true,
-      })
-    }, 500)
-
-    debug('Waiting for asset handler to complete downloads')
-    try {
-      const assetMap = await assetHandler.finish()
+  pipeline(jsonStream, createWriteStream(dataPath))
+    .then(async () => {
+      if (debugTimer !== null) clearTimeout(debugTimer)
 
-      // Make sure we mark the progress as done (eg 100/100 instead of 99/100)
+      debug('Export stream completed')
      onProgress({
-        step: 'Downloading assets...',
-        current: assetHandler.queueSize,
-        total: assetHandler.queueSize,
+        step: 'Exporting documents...',
+        current: documentCount,
+        total: documentCount,
        update: true,
      })
 
-      const assetsStream = fs.createWriteStream(assetsPath)
-      await pipeAsync(new JsonStreamStringify(assetMap), assetsStream)
+      debug('Adding data.ndjson to archive')
+      archive.file(dataPath, {name: 'data.ndjson', prefix})
 
-      if (options.assetsMap) {
-        archive.file(assetsPath, {name: 'assets.json', prefix})
+      if (!options.raw && options.assets) {
+        onProgress({step: 'Downloading assets...'})
      }
 
-      clearInterval(progressInterval)
-    } catch (assetErr) {
-      clearInterval(progressInterval)
-      await cleanup()
-      reject(assetErr)
-      return
-    }
+      let prevCompleted = 0
+      const progressInterval = setInterval(() => {
+        const completed =
+          assetHandler.queueSize - assetHandler.queue.size - assetHandler.queue.pending
+
+        if (prevCompleted === completed) {
+          return
+        }
+
+        prevCompleted = completed
+        onProgress({
+          step: 'Downloading assets...',
+          current: completed,
+          total: assetHandler.queueSize,
+          update: true,
+        })
+      }, 500)
+
+      debug('Waiting for asset handler to complete downloads')
+      try {
+        const assetMap = await assetHandler.finish()
+
+        // Make sure we mark the progress as done (eg 100/100 instead of 99/100)
+        onProgress({
+          step: 'Downloading assets...',
+          current: assetHandler.queueSize,
+          total: assetHandler.queueSize,
+          update: true,
+        })
+
+        const assetsStream = createWriteStream(assetsPath)
+        await pipeline(new JsonStreamStringify(assetMap), assetsStream)
+
+        if (options.assetsMap) {
+          archive.file(assetsPath, {name: 'assets.json', prefix})
+        }
+
+        clearInterval(progressInterval)
+      } catch (assetErr) {
+        clearInterval(progressInterval)
+        await cleanup()
+        reject(assetErr)
+        return
+      }
 
-      // Add all downloaded assets to archive
-      archive.directory(path.join(tmpDir, 'files'), `${prefix}/files`, {store: true})
-      archive.directory(path.join(tmpDir, 'images'), `${prefix}/images`, {store: true})
+      // Add all downloaded assets to archive
+      archive.directory(joinPath(tmpDir, 'files'), `${prefix}/files`, {store: true})
+      archive.directory(joinPath(tmpDir, 'images'), `${prefix}/images`, {store: true})
 
-    debug('Finalizing archive, flushing streams')
-    onProgress({step: 'Adding assets to archive...'})
-    await archive.finalize()
-  })
+      debug('Finalizing archive, flushing streams')
+      onProgress({step: 'Adding assets to archive...'})
+      await archive.finalize()
+    })
+    .catch(async (err) => {
+      if (debugTimer !== null) clearTimeout(debugTimer)
+      debug(`Export stream error @ ${lastDocumentID}/${documentCount}: `, err)
+      reject(err)
+    })
 
-  miss.pipe(archive, outputStream, onComplete)
+  pipeline(archive, outputStream)
+    .then(() => onComplete())
+    .catch(onComplete)
 
   async function onComplete(err) {
     onProgress({step: 'Clearing temporary files...'})
@@ -283,15 +317,3 @@ function getDocumentInputStream(options) {
 
   throw new Error(`Invalid mode: ${options.mode}`)
 }
-
-function isWritableStream(val) {
-  return (
-    val !== null &&
-    typeof val === 'object' &&
-    typeof val.pipe === 'function' &&
-    typeof val._write === 'function' &&
-    typeof val._writableState === 'object'
-  )
-}
-
-module.exports = exportDataset
package/src/filterDocumentTypes.js CHANGED
@@ -1,14 +1,18 @@
-const miss = require('mississippi')
+import {throughObj} from './util/streamHelpers.js'
 
-module.exports = (allowedTypes) =>
-  allowedTypes && allowedTypes.length > 0
-    ? miss.through.obj((doc, enc, callback) => {
-        const type = doc && doc._type
-        if (allowedTypes.includes(type)) {
-          callback(null, doc)
-          return
-        }
+export function filterDocumentTypes(allowedTypes) {
+  if (!allowedTypes || allowedTypes.length === 0) {
+    // Pass-through
+    return throughObj((doc, enc, callback) => callback(null, doc))
+  }
 
-        callback()
-      })
-    : miss.through.obj()
+  return throughObj(function docTypesFilter(doc, enc, callback) {
+    const type = doc && doc._type
+    if (allowedTypes.includes(type)) {
+      callback(null, doc)
+      return
+    }
+
+    callback()
+  })
+}
package/src/filterDocuments.js CHANGED
@@ -1,5 +1,5 @@
-const miss = require('mississippi')
-const debug = require('./debug')
+import {debug} from './debug.js'
+import {throughObj} from './util/streamHelpers.js'
 
 const isDraftOrVersion = (doc) =>
   doc && doc._id && (doc._id.indexOf('drafts.') === 0 || doc._id.indexOf('versions.') === 0)
@@ -8,8 +8,8 @@ const isSystemDocument = (doc) => doc && doc._id && doc._id.indexOf('_.') === 0
 const isReleaseDocument = (doc) => doc && doc._id && doc._id.indexOf('_.releases.') === 0
 const isCursor = (doc) => doc && !doc._id && doc.nextCursor !== undefined
 
-module.exports = (drafts) =>
-  miss.through.obj((doc, enc, callback) => {
+export function filterDocuments(drafts) {
+  return throughObj(function filterDocs(doc, enc, callback) {
    if (isCursor(doc)) {
      debug('%o is a cursor, skipping', doc)
      return callback()
@@ -21,7 +21,7 @@ module.exports = (drafts) =>
    }
 
    if (isSystemDocument(doc)) {
-      if (!drafts && isReleaseDocument(doc)) {
+      if (drafts && isReleaseDocument(doc)) {
        return callback(null, doc)
      }
      debug('%s is a system document, skipping', doc && doc._id)
@@ -30,3 +30,4 @@ module.exports = (drafts) =>
 
    return callback(null, doc)
  })
+}
package/src/getDocumentCursorStream.js CHANGED
@@ -1,13 +1,13 @@
-const {Transform} = require('node:stream')
+import {Transform} from 'node:stream'
 
-const pkg = require('../package.json')
-const debug = require('./debug')
-const requestStream = require('./requestStream')
+import {debug} from './debug.js'
+import {getUserAgent} from './getUserAgent.js'
+import {requestStream} from './requestStream.js'
 
 // same regex as split2 is using by default: https://github.com/mcollina/split2/blob/53432f54bd5bf422bd55d91d38f898b6c9496fc1/index.js#L86
 const splitRegex = /\r?\n/
 
-module.exports = async (options) => {
+export async function getDocumentCursorStream(options) {
   let streamsInflight = 0
   function decrementInflight(stream) {
     streamsInflight--
@@ -80,17 +80,21 @@ function startStream(options, nextCursor) {
  }
  const token = options.client.config().token
  const headers = {
-    'User-Agent': `${pkg.name}@${pkg.version}`,
+    'User-Agent': getUserAgent(),
    ...(token ? {Authorization: `Bearer ${token}`} : {}),
  }
 
  debug('Starting stream with cursor "%s"', nextCursor)
 
-  return requestStream({url: url.toString(), headers, maxRetries: options.maxRetries}).then(
-    (res) => {
-      debug('Got stream with HTTP %d', res.statusCode)
+  return requestStream({
+    url: url.toString(),
+    headers,
+    maxRetries: options.maxRetries,
+    retryDelayMs: options.retryDelayMs,
+    readTimeout: options.readTimeout,
+  }).then((res) => {
+    debug('Got stream with HTTP %d', res.statusCode)
 
-    return res
-  },
-  )
+    return res
+  })
 }
package/src/getDocumentsStream.js CHANGED
@@ -1,7 +1,7 @@
-const pkg = require('../package.json')
-const requestStream = require('./requestStream')
+import {getUserAgent} from './getUserAgent.js'
+import {requestStream} from './requestStream.js'
 
-module.exports = (options) => {
+export function getDocumentsStream(options) {
   // Sanity client doesn't handle streams natively since we want to support node/browser
   // with same API. We're just using it here to get hold of URLs and tokens.
   const baseUrl = options.client.getUrl(
@@ -17,7 +17,7 @@ module.exports = (options) => {
 
   const token = options.client.config().token
   const headers = {
-    'User-Agent': `${pkg.name}@${pkg.version}`,
+    'User-Agent': getUserAgent(),
     ...(token ? {Authorization: `Bearer ${token}`} : {}),
   }
 
@@ -25,6 +25,7 @@ module.exports = (options) => {
     url: url.toString(),
     headers,
     maxRetries: options.maxRetries,
+    retryDelayMs: options.retryDelayMs,
     readTimeout: options.readTimeout,
   })
 }
package/src/getUserAgent.js ADDED
@@ -0,0 +1,14 @@
+import {readFileSync} from 'node:fs'
+import {join as joinPath} from 'node:path'
+
+let ua = null
+
+export function getUserAgent() {
+  if (!ua) {
+    const data = readFileSync(joinPath(import.meta.dirname, '..', 'package.json'), 'utf-8')
+    const pkg = JSON.parse(data)
+    ua = `${pkg.name}@${pkg.version}`
+  }
+
+  return ua
+}
package/src/logFirstChunk.js CHANGED
@@ -1,9 +1,9 @@
-const miss = require('mississippi')
-const debug = require('./debug')
+import {debug} from './debug.js'
+import {through} from './util/streamHelpers.js'
 
-module.exports = () => {
+export function logFirstChunk() {
   let firstChunk = true
-  return miss.through((chunk, enc, callback) => {
+  return through((chunk, enc, callback) => {
     if (firstChunk) {
       const string = chunk.toString('utf8').split('\n')[0]
       debug('First chunk received: %s', string.slice(0, 300))
package/src/rejectOnApiError.js CHANGED
@@ -1,7 +1,7 @@
-const miss = require('mississippi')
+import {throughObj} from './util/streamHelpers.js'
 
-module.exports = () =>
-  miss.through.obj((doc, enc, callback) => {
+export function rejectOnApiError() {
+  return throughObj((doc, enc, callback) => {
    // check if the document passed contains a document attribtue first, and return early.
    if (doc._id) {
      callback(null, doc)
@@ -28,3 +28,4 @@ module.exports = () =>
 
    callback(null, doc)
  })
+}
package/src/requestStream.js CHANGED
@@ -1,28 +1,37 @@
-const {getIt} = require('get-it')
-const {keepAlive, promise} = require('get-it/middleware')
-const debug = require('./debug')
-const {extractFirstError} = require('./util/extractFirstError')
-const {DOCUMENT_STREAM_MAX_RETRIES, REQUEST_READ_TIMEOUT} = require('./constants')
+import {getIt} from 'get-it'
+import {keepAlive, promise} from 'get-it/middleware'
+
+import {
+  DEFAULT_RETRY_DELAY,
+  DOCUMENT_STREAM_MAX_RETRIES,
+  REQUEST_READ_TIMEOUT,
+} from './constants.js'
+import {debug} from './debug.js'
+import {delay} from './util/delay.js'
+import {extractFirstError} from './util/extractFirstError.js'
+import {tryThrowFriendlyError} from './util/friendlyError.js'
 
 const request = getIt([keepAlive(), promise({onlyBody: true})])
 
 const CONNECTION_TIMEOUT = 15 * 1000 // 15 seconds
-const RETRY_DELAY_MS = 1500 // 1.5 seconds
-
-function delay(ms) {
-  return new Promise((resolve) => setTimeout(resolve, ms))
-}
 
 /* eslint-disable no-await-in-loop, max-depth */
-module.exports = async (options) => {
+export async function requestStream(options) {
   const maxRetries =
     typeof options.maxRetries === 'number' ? options.maxRetries : DOCUMENT_STREAM_MAX_RETRIES
 
   const readTimeout =
     typeof options.readTimeout === 'number' ? options.readTimeout : REQUEST_READ_TIMEOUT
 
+  const retryDelayMs =
+    typeof options.retryDelayMs === 'number' ? options.retryDelayMs : DEFAULT_RETRY_DELAY
+
   let error
-  for (let i = 0; i < maxRetries; i++) {
+
+  let i = 0
+  do {
+    i++
+
    try {
      return await request({
        ...options,
@@ -31,16 +40,24 @@ module.exports = async (options) => {
        timeout: {connect: CONNECTION_TIMEOUT, socket: readTimeout},
      })
    } catch (err) {
-      error = extractFirstError(err)
+      error = extractFirstError(err) || err
+
+      if (maxRetries === 0) {
+        throw error
+      }
 
      if (err.response && err.response.statusCode && err.response.statusCode < 500) {
        break
      }
 
-      debug('Error, retrying after %d ms: %s', RETRY_DELAY_MS, error.message)
-      await delay(RETRY_DELAY_MS)
+      if (i < maxRetries) {
+        debug('Error, retrying after %d ms: %s', retryDelayMs, error.message)
+        await delay(retryDelayMs)
+      }
    }
-  }
+  } while (i < maxRetries)
+
+  await tryThrowFriendlyError(error)
 
  error.message = `Export: Failed to fetch ${options.url}: ${error.message}`
  throw error
package/src/stringifyStream.js CHANGED
@@ -1,4 +1,5 @@
-const miss = require('mississippi')
+import {throughObj} from './util/streamHelpers.js'
 
-module.exports = () =>
-  miss.through.obj((doc, enc, callback) => callback(null, `${JSON.stringify(doc)}\n`))
+export function stringifyStream() {
+  return throughObj((doc, enc, callback) => callback(null, `${JSON.stringify(doc)}\n`))
+}
package/src/tryParseJson.js CHANGED
@@ -1,5 +1,3 @@
-const {createSafeJsonParser} = require('@sanity/util/createSafeJsonParser')
-
 /**
  * Safe JSON parser that is able to handle lines interrupted by an error object.
  *
@@ -8,6 +6,30 @@ const {createSafeJsonParser} = require('@sanity/util/createSafeJsonParser')
  * @internal
  * @see {@link https://github.com/sanity-io/sanity/pull/1787 | Initial pull request}
  */
-module.exports = createSafeJsonParser({
+export const tryParseJson = createSafeJsonParser({
   errorLabel: 'Error streaming dataset',
 })
+
+function createSafeJsonParser({errorLabel}) {
+  return function safeJsonParser(line) {
+    try {
+      return JSON.parse(line)
+    } catch (err) {
+      // Catch half-done lines with an error at the end
+      const errorPosition = line.lastIndexOf('{"error":')
+      if (errorPosition === -1) {
+        err.message = `${err.message} (${line})`
+        throw err
+      }
+
+      const errorJson = line.slice(errorPosition)
+      const errorLine = JSON.parse(errorJson)
+      const error = errorLine && errorLine.error
+      if (error && error.description) {
+        throw new Error(`${errorLabel}: ${error.description}\n\n${errorJson}\n`, {cause: err})
+      }
+
+      throw err
+    }
+  }
+}
package/src/util/delay.js ADDED
@@ -0,0 +1,3 @@
+export function delay(ms) {
+  return new Promise((resolve) => setTimeout(resolve, ms))
+}
package/src/util/extractFirstError.js CHANGED
@@ -1,4 +1,4 @@
-exports.extractFirstError = function extractFirstError(err) {
+export function extractFirstError(err) {
   if (
     // eslint-disable-next-line no-undef
     ((typeof AggregateError !== 'undefined' && err instanceof AggregateError) ||
package/src/util/friendlyError.js ADDED
@@ -0,0 +1,58 @@
+export async function tryThrowFriendlyError(err) {
+  if (!isRecord(err)) {
+    return null
+  }
+
+  if (!('response' in err) || !isRecord(err.response)) {
+    return null
+  }
+
+  if (
+    !('body' in err.response) ||
+    !('pipe' in err.response.body) ||
+    !('headers' in err.response) ||
+    !isRecord(err.response.headers)
+  ) {
+    return null
+  }
+
+  if (
+    typeof err.response.headers['content-type'] !== 'string' ||
+    !err.response.headers['content-type'].includes('application/json')
+  ) {
+    return null
+  }
+
+  const body = await readBodyJson(err.response.body)
+
+  if (!isRecord(body)) {
+    return null
+  }
+
+  // Look for Sanity API(ish) standard error shape
+  const status =
+    typeof err.response.statusCode === 'number' ? `HTTP ${err.response.statusCode}` : undefined
+  const error = typeof body.error === 'string' ? body.error : undefined
+  const message = typeof body.message === 'string' ? body.message : undefined
+  if (!error && !message) {
+    return null
+  }
+
+  throw new Error(['Export', status, error, message].filter(Boolean).join(': '))
+}
+
+function isRecord(thing) {
+  return typeof thing === 'object' && thing !== null && !Array.isArray(thing)
+}
+
+async function readBody(req) {
+  const chunks = []
+  for await (const chunk of req) {
+    chunks.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk)
+  }
+  return Buffer.concat(chunks)
+}
+
+async function readBodyJson(req) {
+  return JSON.parse((await readBody(req)).toString('utf8'))
+}
package/src/util/streamHelpers.js ADDED
@@ -0,0 +1,103 @@
+import {Transform} from 'node:stream'
+
+export function through(transformFn) {
+  return new Transform({
+    transform(chunk, encoding, callback) {
+      transformFn(chunk, encoding, callback)
+    },
+  })
+}
+
+export function throughObj(transformFn) {
+  return new Transform({
+    objectMode: true,
+    transform(chunk, encoding, callback) {
+      transformFn(chunk, encoding, callback)
+    },
+  })
+}
+
+export function isWritableStream(val) {
+  return (
+    val !== null &&
+    typeof val === 'object' &&
+    typeof val.pipe === 'function' &&
+    typeof val._write === 'function' &&
+    typeof val._writableState === 'object'
+  )
+}
+
+export function concat(onData) {
+  const chunks = []
+  return new Transform({
+    objectMode: true,
+    transform(chunk, encoding, callback) {
+      chunks.push(chunk)
+      callback()
+    },
+    flush(callback) {
+      try {
+        onData(chunks)
+        callback()
+      } catch (err) {
+        callback(err)
+      }
+    },
+  })
+}
+
+export const split = (transformFn) => {
+  let buffer = ''
+  const splitRegex = /\r?\n/
+
+  return new Transform({
+    objectMode: !!transformFn,
+    transform(chunk, encoding, callback) {
+      buffer += chunk.toString()
+      const lines = buffer.split(splitRegex)
+
+      // Keep the last line in buffer as it might be incomplete
+      buffer = lines.pop() || ''
+
+      for (const line of lines) {
+        if (line.length === 0) continue
+
+        if (transformFn) {
+          try {
+            const result = transformFn(line)
+            if (result !== undefined) {
+              this.push(result)
+            }
+          } catch (err) {
+            callback(err)
+            return
+          }
+        } else {
+          this.push(line)
+        }
+      }
+      callback()
+    },
+    flush(callback) {
+      if (buffer.length === 0) {
+        callback()
+        return
+      }
+
+      if (!transformFn) {
+        callback(null, buffer)
+        return
+      }
+
+      try {
+        const result = transformFn(buffer)
+        if (result !== undefined) {
+          this.push(result)
+        }
+        callback()
+      } catch (err) {
+        callback(err)
+      }
+    },
+  })
+}
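
These helpers replace the removed `mississippi` and `split2` dependencies. A small usage sketch, not from the package, assuming a scratch file placed next to `streamHelpers.js`; it exercises `split` with a line mapper, the same shape as the `split(tryParseJson)` call in `src/export.js`:

```js
import {Readable} from 'node:stream'

import {split} from './streamHelpers.js'

// Two chunks that together hold two NDJSON lines; `split` buffers across
// chunk boundaries and pushes one parsed object per complete line.
const input = Readable.from(['{"a":1}\n{"b"', ':2}\n'])

input.pipe(split(JSON.parse)).on('data', (doc) => {
  console.log(doc) // {a: 1} then {b: 2}
})
```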
package/src/validateOptions.js CHANGED
@@ -1,11 +1,10 @@
-const defaults = require('lodash/defaults')
-const {
-  DOCUMENT_STREAM_MAX_RETRIES,
+import {
   ASSET_DOWNLOAD_MAX_RETRIES,
-  REQUEST_READ_TIMEOUT,
-  MODE_STREAM,
+  DOCUMENT_STREAM_MAX_RETRIES,
   MODE_CURSOR,
-} = require('./constants')
+  MODE_STREAM,
+  REQUEST_READ_TIMEOUT,
+} from './constants.js'
 
 const clientMethods = ['getUrl', 'config']
 const booleanFlags = ['assets', 'raw', 'compress', 'drafts']
@@ -24,8 +23,8 @@ const exportDefaults = {
   transformDocument: (doc) => doc,
 }
 
-function validateOptions(opts) {
-  const options = defaults({}, opts, exportDefaults)
+export function validateOptions(opts) {
+  const options = {...exportDefaults, ...opts}
 
   const resources = [options.dataset, options.mediaLibraryId].filter(
     (resource) => typeof resource === 'string' && resource.length !== 0,
@@ -112,5 +111,3 @@ function validateOptions(opts) {
 
   return options
 }
-
-module.exports = validateOptions
package/src/util/pipeAsync.js DELETED
@@ -1,17 +0,0 @@
-const miss = require('mississippi')
-
-module.exports = async (readable, writable) => {
-  return new Promise((resolve, reject) => {
-    try {
-      miss.pipe(readable, writable, (jsonErr) => {
-        if (jsonErr) {
-          reject(jsonErr)
-        } else {
-          resolve()
-        }
-      })
-    } catch (assetErr) {
-      reject(assetErr)
-    }
-  })
-}
package/src/util/rimraf.js DELETED
@@ -1 +0,0 @@
-module.exports = require('rimraf').rimraf