@sanity/export 0.136.3-gql-rtb.372 → 0.136.3-purple-unicorn-patch.5627

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/AssetHandler.js CHANGED
@@ -1,9 +1,10 @@
  const path = require('path')
  const crypto = require('crypto')
+ const {parse: parseUrl, format: formatUrl} = require('url')
  const fse = require('fs-extra')
  const miss = require('mississippi')
  const PQueue = require('p-queue')
- const {omit} = require('lodash')
+ const {omit, noop} = require('lodash')
  const pkg = require('../package.json')
  const requestStream = require('./requestStream')
  const debug = require('./debug')
@@ -11,20 +12,27 @@ const debug = require('./debug')
  const EXCLUDE_PROPS = ['_id', '_type', 'assetId', 'extension', 'mimeType', 'path', 'url']
  const ACTION_REMOVE = 'remove'
  const ACTION_REWRITE = 'rewrite'
+ const ASSET_DOWNLOAD_CONCURRENCY = 8

  class AssetHandler {
  constructor(options) {
+ const concurrency = options.concurrency || ASSET_DOWNLOAD_CONCURRENCY
+ debug('Using asset download concurrency of %d', concurrency)
+
  this.client = options.client
  this.tmpDir = options.tmpDir
  this.assetDirsCreated = false

+ this.downloading = []
  this.assetsSeen = new Map()
  this.assetMap = {}
  this.filesWritten = 0
  this.queueSize = 0
- this.queue = options.queue || new PQueue({concurrency: 3})
- this.reject = () => {
- throw new Error('Asset handler errored before `finish()` was called')
+ this.queue = options.queue || new PQueue({concurrency})
+
+ this.rejectedError = null
+ this.reject = (err) => {
+ this.rejectedError = err
  }
  }

@@ -36,6 +44,11 @@ class AssetHandler {

  finish() {
  return new Promise((resolve, reject) => {
+ if (this.rejectedError) {
+ reject(this.rejectedError)
+ return
+ }
+
  this.reject = reject
  this.queue.onIdle().then(() => resolve(this.assetMap))
  })
@@ -53,7 +66,7 @@ class AssetHandler {
  return
  }

- callback(null, await this.findAndModify(doc, ACTION_REWRITE))
+ callback(null, this.findAndModify(doc, ACTION_REWRITE))
  })

  // Called in the case where we don't _want_ assets, so basically just remove all asset documents
@@ -64,7 +77,7 @@ class AssetHandler {
  return
  }

- callback(null, await this.findAndModify(doc, ACTION_REMOVE))
+ callback(null, this.findAndModify(doc, ACTION_REMOVE))
  })

  // Called when we are using raw export mode along with `assets: false`, where we simply
@@ -89,45 +102,131 @@ class AssetHandler {

  debug('Adding download task for %s (destination: %s)', assetDoc._id, dstPath)
  this.queueSize++
+ this.downloading.push(assetDoc.url)
  this.queue.add(() => this.downloadAsset(assetDoc, dstPath))
  }

- async downloadAsset(assetDoc, dstPath) {
- const {url} = assetDoc
+ maybeCreateAssetDirs() {
+ if (this.assetDirsCreated) {
+ return
+ }
+
+ /* eslint-disable no-sync */
+ fse.ensureDirSync(path.join(this.tmpDir, 'files'))
+ fse.ensureDirSync(path.join(this.tmpDir, 'images'))
+ /* eslint-enable no-sync */
+ this.assetDirsCreated = true
+ }
+
+ getAssetRequestOptions(assetDoc) {
+ const token = this.client.config().token
  const headers = {'User-Agent': `${pkg.name}@${pkg.version}`}
- const stream = await requestStream({url, headers})
+ const isImage = assetDoc._type === 'sanity.imageAsset'
+
+ const url = parseUrl(assetDoc.url, true)
+ if (isImage && ['cdn.sanity.io', 'cdn.sanity.work'].includes(url.hostname)) {
+ headers.Authorization = `Bearer ${token}`
+ url.query = {...(url.query || {}), dlRaw: 'true'}
+ }
+
+ return {url: formatUrl(url), headers}
+ }
+
+ async downloadAsset(assetDoc, dstPath, attemptNum = 0) {
+ const {url} = assetDoc
+ const options = this.getAssetRequestOptions(assetDoc)
+
+ let stream
+ try {
+ stream = await requestStream(options)
+ } catch (err) {
+ this.reject(err)
+ return false
+ }

  if (stream.statusCode !== 200) {
  this.queue.clear()
- this.reject(new Error(`Referenced asset URL "${url}" returned HTTP ${stream.statusCode}`))
- return
- }
+ const err = await tryGetErrorFromStream(stream)
+ let errMsg = `Referenced asset URL "${url}" returned HTTP ${stream.statusCode}`
+ if (err) {
+ errMsg = `${errMsg}:\n\n${err}`
+ }

- if (!this.assetDirsCreated) {
- /* eslint-disable no-sync */
- fse.ensureDirSync(path.join(this.tmpDir, 'files'))
- fse.ensureDirSync(path.join(this.tmpDir, 'images'))
- /* eslint-enable no-sync */
- this.assetDirsCreated = true
+ this.reject(new Error(errMsg))
+ return false
  }

+ this.maybeCreateAssetDirs()
+
  debug('Asset stream ready, writing to filesystem at %s', dstPath)
- const hash = await writeHashedStream(path.join(this.tmpDir, dstPath), stream)
- const type = assetDoc._type === 'sanity.imageAsset' ? 'image' : 'file'
- const id = `${type}-${hash}`
+ const tmpPath = path.join(this.tmpDir, dstPath)
+ const {sha1, md5, size} = await writeHashedStream(tmpPath, stream)
+
+ // Verify it against our downloaded stream to make sure we have the same copy
+ const contentLength = stream.headers['content-length']
+ const remoteSha1 = stream.headers['x-sanity-sha1']
+ const remoteMd5 = stream.headers['x-sanity-md5']
+ const hasHash = Boolean(remoteSha1 || remoteMd5)
+ const method = md5 ? 'md5' : 'sha1'
+
+ let differs = false
+ if (remoteMd5 && md5) {
+ differs = remoteMd5 !== md5
+ } else if (remoteSha1 && sha1) {
+ differs = remoteSha1 !== sha1
+ }
+
+ if (differs && attemptNum < 3) {
+ debug('%s does not match downloaded asset, retrying (#%d) [%s]', method, attemptNum + 1, url)
+ return this.downloadAsset(assetDoc, dstPath, attemptNum + 1)
+ } else if (differs) {
+ const details = [
+ hasHash &&
+ (method === 'md5'
+ ? `md5 should be ${remoteMd5}, got ${md5}`
+ : `sha1 should be ${remoteSha1}, got ${sha1}`),
+
+ contentLength &&
+ parseInt(contentLength, 10) !== size &&
+ `Asset should be ${contentLength} bytes, got ${size}`,
+
+ `Did not succeed after ${attemptNum} attempts.`,
+ ]
+
+ const detailsString = `Details:\n - ${details.filter(Boolean).join('\n - ')}`
+
+ await fse.unlink(tmpPath)
+ this.queue.clear()
+
+ const error = new Error(
+ `Failed to download asset at ${assetDoc.url}, giving up. ${detailsString}`
+ )
+
+ this.reject(error)
+ return false
+ }
+
+ const isImage = assetDoc._type === 'sanity.imageAsset'
+ const type = isImage ? 'image' : 'file'
+ const id = `${type}-${sha1}`

  const metaProps = omit(assetDoc, EXCLUDE_PROPS)
  if (Object.keys(metaProps).length > 0) {
  this.assetMap[id] = metaProps
  }

+ this.downloading.splice(
+ this.downloading.findIndex((datUrl) => datUrl === url),
+ 1
+ )
+
  this.filesWritten++
+ return true
  }

- // eslint-disable-next-line complexity
- findAndModify = async (item, action) => {
+ findAndModify = (item, action) => {
  if (Array.isArray(item)) {
- const children = await Promise.all(item.map(child => this.findAndModify(child, action)))
+ const children = item.map((child) => this.findAndModify(child, action))
  return children.filter(Boolean)
  }

@@ -143,16 +242,12 @@ class AssetHandler {
  if (isAsset && action === ACTION_REWRITE) {
  const {asset, ...other} = item
  const assetId = asset._ref
- if (isModernAsset(assetId)) {
- const assetType = getAssetType(item)
- const filePath = `${assetType}s/${generateFilename(assetId)}`
- return {_sanityAsset: `${assetType}@file://./${filePath}`, ...other}
+ const assetType = getAssetType(item)
+ const filePath = `${assetType}s/${generateFilename(assetId)}`
+ return {
+ _sanityAsset: `${assetType}@file://./${filePath}`,
+ ...this.findAndModify(other, action),
  }
-
- // Legacy asset
- const type = this.assetsSeen.get(assetId) || (await this.lookupAssetType(assetId))
- const filePath = `${type}s/${generateFilename(assetId)}`
- return {_sanityAsset: `${type}@file://./${filePath}`, ...other}
  }

  const newItem = {}
@@ -161,8 +256,7 @@ class AssetHandler {
  const key = keys[i]
  const value = item[key]

- // eslint-disable-next-line no-await-in-loop
- newItem[key] = await this.findAndModify(value, action)
+ newItem[key] = this.findAndModify(value, action)

  if (typeof newItem[key] === 'undefined') {
  delete newItem[key]
@@ -171,15 +265,10 @@ class AssetHandler {

  return newItem
  }
-
- lookupAssetType = async assetId => {
- const docType = await this.client.fetch('*[_id == $id][0]._type', {id: assetId})
- return docType === 'sanity.imageAsset' ? 'image' : 'file'
- }
  }

  function isAssetField(item) {
- return item.asset && item.asset._ref
+ return item.asset && item.asset._ref && isSanityAsset(item.asset._ref)
  }

  function getAssetType(item) {
@@ -191,8 +280,11 @@ function getAssetType(item) {
  return type || null
  }

- function isModernAsset(assetId) {
- return /^(image|file)/.test(assetId)
+ function isSanityAsset(assetId) {
+ return (
+ /^image-[a-f0-9]{40}-\d+x\d+-[a-z]+$/.test(assetId) ||
+ /^file-[a-f0-9]{40}-[a-z0-9]+$/.test(assetId)
+ )
  }

  function generateFilename(assetId) {
@@ -202,22 +294,46 @@ function generateFilename(assetId) {
  }

  function writeHashedStream(filePath, stream) {
- const hash = crypto.createHash('sha1')
+ let size = 0
+ const md5 = crypto.createHash('md5')
+ const sha1 = crypto.createHash('sha1')
+
  const hasher = miss.through((chunk, enc, cb) => {
- hash.update(chunk)
+ size += chunk.length
+ md5.update(chunk)
+ sha1.update(chunk)
  cb(null, chunk)
  })

  return new Promise((resolve, reject) =>
- miss.pipe(
- stream,
- hasher,
- fse.createWriteStream(filePath),
- err => {
- return err ? reject(err) : resolve(hash.digest('hex'))
+ miss.pipe(stream, hasher, fse.createWriteStream(filePath), (err) => {
+ if (err) {
+ reject(err)
+ return
  }
- )
+
+ resolve({
+ size,
+ sha1: sha1.digest('hex'),
+ md5: md5.digest('hex'),
+ })
+ })
  )
  }

+ function tryGetErrorFromStream(stream) {
+ return new Promise((resolve, reject) => {
+ miss.pipe(stream, miss.concat(parse), (err) => (err ? reject(err) : noop))
+
+ function parse(body) {
+ try {
+ const parsed = JSON.parse(body.toString('utf8'))
+ resolve(parsed.message || parsed.error || null)
+ } catch (err) {
+ resolve(body.toString('utf8').slice(0, 16000))
+ }
+ }
+ })
+ }
+
  module.exports = AssetHandler
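
Note on the AssetHandler changes above: the download concurrency is now configurable (previously a hard-coded PQueue concurrency of 3, now defaulting to ASSET_DOWNLOAD_CONCURRENCY = 8), and each download is verified against the x-sanity-sha1 / x-sanity-md5 response headers, retried up to three times before failing. A minimal usage sketch, assuming a pre-configured @sanity/client instance; the variable names and values below are illustrative and not part of the diff:

  const AssetHandler = require('./AssetHandler')

  // Hypothetical setup: `client` is a configured @sanity/client instance (assumed),
  // and tmpDir is a writable scratch directory where files/ and images/ are created.
  const handler = new AssetHandler({
    client,
    tmpDir: '/tmp/my-export',
    concurrency: 4, // new option; falls back to ASSET_DOWNLOAD_CONCURRENCY (8) when omitted
  })
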
package/src/export.js CHANGED
@@ -14,6 +14,8 @@ const getDocumentsStream = require('./getDocumentsStream')
  const filterSystemDocuments = require('./filterSystemDocuments')
  const filterDocumentTypes = require('./filterDocumentTypes')
  const filterDrafts = require('./filterDrafts')
+ const logFirstChunk = require('./logFirstChunk')
+ const tryParseJson = require('./tryParseJson')

  const noop = () => null

@@ -22,7 +24,7 @@ function exportDataset(opts) {
  const onProgress = options.onProgress || noop
  const archive = archiver('tar', {
  gzip: true,
- gzipOptions: {level: options.compress ? zlib.Z_DEFAULT_COMPRESSION : zlib.Z_NO_COMPRESSION}
+ gzipOptions: {level: options.compress ? zlib.Z_DEFAULT_COMPRESSION : zlib.Z_NO_COMPRESSION},
  })

  const slugDate = new Date()
@@ -32,16 +34,28 @@ function exportDataset(opts) {

  const prefix = `${opts.dataset}-export-${slugDate}`
  const tmpDir = path.join(os.tmpdir(), prefix)
+ const cleanup = () =>
+ fse.remove(tmpDir).catch((err) => {
+ debug(`Error while cleaning up temporary files: ${err.message}`)
+ })
+
  const assetHandler = new AssetHandler({
  client: options.client,
  tmpDir,
- prefix
+ prefix,
+ concurrency: options.assetConcurrency,
  })

  debug('Outputting assets (temporarily) to %s', tmpDir)
  debug('Outputting to %s', options.outputPath === '-' ? 'stdout' : options.outputPath)
- const outputStream =
- options.outputPath === '-' ? process.stdout : fse.createWriteStream(options.outputPath)
+
+ let outputStream
+ if (isWritableStream(options.outputPath)) {
+ outputStream = options.outputPath
+ } else {
+ outputStream =
+ options.outputPath === '-' ? process.stdout : fse.createWriteStream(options.outputPath)
+ }

  let assetStreamHandler = assetHandler.noop
  if (!options.raw) {
@@ -49,9 +63,10 @@ function exportDataset(opts) {
  }

  return new Promise(async (resolve, reject) => {
- miss.finished(archive, archiveErr => {
+ miss.finished(archive, async (archiveErr) => {
  if (archiveErr) {
  debug('Archiving errored! %s', archiveErr.stack)
+ await cleanup()
  reject(archiveErr)
  return
  }
@@ -73,7 +88,7 @@ function exportDataset(opts) {
  step: 'Exporting documents...',
  current: documentCount,
  total: '?',
- update: true
+ update: true,
  })

  lastReported = now
@@ -83,19 +98,23 @@ function exportDataset(opts) {
  }

  const inputStream = await getDocumentsStream(options.client, options.dataset)
+ debug('Got HTTP %d', inputStream.statusCode)
+ debug('Response headers: %o', inputStream.headers)
+
  const jsonStream = miss.pipeline(
  inputStream,
- split(JSON.parse),
- rejectOnApiError,
- filterSystemDocuments,
+ logFirstChunk(),
+ split(tryParseJson),
+ rejectOnApiError(),
+ filterSystemDocuments(),
  assetStreamHandler,
  filterDocumentTypes(options.types),
- options.drafts ? miss.through.obj() : filterDrafts,
- stringifyStream,
+ options.drafts ? miss.through.obj() : filterDrafts(),
+ stringifyStream(),
  miss.through(reportDocumentCount)
  )

- miss.finished(jsonStream, async err => {
+ miss.finished(jsonStream, async (err) => {
  if (err) {
  return
  }
@@ -104,7 +123,7 @@ function exportDataset(opts) {
  step: 'Exporting documents...',
  current: documentCount,
  total: documentCount,
- update: true
+ update: true,
  })

  if (!options.raw && options.assets) {
@@ -113,7 +132,9 @@ function exportDataset(opts) {

  let prevCompleted = 0
  const progressInterval = setInterval(() => {
- const completed = assetHandler.queueSize - assetHandler.queue.size
+ const completed =
+ assetHandler.queueSize - assetHandler.queue.size - assetHandler.queue.pending
+
  if (prevCompleted === completed) {
  return
  }
@@ -123,17 +144,27 @@ function exportDataset(opts) {
  step: 'Downloading assets...',
  current: completed,
  total: assetHandler.queueSize,
- update: true
+ update: true,
  })
  }, 500)

  debug('Waiting for asset handler to complete downloads')
  try {
  const assetMap = await assetHandler.finish()
+
+ // Make sure we mark the progress as done (eg 100/100 instead of 99/100)
+ onProgress({
+ step: 'Downloading assets...',
+ current: assetHandler.queueSize,
+ total: assetHandler.queueSize,
+ update: true,
+ })
+
  archive.append(JSON.stringify(assetMap), {name: 'assets.json', prefix})
  clearInterval(progressInterval)
  } catch (assetErr) {
  clearInterval(progressInterval)
+ await cleanup()
  reject(assetErr)
  return
  }
@@ -147,20 +178,16 @@ function exportDataset(opts) {
  archive.finalize()
  })

- archive.on('warning', err => {
+ archive.on('warning', (err) => {
  debug('Archive warning: %s', err.message)
  })

  archive.append(jsonStream, {name: 'data.ndjson', prefix})
- miss.pipe(
- archive,
- outputStream,
- onComplete
- )
+ miss.pipe(archive, outputStream, onComplete)

  async function onComplete(err) {
  onProgress({step: 'Clearing temporary files...'})
- await fse.remove(tmpDir)
+ await cleanup()

  if (!err) {
  resolve()
@@ -174,4 +201,14 @@ function exportDataset(opts) {
  })
  }

+ function isWritableStream(val) {
+ return (
+ val !== null &&
+ typeof val === 'object' &&
+ typeof val.pipe === 'function' &&
+ typeof val._write === 'function' &&
+ typeof val._writableState === 'object'
+ )
+ }
+
  module.exports = exportDataset
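
Note on the export.js changes above: exportDataset() now accepts an assetConcurrency option (forwarded to AssetHandler as `concurrency`), and outputPath may be an already-open writable stream in addition to a file path or '-' for stdout. A minimal usage sketch, assuming a pre-configured @sanity/client instance and that the package entry point resolves to this module; the option values are illustrative and not part of the diff:

  const fs = require('fs')
  const exportDataset = require('./export') // or require('@sanity/export'), assuming it maps to this file

  // Hypothetical invocation exercising the options surfaced in this diff.
  exportDataset({
    client,                                                  // configured @sanity/client instance (assumed)
    dataset: 'production',
    outputPath: fs.createWriteStream('production.tar.gz'),   // writable streams are now accepted
    assetConcurrency: 4,                                      // forwarded to AssetHandler
    compress: true,
    assets: true,
    drafts: false,
  }).then(() => console.log('export complete'))
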
package/src/filterDocumentTypes.js CHANGED
@@ -1,6 +1,6 @@
  const miss = require('mississippi')

- module.exports = allowedTypes =>
+ module.exports = (allowedTypes) =>
  allowedTypes
  ? miss.through.obj((doc, enc, callback) => {
  const type = doc && doc._type
package/src/filterDrafts.js CHANGED
@@ -1,11 +1,12 @@
  const miss = require('mississippi')

- const isDraft = doc => doc && doc._id && doc._id.indexOf('drafts.') === 0
+ const isDraft = (doc) => doc && doc._id && doc._id.indexOf('drafts.') === 0

- module.exports = miss.through.obj((doc, enc, callback) => {
- if (isDraft(doc)) {
- return callback()
- }
+ module.exports = () =>
+ miss.through.obj((doc, enc, callback) => {
+ if (isDraft(doc)) {
+ return callback()
+ }

- return callback(null, doc)
- })
+ return callback(null, doc)
+ })
package/src/filterSystemDocuments.js CHANGED
@@ -1,13 +1,14 @@
  const miss = require('mississippi')
  const debug = require('./debug')

- const isSystemDocument = doc => doc && doc._id && doc._id.indexOf('_.') === 0
+ const isSystemDocument = (doc) => doc && doc._id && doc._id.indexOf('_.') === 0

- module.exports = miss.through.obj((doc, enc, callback) => {
- if (isSystemDocument(doc)) {
- debug('%s is a system document, skipping', doc && doc._id)
- return callback()
- }
+ module.exports = () =>
+ miss.through.obj((doc, enc, callback) => {
+ if (isSystemDocument(doc)) {
+ debug('%s is a system document, skipping', doc && doc._id)
+ return callback()
+ }

- return callback(null, doc)
- })
+ return callback(null, doc)
+ })
package/src/getDocumentsStream.js CHANGED
@@ -5,9 +5,10 @@ module.exports = (client, dataset) => {
  // Sanity client doesn't handle streams natively since we want to support node/browser
  // with same API. We're just using it here to get hold of URLs and tokens.
  const url = client.getUrl(`/data/export/${dataset}`)
+ const token = client.config().token
  const headers = {
- Authorization: `Bearer ${client.config().token}`,
- 'User-Agent': `${pkg.name}@${pkg.version}`
+ 'User-Agent': `${pkg.name}@${pkg.version}`,
+ ...(token ? {Authorization: `Bearer ${token}`} : {}),
  }

  return requestStream({url, headers})
package/src/logFirstChunk.js ADDED
@@ -0,0 +1,15 @@
+ const miss = require('mississippi')
+ const debug = require('./debug')
+
+ module.exports = () => {
+ let firstChunk = true
+ return miss.through((chunk, enc, callback) => {
+ if (firstChunk) {
+ const string = chunk.toString('utf8').split('\n')[0]
+ debug('First chunk received: %s', string.slice(0, 300))
+ firstChunk = false
+ }
+
+ callback(null, chunk)
+ })
+ }
package/src/rejectOnApiError.js CHANGED
@@ -1,10 +1,16 @@
  const miss = require('mississippi')

- module.exports = miss.through.obj((doc, enc, callback) => {
- if (doc.error && doc.statusCode) {
- callback(new Error([doc.statusCode, doc.error].join(': ')))
- return
- }
+ module.exports = () =>
+ miss.through.obj((doc, enc, callback) => {
+ if (doc.error && doc.statusCode) {
+ callback(new Error([doc.statusCode, doc.error].join(': ')))
+ return
+ }

- callback(null, doc)
- })
+ if (!doc._id && doc.error) {
+ callback(new Error(doc.error.description || doc.error.message || JSON.stringify(doc)))
+ return
+ }
+
+ callback(null, doc)
+ })