@sanity/export 3.38.2 → 3.40.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@sanity/export",
3
- "version": "3.38.2",
3
+ "version": "3.40.0",
4
4
  "description": "Export Sanity documents and assets",
5
5
  "keywords": [
6
6
  "sanity",
@@ -34,7 +34,7 @@
34
34
  "@sanity/util": "3.37.2",
35
35
  "archiver": "^7.0.0",
36
36
  "debug": "^4.3.4",
37
- "get-it": "^8.4.21",
37
+ "get-it": "^8.6.2",
38
38
  "lodash": "^4.17.21",
39
39
  "mississippi": "^4.0.0",
40
40
  "p-queue": "^2.3.0",
package/src/constants.js CHANGED
@@ -26,3 +26,18 @@ exports.ASSET_DOWNLOAD_CONCURRENCY = 8
26
26
  * @internal
27
27
  */
28
28
  exports.DOCUMENT_STREAM_DEBUG_INTERVAL = 10000
29
+
30
+ /**
31
+ * How long to wait before timing out the read of a request due to inactivity.
32
+ * User-overridable as `options.readTimeout`.
33
+ * @internal
34
+ */
35
+ exports.REQUEST_READ_TIMEOUT = 3 * 60 * 1000 // 3 minutes
36
+
37
+ /**
38
+ What mode to use when exporting documents.
39
+ stream: Export all documents in the dataset in one request; this is consistent but might be slow on large datasets.
40
+ cursor: Export documents using a cursor; this might lead to inconsistent results if a mutation is performed while exporting.
41
+ */
42
+ exports.MODE_STREAM = 'stream'
43
+ exports.MODE_CURSOR = 'cursor'
package/src/export.js CHANGED
@@ -11,13 +11,14 @@ const filterDocumentTypes = require('./filterDocumentTypes')
11
11
  const filterDrafts = require('./filterDrafts')
12
12
  const filterSystemDocuments = require('./filterSystemDocuments')
13
13
  const getDocumentsStream = require('./getDocumentsStream')
14
+ const getDocumentCursorStream = require('./getDocumentCursorStream')
14
15
  const logFirstChunk = require('./logFirstChunk')
15
16
  const rejectOnApiError = require('./rejectOnApiError')
16
17
  const stringifyStream = require('./stringifyStream')
17
18
  const tryParseJson = require('./tryParseJson')
18
19
  const rimraf = require('./util/rimraf')
19
20
  const validateOptions = require('./validateOptions')
20
- const {DOCUMENT_STREAM_DEBUG_INTERVAL} = require('./constants')
21
+ const {DOCUMENT_STREAM_DEBUG_INTERVAL, MODE_CURSOR, MODE_STREAM} = require('./constants')
21
22
 
22
23
  const noop = () => null
23
24
 
@@ -118,7 +119,7 @@ async function exportDataset(opts) {
118
119
  cb(null, doc)
119
120
  }
120
121
 
121
- const inputStream = await getDocumentsStream(options)
122
+ const inputStream = await getDocumentInputStream(options)
122
123
  debug('Got HTTP %d', inputStream.statusCode)
123
124
  debug('Response headers: %o', inputStream.headers)
124
125
 
@@ -250,6 +251,17 @@ async function exportDataset(opts) {
250
251
  return result
251
252
  }
252
253
 
254
+ function getDocumentInputStream(options) {
255
+ if (options.mode === MODE_STREAM) {
256
+ return getDocumentsStream(options)
257
+ }
258
+ if (options.mode === MODE_CURSOR) {
259
+ return getDocumentCursorStream(options)
260
+ }
261
+
262
+ throw new Error(`Invalid mode: ${options.mode}`)
263
+ }
264
+
253
265
  function isWritableStream(val) {
254
266
  return (
255
267
  val !== null &&
@@ -0,0 +1,73 @@
1
+ const {Transform} = require('node:stream')
2
+
3
+ const pkg = require('../package.json')
4
+ const requestStream = require('./requestStream')
5
+
6
+ module.exports = async (options) => {
7
+ let streamsInflight = 0
8
+ const stream = new Transform({
9
+ async transform(chunk, encoding, callback) {
10
+ if (encoding !== 'buffer' && encoding !== 'string') {
11
+ callback(null, chunk)
12
+ return
13
+ }
14
+
15
+ let parsedChunk = null
16
+ try {
17
+ parsedChunk = JSON.parse(chunk.toString())
18
+ } catch (err) {
19
+ // Ignore JSON parse errors
20
+ // This can happen if the chunk is not a JSON object; we just pass it through and let the caller handle it.
21
+ }
22
+
23
+ if (
24
+ parsedChunk !== null &&
25
+ typeof parsedChunk === 'object' &&
26
+ 'nextCursor' in parsedChunk &&
27
+ typeof parsedChunk.nextCursor === 'string' &&
28
+ !('_id' in parsedChunk)
29
+ ) {
30
+ streamsInflight++
31
+
32
+ const reqStream = await startStream(options, parsedChunk.nextCursor)
33
+ reqStream.on('end', () => {
34
+ streamsInflight--
35
+ if (streamsInflight === 0) {
36
+ stream.end()
37
+ }
38
+ })
39
+ reqStream.pipe(this, {end: false})
40
+
41
+ callback()
42
+ return
43
+ }
44
+
45
+ callback(null, chunk)
46
+ },
47
+ })
48
+
49
+ streamsInflight++
50
+ const reqStream = await startStream(options, '')
51
+ reqStream.on('end', () => {
52
+ streamsInflight--
53
+ if (streamsInflight === 0) {
54
+ stream.end()
55
+ }
56
+ })
57
+
58
+ reqStream.pipe(stream, {end: false})
59
+ return stream
60
+ }
61
+
62
+ function startStream(options, nextCursor) {
63
+ const url = options.client.getUrl(
64
+ `/data/export/${options.dataset}?nextCursor=${encodeURIComponent(nextCursor)}`,
65
+ )
66
+ const token = options.client.config().token
67
+ const headers = {
68
+ 'User-Agent': `${pkg.name}@${pkg.version}`,
69
+ ...(token ? {Authorization: `Bearer ${token}`} : {}),
70
+ }
71
+
72
+ return requestStream({url, headers, maxRetries: options.maxRetries})
73
+ }
@@ -11,5 +11,10 @@ module.exports = (options) => {
11
11
  ...(token ? {Authorization: `Bearer ${token}`} : {}),
12
12
  }
13
13
 
14
- return requestStream({url, headers, maxRetries: options.maxRetries})
14
+ return requestStream({
15
+ url,
16
+ headers,
17
+ maxRetries: options.maxRetries,
18
+ readTimeout: options.readTimeout,
19
+ })
15
20
  }
@@ -2,13 +2,11 @@ const {getIt} = require('get-it')
2
2
  const {keepAlive, promise} = require('get-it/middleware')
3
3
  const debug = require('./debug')
4
4
  const {extractFirstError} = require('./util/extractFirstError')
5
- const {DOCUMENT_STREAM_MAX_RETRIES} = require('./constants')
5
+ const {DOCUMENT_STREAM_MAX_RETRIES, REQUEST_READ_TIMEOUT} = require('./constants')
6
6
 
7
7
  const request = getIt([keepAlive(), promise({onlyBody: true})])
8
- const socketsWithTimeout = new WeakSet()
9
8
 
10
9
  const CONNECTION_TIMEOUT = 15 * 1000 // 15 seconds
11
- const READ_TIMEOUT = 3 * 60 * 1000 // 3 minutes
12
10
  const RETRY_DELAY_MS = 1500 // 1.5 seconds
13
11
 
14
12
  function delay(ms) {
@@ -20,30 +18,18 @@ module.exports = async (options) => {
20
18
  const maxRetries =
21
19
  typeof options.maxRetries === 'number' ? options.maxRetries : DOCUMENT_STREAM_MAX_RETRIES
22
20
 
21
+ const readTimeout =
22
+ typeof options.readTimeout === 'number' ? options.readTimeout : REQUEST_READ_TIMEOUT
23
+
23
24
  let error
24
25
  for (let i = 0; i < maxRetries; i++) {
25
26
  try {
26
- const response = await request({
27
+ return await request({
27
28
  ...options,
28
29
  stream: true,
29
30
  maxRedirects: 0,
30
- timeout: {connect: CONNECTION_TIMEOUT, socket: READ_TIMEOUT},
31
+ timeout: {connect: CONNECTION_TIMEOUT, socket: readTimeout},
31
32
  })
32
-
33
- if (
34
- response.connection &&
35
- typeof response.connection.setTimeout === 'function' &&
36
- !socketsWithTimeout.has(response.connection)
37
- ) {
38
- socketsWithTimeout.add(response.connection)
39
- response.connection.setTimeout(READ_TIMEOUT, () => {
40
- response.destroy(
41
- new Error(`Export: Read timeout: No data received on socket for ${READ_TIMEOUT} ms`),
42
- )
43
- })
44
- }
45
-
46
- return response
47
33
  } catch (err) {
48
34
  error = extractFirstError(err)
49
35
 
@@ -1,16 +1,24 @@
1
1
  const defaults = require('lodash/defaults')
2
- const {DOCUMENT_STREAM_MAX_RETRIES, ASSET_DOWNLOAD_MAX_RETRIES} = require('./constants')
2
+ const {
3
+ DOCUMENT_STREAM_MAX_RETRIES,
4
+ ASSET_DOWNLOAD_MAX_RETRIES,
5
+ REQUEST_READ_TIMEOUT,
6
+ MODE_STREAM,
7
+ MODE_CURSOR,
8
+ } = require('./constants')
3
9
 
4
10
  const clientMethods = ['getUrl', 'config']
5
11
  const booleanFlags = ['assets', 'raw', 'compress', 'drafts']
6
- const numberFlags = ['maxAssetRetries', 'maxRetries', 'assetConcurrency']
12
+ const numberFlags = ['maxAssetRetries', 'maxRetries', 'assetConcurrency', 'readTimeout']
7
13
  const exportDefaults = {
8
14
  compress: true,
9
15
  drafts: true,
10
16
  assets: true,
11
17
  raw: false,
18
+ mode: MODE_STREAM,
12
19
  maxRetries: DOCUMENT_STREAM_MAX_RETRIES,
13
20
  maxAssetRetries: ASSET_DOWNLOAD_MAX_RETRIES,
21
+ readTimeout: REQUEST_READ_TIMEOUT,
14
22
  }
15
23
 
16
24
  function validateOptions(opts) {
@@ -20,6 +28,15 @@ function validateOptions(opts) {
20
28
  throw new Error(`options.dataset must be a valid dataset name`)
21
29
  }
22
30
 
31
+ if (
32
+ typeof options.mode !== 'string' ||
33
+ (options.mode !== MODE_STREAM && options.mode !== MODE_CURSOR)
34
+ ) {
35
+ throw new Error(
36
+ `options.mode must be either "${MODE_STREAM}" or "${MODE_CURSOR}", got "${options.mode}"`,
37
+ )
38
+ }
39
+
23
40
  if (options.onProgress && typeof options.onProgress !== 'function') {
24
41
  throw new Error(`options.onProgress must be a function`)
25
42
  }