@sanity/export 5.0.1 → 6.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (115)
  1. package/dist/AssetHandler.d.ts +47 -0
  2. package/dist/AssetHandler.d.ts.map +1 -0
  3. package/dist/AssetHandler.js +384 -0
  4. package/dist/AssetHandler.js.map +1 -0
  5. package/dist/constants.d.ts +45 -0
  6. package/dist/constants.d.ts.map +1 -0
  7. package/{src → dist}/constants.js +13 -18
  8. package/dist/constants.js.map +1 -0
  9. package/dist/debug.d.ts +3 -0
  10. package/dist/debug.d.ts.map +1 -0
  11. package/dist/debug.js +3 -0
  12. package/dist/debug.js.map +1 -0
  13. package/dist/export.d.ts +43 -0
  14. package/dist/export.d.ts.map +1 -0
  15. package/dist/export.js +269 -0
  16. package/dist/export.js.map +1 -0
  17. package/dist/filterDocumentTypes.d.ts +3 -0
  18. package/dist/filterDocumentTypes.d.ts.map +1 -0
  19. package/dist/filterDocumentTypes.js +16 -0
  20. package/dist/filterDocumentTypes.js.map +1 -0
  21. package/dist/filterDocuments.d.ts +3 -0
  22. package/dist/filterDocuments.d.ts.map +1 -0
  23. package/dist/filterDocuments.js +36 -0
  24. package/dist/filterDocuments.js.map +1 -0
  25. package/dist/getDocumentCursorStream.d.ts +4 -0
  26. package/dist/getDocumentCursorStream.d.ts.map +1 -0
  27. package/dist/getDocumentCursorStream.js +85 -0
  28. package/dist/getDocumentCursorStream.js.map +1 -0
  29. package/dist/getDocumentsStream.d.ts +5 -0
  30. package/dist/getDocumentsStream.d.ts.map +1 -0
  31. package/dist/getDocumentsStream.js +28 -0
  32. package/dist/getDocumentsStream.js.map +1 -0
  33. package/dist/getUserAgent.d.ts +2 -0
  34. package/dist/getUserAgent.d.ts.map +1 -0
  35. package/dist/getUserAgent.js +12 -0
  36. package/dist/getUserAgent.js.map +1 -0
  37. package/dist/index.d.ts +3 -0
  38. package/dist/index.d.ts.map +1 -0
  39. package/dist/index.js +3 -0
  40. package/dist/index.js.map +1 -0
  41. package/dist/logFirstChunk.d.ts +3 -0
  42. package/dist/logFirstChunk.d.ts.map +1 -0
  43. package/dist/logFirstChunk.js +14 -0
  44. package/dist/logFirstChunk.js.map +1 -0
  45. package/dist/options.d.ts +14 -0
  46. package/dist/options.d.ts.map +1 -0
  47. package/dist/options.js +97 -0
  48. package/dist/options.js.map +1 -0
  49. package/dist/rejectOnApiError.d.ts +3 -0
  50. package/dist/rejectOnApiError.d.ts.map +1 -0
  51. package/dist/rejectOnApiError.js +35 -0
  52. package/dist/rejectOnApiError.js.map +1 -0
  53. package/dist/requestStream.d.ts +3 -0
  54. package/dist/requestStream.d.ts.map +1 -0
  55. package/dist/requestStream.js +48 -0
  56. package/dist/requestStream.js.map +1 -0
  57. package/dist/stringifyStream.d.ts +3 -0
  58. package/dist/stringifyStream.d.ts.map +1 -0
  59. package/dist/stringifyStream.js +5 -0
  60. package/dist/stringifyStream.js.map +1 -0
  61. package/dist/tryParseJson.d.ts +10 -0
  62. package/dist/tryParseJson.d.ts.map +1 -0
  63. package/dist/tryParseJson.js +36 -0
  64. package/dist/tryParseJson.js.map +1 -0
  65. package/dist/types.d.ts +241 -0
  66. package/dist/types.d.ts.map +1 -0
  67. package/dist/types.js +2 -0
  68. package/dist/types.js.map +1 -0
  69. package/dist/util/delay.d.ts +2 -0
  70. package/dist/util/delay.d.ts.map +1 -0
  71. package/dist/util/delay.js +4 -0
  72. package/dist/util/delay.js.map +1 -0
  73. package/dist/util/extractFirstError.d.ts +2 -0
  74. package/dist/util/extractFirstError.d.ts.map +1 -0
  75. package/dist/util/extractFirstError.js +22 -0
  76. package/dist/util/extractFirstError.js.map +1 -0
  77. package/dist/util/friendlyError.d.ts +2 -0
  78. package/dist/util/friendlyError.d.ts.map +1 -0
  79. package/dist/util/friendlyError.js +49 -0
  80. package/dist/util/friendlyError.js.map +1 -0
  81. package/dist/util/streamHelpers.d.ts +10 -0
  82. package/dist/util/streamHelpers.d.ts.map +1 -0
  83. package/dist/util/streamHelpers.js +99 -0
  84. package/dist/util/streamHelpers.js.map +1 -0
  85. package/package.json +22 -7
  86. package/src/{AssetHandler.js → AssetHandler.ts} +174 -99
  87. package/src/constants.ts +50 -0
  88. package/src/debug.ts +3 -0
  89. package/src/{export.js → export.ts} +110 -70
  90. package/src/filterDocumentTypes.ts +21 -0
  91. package/src/filterDocuments.ts +55 -0
  92. package/src/{getDocumentCursorStream.js → getDocumentCursorStream.ts} +37 -18
  93. package/src/{getDocumentsStream.js → getDocumentsStream.ts} +16 -6
  94. package/src/{getUserAgent.js → getUserAgent.ts} +8 -3
  95. package/src/index.ts +11 -0
  96. package/src/{logFirstChunk.js → logFirstChunk.ts} +6 -4
  97. package/src/options.ts +138 -0
  98. package/src/rejectOnApiError.ts +62 -0
  99. package/src/requestStream.ts +81 -0
  100. package/src/stringifyStream.ts +7 -0
  101. package/src/{tryParseJson.js → tryParseJson.ts} +29 -17
  102. package/src/types.ts +274 -0
  103. package/src/util/{delay.js → delay.ts} +1 -1
  104. package/src/util/extractFirstError.ts +31 -0
  105. package/src/util/friendlyError.ts +75 -0
  106. package/src/util/{streamHelpers.js → streamHelpers.ts} +35 -18
  107. package/src/debug.js +0 -3
  108. package/src/filterDocumentTypes.js +0 -18
  109. package/src/filterDocuments.js +0 -33
  110. package/src/rejectOnApiError.js +0 -31
  111. package/src/requestStream.js +0 -64
  112. package/src/stringifyStream.js +0 -5
  113. package/src/util/extractFirstError.js +0 -14
  114. package/src/util/friendlyError.js +0 -58
  115. package/src/validateOptions.js +0 -113
package/src/{export.js → export.ts} RENAMED
@@ -1,18 +1,18 @@
  import {createWriteStream} from 'node:fs'
- import {mkdir} from 'node:fs/promises'
+ import {mkdir, rm} from 'node:fs/promises'
  import {tmpdir} from 'node:os'
  import {join as joinPath} from 'node:path'
- import {PassThrough} from 'node:stream'
+ import {PassThrough, type Writable} from 'node:stream'
  import {finished, pipeline} from 'node:stream/promises'
  import {deprecate} from 'node:util'
  import {constants as zlib} from 'node:zlib'

  import archiver from 'archiver'
  import {JsonStreamStringify} from 'json-stream-stringify'
- import {rimraf} from 'rimraf'

+ import {isWritableStream, split, throughObj} from './util/streamHelpers.js'
  import {AssetHandler} from './AssetHandler.js'
- import {DOCUMENT_STREAM_DEBUG_INTERVAL, MODE_CURSOR, MODE_STREAM} from './constants.js'
+ import {DOCUMENT_STREAM_DEBUG_INTERVAL, MODE_STREAM} from './constants.js'
  import {debug} from './debug.js'
  import {filterDocuments} from './filterDocuments.js'
  import {filterDocumentTypes} from './filterDocumentTypes.js'
@@ -22,14 +22,34 @@ import {logFirstChunk} from './logFirstChunk.js'
  import {rejectOnApiError} from './rejectOnApiError.js'
  import {stringifyStream} from './stringifyStream.js'
  import {tryParseJson} from './tryParseJson.js'
- import {isWritableStream, split, throughObj} from './util/streamHelpers.js'
- import {validateOptions} from './validateOptions.js'
+ import type {
+   ExportOptions,
+   NormalizedExportOptions,
+   ExportResult,
+   ResponseStream,
+   SanityDocument,
+ } from './types.js'
+ import {getSource, validateOptions} from './options.js'

- const noop = () => null
+ const noop = (): null => null

- export async function exportDataset(opts) {
+ /**
+  * Export the dataset with the given options.
+  *
+  * @param opts - Export options
+  * @returns The export result
+  * @public
+  */
+ export async function exportDataset(
+   opts: ExportOptions & {outputPath: Writable},
+ ): Promise<ExportResult<Writable>>
+ export async function exportDataset(
+   opts: ExportOptions & {outputPath: string},
+ ): Promise<ExportResult<string>>
+ export async function exportDataset(opts: ExportOptions): Promise<ExportResult>
+ export async function exportDataset(opts: ExportOptions): Promise<ExportResult> {
    const options = validateOptions(opts)
-   const onProgress = options.onProgress || noop
+   const onProgress = options.onProgress ?? noop
    const archive = archiver('tar', {
      gzip: true,
      gzipOptions: {
@@ -48,66 +68,70 @@ export async function exportDataset(opts) {
      .replace(/[^a-z0-9]/gi, '-')
      .toLowerCase()

-   const prefix = `${opts.dataset ?? opts.mediaLibraryId}-export-${slugDate}`
+   const source = getSource(opts)
+   const prefix = `${source.id}-export-${slugDate}`
    const tmpDir = joinPath(tmpdir(), prefix)
    await mkdir(tmpDir, {recursive: true})
    const dataPath = joinPath(tmpDir, 'data.ndjson')
    const assetsPath = joinPath(tmpDir, 'assets.json')

    const cleanup = () =>
-     rimraf(tmpDir).catch((err) => {
-       debug(`Error while cleaning up temporary files: ${err.message}`)
+     rm(tmpDir, {recursive: true, force: true}).catch((err: unknown) => {
+       debug(`Error while cleaning up temporary files: ${err instanceof Error ? err.message : err}`)
+       return false
      })

    const assetHandler = new AssetHandler({
      client: options.client,
      tmpDir,
      prefix,
-     concurrency: options.assetConcurrency,
+     ...(options.assetConcurrency !== undefined && {concurrency: options.assetConcurrency}),
+     ...(options.retryDelayMs !== undefined && {retryDelayMs: options.retryDelayMs}),
      maxRetries: options.maxAssetRetries,
-     retryDelayMs: options.retryDelayMs,
    })

    debug('Downloading assets (temporarily) to %s', tmpDir)
-   debug('Downloading to %s', options.outputPath === '-' ? 'stdout' : options.outputPath)
-
-   let outputStream
-   if (isWritableStream(options.outputPath)) {
-     outputStream = options.outputPath
-   } else {
-     outputStream =
-       options.outputPath === '-' ? process.stdout : createWriteStream(options.outputPath)
-   }
+   debug('Downloading to %s', isWritableStream(options.outputPath) ? 'stream' : options.outputPath)
+
+   const outputStream: Writable = isWritableStream(options.outputPath)
+     ? options.outputPath
+     : createWriteStream(options.outputPath)

    let assetStreamHandler = assetHandler.noop
    if (!options.raw) {
      assetStreamHandler = options.assets ? assetHandler.rewriteAssets : assetHandler.stripAssets
    }

-   let resolve
-   let reject
-   const result = new Promise((res, rej) => {
+   let resolve: (value: ExportResult) => void
+   let reject: (reason: Error) => void
+   const result = new Promise<ExportResult>((res, rej) => {
      resolve = res
      reject = rej
    })

    finished(archive)
-     .then(async () => {
+     .then(() => {
        debug('Archive finished')
      })
-     .catch(async (archiveErr) => {
-       debug('Archiving errored: %s', archiveErr.stack)
-       await cleanup()
-       reject(archiveErr)
+     .catch(async (archiveErr: unknown) => {
+       const err = archiveErr instanceof Error ? archiveErr : new Error(`${archiveErr}`)
+       debug('Archiving errored: %s', err.stack)
+       // Try cleanup, but let original error be the main rejection reason, not the cleanup
+       await cleanup().catch(noop)
+       reject(err)
      })

    debug('Getting dataset export stream, mode: "%s"', options.mode)
    onProgress({step: 'Exporting documents...'})

    let documentCount = 0
-   let lastDocumentID = null
+   let lastDocumentID: string | null = null
    let lastReported = Date.now()
-   const reportDocumentCount = (doc, enc, cb) => {
+   const reportDocumentCount = (
+     doc: SanityDocument,
+     _enc: BufferEncoding,
+     cb: (err: Error | null, doc: SanityDocument) => void,
+   ): void => {
      ++documentCount

      const now = Date.now()
@@ -130,15 +154,15 @@ export async function exportDataset(opts) {
    }

    const inputStream = await getDocumentInputStream(options)
-   if (inputStream.statusCode) {
+   if ('statusCode' in inputStream) {
      debug('Got HTTP %d', inputStream.statusCode)
    }
-   if (inputStream.headers) {
+   if ('headers' in inputStream) {
      debug('Response headers: %o', inputStream.headers)
    }

-   let debugTimer = null
-   function scheduleDebugTimer() {
+   let debugTimer: ReturnType<typeof setTimeout> | null = null
+   function scheduleDebugTimer(): void {
      debugTimer = setTimeout(() => {
        debug('Still streaming documents', {
          documentCount,
@@ -152,28 +176,24 @@ export async function exportDataset(opts) {

    scheduleDebugTimer()

-   const filterTransform = throughObj((doc, _enc, callback) => {
-     if (!options.filterDocument) {
-       return callback(null, doc)
-     }
-
+   const filterTransform = throughObj((doc: SanityDocument, _enc: BufferEncoding, callback) => {
      try {
        const include = options.filterDocument(doc)
-       return include ? callback(null, doc) : callback()
+       if (include) {
+         callback(null, doc)
+       } else {
+         callback()
+       }
      } catch (err) {
-       return callback(err)
+       callback(err instanceof Error ? err : new Error(`${err}`))
      }
    })

-   const transformTransform = throughObj((doc, _enc, callback) => {
-     if (!options.transformDocument) {
-       return callback(null, doc)
-     }
-
+   const transformTransform = throughObj((doc: SanityDocument, _enc: BufferEncoding, callback) => {
      try {
-       return callback(null, options.transformDocument(doc))
+       callback(null, options.transformDocument(doc))
      } catch (err) {
-       return callback(err)
+       callback(err instanceof Error ? err : new Error(`${err}`))
      }
    })

@@ -183,7 +203,7 @@ export async function exportDataset(opts) {
    const jsonStream = new PassThrough()
    finished(jsonStream)
      .then(() => debug('JSON stream finished'))
-     .catch((err) => reject(err))
+     .catch((err: unknown) => reject(err instanceof Error ? err : new Error(`${err}`)))

    pipeline(
      inputStream,
@@ -198,10 +218,10 @@ export async function exportDataset(opts) {
      reportTransform,
      stringifyStream(),
      jsonStream,
-   ).catch((err) => {
+   ).catch((err: unknown) => {
      if (debugTimer !== null) clearTimeout(debugTimer)
      debug(`Export stream error @ ${lastDocumentID}/${documentCount}: `, err)
-     reject(err)
+     reject(err instanceof Error ? err : new Error(`${err}`))
    })

    pipeline(jsonStream, createWriteStream(dataPath))
@@ -263,30 +283,31 @@ export async function exportDataset(opts) {
        clearInterval(progressInterval)
      } catch (assetErr) {
        clearInterval(progressInterval)
-       await cleanup()
-       reject(assetErr)
+       await cleanup().catch(noop) // Try to clean up, but ignore errors here
+       reject(assetErr instanceof Error ? assetErr : new Error(`${assetErr}`))
        return
      }

      // Add all downloaded assets to archive
-     archive.directory(joinPath(tmpDir, 'files'), `${prefix}/files`, {store: true})
-     archive.directory(joinPath(tmpDir, 'images'), `${prefix}/images`, {store: true})
+     archive.directory(joinPath(tmpDir, 'files'), `${prefix}/files`)
+     archive.directory(joinPath(tmpDir, 'images'), `${prefix}/images`)

      debug('Finalizing archive, flushing streams')
      onProgress({step: 'Adding assets to archive...'})
      await archive.finalize()
    })
-   .catch(async (err) => {
+   .catch(async (err: unknown) => {
      if (debugTimer !== null) clearTimeout(debugTimer)
      debug(`Export stream error @ ${lastDocumentID}/${documentCount}: `, err)
-     reject(err)
+     await cleanup().catch(noop)
+     reject(err instanceof Error ? err : new Error(`${err}`))
    })

    pipeline(archive, outputStream)
      .then(() => onComplete())
      .catch(onComplete)

-   async function onComplete(err) {
+   async function onComplete(err?: Error): Promise<void> {
      onProgress({step: 'Clearing temporary files...'})
      await cleanup()

@@ -308,15 +329,34 @@ export async function exportDataset(opts) {
    return result
  }

- function getDocumentInputStream(options) {
-   if (options.mode === MODE_STREAM) {
-     return getDocumentsStream(options)
-   }
-   if (options.mode === MODE_CURSOR) {
-     return getDocumentCursorStream(options)
-   }
+ function getDocumentInputStream(options: NormalizedExportOptions): Promise<ResponseStream> {
+   return options.mode === MODE_STREAM
+     ? getDocumentsStream(options)
+     : getDocumentCursorStream(options)
+ }

-   throw new Error(`Invalid mode: ${options.mode}`)
+ type MediaLibraryExportOptions = Omit<ExportOptions, 'dataset' | 'mediaLibraryId'> & {
+   mediaLibraryId: string
+ }
+
+ /**
+  * Export the media library with the given `mediaLibraryId`.
+  *
+  * @param options - Export options
+  * @returns The export result
+  * @public
+  */
+ export async function exportMediaLibrary(
+   options: MediaLibraryExportOptions & {outputPath: Writable},
+ ): Promise<ExportResult<Writable>>
+ export async function exportMediaLibrary(
+   options: MediaLibraryExportOptions & {outputPath: string},
+ ): Promise<ExportResult<string>>
+ export async function exportMediaLibrary(options: MediaLibraryExportOptions): Promise<ExportResult>
+ export async function exportMediaLibrary(
+   options: MediaLibraryExportOptions,
+ ): Promise<ExportResult> {
+   return exportDataset(options as ExportOptions)
  }

  /**
@@ -327,7 +367,7 @@ function getDocumentInputStream(options) {
   * @public
   */
  export default deprecate(
-   function deprecatedExport(opts) {
+   function deprecatedExport(opts: ExportOptions): Promise<ExportResult> {
      return exportDataset(opts)
    },
    `Default export of "@sanity/export" is deprecated and will be removed in a future release. Please use the named "exportDataset" function instead.`,
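
Taken together, the new overloads make the return type follow the `outputPath` argument. A minimal usage sketch (not taken from the package docs; project ID, dataset name and file names are placeholders, and an ESM module with top-level await is assumed):

```ts
// Usage sketch only; configuration values below are placeholders.
import {createWriteStream} from 'node:fs'

import {createClient} from '@sanity/client'
import {exportDataset} from '@sanity/export'

const client = createClient({
  projectId: 'abc123',
  dataset: 'production',
  apiVersion: '2025-01-01',
  token: process.env.SANITY_TOKEN,
  useCdn: false,
})

// Overload 1: a string outputPath resolves to ExportResult<string>
await exportDataset({
  client,
  dataset: 'production',
  outputPath: 'production-export.tar.gz',
  onProgress: (progress) => console.log(progress.step),
})

// Overload 2: a Writable outputPath resolves to ExportResult<Writable>
await exportDataset({
  client,
  dataset: 'production',
  outputPath: createWriteStream('production-export-copy.tar.gz'),
})
```

Note that the old `'-'` shorthand for stdout is gone from the rewritten output handling; to stream to stdout, pass `process.stdout` as the Writable.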
package/src/filterDocumentTypes.ts ADDED
@@ -0,0 +1,21 @@
+ import type {Transform} from 'node:stream'
+
+ import {throughObj} from './util/streamHelpers.js'
+ import type {SanityDocument} from './types.js'
+
+ export function filterDocumentTypes(allowedTypes: string[] | undefined): Transform {
+   if (!allowedTypes || allowedTypes.length === 0) {
+     // Pass-through
+     return throughObj((doc: SanityDocument, _enc, callback) => callback(null, doc))
+   }
+
+   return throughObj(function docTypesFilter(doc: SanityDocument, _enc, callback) {
+     const type = doc._type
+     if (allowedTypes.includes(type)) {
+       callback(null, doc)
+       return
+     }
+
+     callback()
+   })
+ }
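
`filterDocumentTypes` is internal (it is not re-exported from src/index.ts), but its behaviour is easy to exercise on its own. An illustrative object-mode pipeline with made-up documents:

```ts
// Illustrative only; the documents and types below are invented for the example.
import {Readable} from 'node:stream'
import {pipeline} from 'node:stream/promises'

import {filterDocumentTypes} from './filterDocumentTypes.js'

const docs = [
  {_id: 'a', _type: 'post'},
  {_id: 'b', _type: 'author'},
  {_id: 'c', _type: 'post'},
]

const kept: unknown[] = []
await pipeline(Readable.from(docs), filterDocumentTypes(['post']), async (source) => {
  // Only documents whose _type is in the allow-list reach this point
  for await (const doc of source) kept.push(doc)
})
// kept now holds 'a' and 'c'; filterDocumentTypes(undefined) or
// filterDocumentTypes([]) returns a pass-through that keeps everything
```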
package/src/filterDocuments.ts ADDED
@@ -0,0 +1,55 @@
+ import type {Transform} from 'node:stream'
+
+ import {throughObj} from './util/streamHelpers.js'
+ import {debug} from './debug.js'
+ import type {SanityDocument} from './types.js'
+
+ interface CursorDocument {
+   nextCursor?: string
+ }
+
+ const isDraftOrVersion = (doc: SanityDocument): boolean =>
+   Boolean(doc._id && (doc._id.indexOf('drafts.') === 0 || doc._id.indexOf('versions.') === 0))
+
+ const isSystemDocument = (doc: SanityDocument): boolean =>
+   Boolean(doc._id && doc._id.indexOf('_.') === 0)
+
+ const isReleaseDocument = (doc: SanityDocument): boolean =>
+   Boolean(doc._id && doc._id.indexOf('_.releases.') === 0)
+
+ const isCursor = (doc: unknown): doc is CursorDocument =>
+   typeof doc === 'object' &&
+   doc !== null &&
+   !('_id' in doc) &&
+   'nextCursor' in doc &&
+   (doc as CursorDocument).nextCursor !== undefined
+
+ export function filterDocuments(drafts: boolean): Transform {
+   return throughObj(function filterDocs(doc: SanityDocument | CursorDocument, _enc, callback) {
+     if (isCursor(doc)) {
+       debug('%o is a cursor, skipping', doc)
+       callback()
+       return
+     }
+
+     const sanityDoc = doc
+
+     if (!drafts && isDraftOrVersion(sanityDoc)) {
+       debug('%s is a draft or version, skipping', sanityDoc._id)
+       callback()
+       return
+     }
+
+     if (isSystemDocument(sanityDoc)) {
+       if (drafts && isReleaseDocument(sanityDoc)) {
+         callback(null, sanityDoc)
+         return
+       }
+       debug('%s is a system document, skipping', sanityDoc._id)
+       callback()
+       return
+     }
+
+     callback(null, sanityDoc)
+   })
+ }
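
The ID-prefix rules above determine what ends up in data.ndjson. A small sketch with assumed document shapes (the `_type` values and IDs are made up for illustration):

```ts
import {Readable} from 'node:stream'
import {pipeline} from 'node:stream/promises'

import {filterDocuments} from './filterDocuments.js'

const input = [
  {_id: 'movie-1', _type: 'movie'}, // published: always kept
  {_id: 'drafts.movie-1', _type: 'movie'}, // kept only when drafts === true
  {_id: 'versions.r1.movie-1', _type: 'movie'}, // treated like a draft
  {_id: '_.releases.r1', _type: 'system.release'}, // kept only when drafts === true
  {_id: '_.groups.x', _type: 'system.group'}, // other system docs: always dropped
  {nextCursor: 'abc123'}, // cursor marker: always dropped
]

const kept: string[] = []
await pipeline(Readable.from(input), filterDocuments(false), async (source) => {
  for await (const doc of source) kept.push((doc as {_id: string})._id)
})
// kept === ['movie-1']; with filterDocuments(true), the draft, version and
// release documents pass through as well
```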
package/src/{getDocumentCursorStream.js → getDocumentCursorStream.ts} RENAMED
@@ -1,15 +1,24 @@
- import {Transform} from 'node:stream'
+ import {Transform, type TransformCallback} from 'node:stream'

  import {debug} from './debug.js'
  import {getUserAgent} from './getUserAgent.js'
  import {requestStream} from './requestStream.js'
+ import type {NormalizedExportOptions, ResponseStream} from './types.js'
+ import {getSource} from './options.js'

  // same regex as split2 is using by default: https://github.com/mcollina/split2/blob/53432f54bd5bf422bd55d91d38f898b6c9496fc1/index.js#L86
  const splitRegex = /\r?\n/

- export async function getDocumentCursorStream(options) {
+ interface CursorChunk {
+   nextCursor?: string
+   _id?: string
+ }
+
+ export async function getDocumentCursorStream(
+   options: NormalizedExportOptions,
+ ): Promise<Transform> {
    let streamsInflight = 0
-   function decrementInflight(stream) {
+   function decrementInflight(stream: Transform): void {
      streamsInflight--
      if (streamsInflight === 0) {
        stream.end()
@@ -17,25 +26,30 @@
    }

    const stream = new Transform({
-     async transform(chunk, encoding, callback) {
-       if (encoding !== 'buffer' && encoding !== 'string') {
+     transform(
+       this: Transform,
+       chunk: Buffer,
+       encoding: BufferEncoding,
+       callback: TransformCallback,
+     ) {
+       if (encoding !== ('buffer' as BufferEncoding) && encoding !== ('string' as BufferEncoding)) {
        callback(null, chunk)
        return
      }
      this.push(chunk, encoding)

-       let parsedChunk = null
+       let parsedChunk: CursorChunk | null = null
      for (const chunkStr of chunk.toString().split(splitRegex)) {
        if (chunkStr.trim() === '') {
          continue
        }

        try {
-           parsedChunk = JSON.parse(chunkStr)
-         } catch (err) {
+           parsedChunk = JSON.parse(chunkStr) as CursorChunk
+         } catch {
          // Ignore JSON parse errors
          // this can happen if the chunk is not a JSON object. We just pass it through and let the caller handle it.
-           debug('Failed to parse JSON chunk, ignoring', err, chunkStr)
+           debug('Failed to parse JSON chunk, ignoring', chunkStr)
        }

        if (
@@ -48,9 +62,10 @@
          debug('Got next cursor "%s", fetching next stream', parsedChunk.nextCursor)
          streamsInflight++

-           const reqStream = await startStream(options, parsedChunk.nextCursor)
-           reqStream.on('end', () => decrementInflight(this))
-           reqStream.pipe(this, {end: false})
+           void startStream(options, parsedChunk.nextCursor).then((reqStream) => {
+             reqStream.on('end', () => decrementInflight(this))
+             reqStream.pipe(this, {end: false})
+           })
        }
      }

@@ -65,11 +80,15 @@
    return stream
  }

- function startStream(options, nextCursor) {
+ function startStream(
+   options: NormalizedExportOptions,
+   nextCursor: string,
+ ): Promise<ResponseStream> {
+   const source = getSource(options)
    const baseUrl = options.client.getUrl(
-     options.dataset
-       ? `/data/export/${options.dataset}`
-       : `/media-libraries/${options.mediaLibraryId}/export`,
+     source.type === 'dataset'
+       ? `/data/export/${source.id}`
+       : `/media-libraries/${source.id}/export`,
    )

    const url = new URL(baseUrl)
@@ -79,7 +98,7 @@ function startStream(options, nextCursor) {
      url.searchParams.set('types', options.types.join())
    }
    const token = options.client.config().token
-   const headers = {
+   const headers: Record<string, string> = {
      'User-Agent': getUserAgent(),
      ...(token ? {Authorization: `Bearer ${token}`} : {}),
    }
@@ -90,7 +109,7 @@
      url: url.toString(),
      headers,
      maxRetries: options.maxRetries,
-     retryDelayMs: options.retryDelayMs,
+     ...(options.retryDelayMs !== undefined ? {retryDelayMs: options.retryDelayMs} : {}),
      readTimeout: options.readTimeout,
    }).then((res) => {
      debug('Got stream with HTTP %d', res.statusCode)
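
Cursor mode depends on the export endpoint emitting newline-delimited JSON in which a page ends with a cursor object rather than a document. The exact payload is not part of this diff; an assumed example of what the transform above scans each chunk for:

```ts
// Assumed response shape, for illustration only.
const examplePage = [
  '{"_id":"movie-1","_type":"movie","title":"Alien"}',
  '{"_id":"movie-2","_type":"movie","title":"Aliens"}',
  // A line with no _id and a nextCursor: the transform starts another request
  // with this cursor and pipes the new response into the same output stream.
  '{"nextCursor":"eyJsYXN0SWQiOiJtb3ZpZS0yIn0"}',
].join('\n')
```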
package/src/{getDocumentsStream.js → getDocumentsStream.ts} RENAMED
@@ -1,13 +1,23 @@
  import {getUserAgent} from './getUserAgent.js'
+ import {getSource} from './options.js'
  import {requestStream} from './requestStream.js'
+ import type {ExportSource, NormalizedExportOptions, ResponseStream} from './types.js'

- export function getDocumentsStream(options) {
+ type GetDocumentStreamOptions = Partial<NormalizedExportOptions> &
+   Pick<
+     NormalizedExportOptions,
+     'client' | 'types' | 'maxRetries' | 'retryDelayMs' | 'readTimeout'
+   > &
+   ExportSource
+
+ export function getDocumentsStream(options: GetDocumentStreamOptions): Promise<ResponseStream> {
    // Sanity client doesn't handle streams natively since we want to support node/browser
    // with same API. We're just using it here to get hold of URLs and tokens.
+   const source = getSource(options)
    const baseUrl = options.client.getUrl(
-     options.dataset
-       ? `/data/export/${options.dataset}`
-       : `/media-libraries/${options.mediaLibraryId}/export`,
+     source.type === 'dataset'
+       ? `/data/export/${source.id}`
+       : `/media-libraries/${source.id}/export`,
    )

    const url = new URL(baseUrl)
@@ -16,7 +26,7 @@
    }

    const token = options.client.config().token
-   const headers = {
+   const headers: Record<string, string> = {
      'User-Agent': getUserAgent(),
      ...(token ? {Authorization: `Bearer ${token}`} : {}),
    }
@@ -25,7 +35,7 @@
      url: url.toString(),
      headers,
      maxRetries: options.maxRetries,
-     retryDelayMs: options.retryDelayMs,
+     ...(options.retryDelayMs !== undefined ? {retryDelayMs: options.retryDelayMs} : {}),
      readTimeout: options.readTimeout,
    })
  }
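
Both URL builders now resolve the export source through `getSource()` from src/options.ts, whose body is not included in this excerpt. Judging only from how the result is used (`source.type` and `source.id`), a plausible sketch, an assumption rather than the shipped implementation:

```ts
// Hypothetical sketch of getSource(); the real implementation lives in
// src/options.ts (not shown in this diff) and may differ in naming and errors.
type Source = {type: 'dataset' | 'mediaLibrary'; id: string}

export function getSource(options: {dataset?: string; mediaLibraryId?: string}): Source {
  if (options.dataset) {
    return {type: 'dataset', id: options.dataset}
  }
  if (options.mediaLibraryId) {
    return {type: 'mediaLibrary', id: options.mediaLibraryId}
  }
  throw new Error('Either `dataset` or `mediaLibraryId` must be provided')
}
```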
package/src/{getUserAgent.js → getUserAgent.ts} RENAMED
@@ -1,12 +1,17 @@
  import {readFileSync} from 'node:fs'
  import {join as joinPath} from 'node:path'

- let ua = null
+ interface PackageJson {
+   name: string
+   version: string
+ }
+
+ let ua: string | null = null

- export function getUserAgent() {
+ export function getUserAgent(): string {
    if (!ua) {
      const data = readFileSync(joinPath(import.meta.dirname, '..', 'package.json'), 'utf-8')
-     const pkg = JSON.parse(data)
+     const pkg = JSON.parse(data) as PackageJson
      ua = `${pkg.name}@${pkg.version}`
    }

package/src/index.ts ADDED
@@ -0,0 +1,11 @@
+ // eslint-disable-next-line @typescript-eslint/no-deprecated
+ export {exportDataset, exportMediaLibrary, default} from './export.js'
+ export type {
+   ExportMode,
+   ExportOptions,
+   ExportResult,
+   SanityClientLike,
+   SanityDocument,
+   ExportProgress,
+   ExportSource,
+ } from './types.js'
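
The entry point keeps the deprecated default export alongside the new named exports, so existing code keeps working while emitting a runtime DeprecationWarning when the default is called. A migration sketch:

```ts
// Before (still works in 6.x, warns via node:util deprecate when called):
import sanityExport from '@sanity/export'

// After: prefer the named exports and types added in this major version
import {exportDataset, exportMediaLibrary, type ExportOptions} from '@sanity/export'
```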
package/src/{logFirstChunk.js → logFirstChunk.ts} RENAMED
@@ -1,12 +1,14 @@
- import {debug} from './debug.js'
+ import type {Transform} from 'node:stream'
+
  import {through} from './util/streamHelpers.js'
+ import {debug} from './debug.js'

- export function logFirstChunk() {
+ export function logFirstChunk(): Transform {
    let firstChunk = true
-   return through((chunk, enc, callback) => {
+   return through((chunk, _enc, callback) => {
      if (firstChunk) {
        const string = chunk.toString('utf8').split('\n')[0]
-       debug('First chunk received: %s', string.slice(0, 300))
+       debug('First chunk received: %s', string?.slice(0, 300))
        firstChunk = false
      }