@sanity/export 0.136.3-gql-rtb.372 → 0.136.3-purple-unicorn-patch.5627
This diff shows the published contents of two package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/LICENSE +1 -1
- package/README.md +8 -5
- package/lib/AssetHandler.js +213 -178
- package/lib/export.js +145 -140
- package/lib/filterDrafts.js +1 -1
- package/lib/filterSystemDocuments.js +1 -1
- package/lib/getDocumentsStream.js +6 -3
- package/lib/logFirstChunk.js +18 -0
- package/lib/rejectOnApiError.js +6 -1
- package/lib/requestStream.js +60 -4
- package/lib/stringifyStream.js +1 -1
- package/lib/tryParseJson.js +25 -0
- package/lib/validateOptions.js +8 -4
- package/package.json +18 -17
- package/src/AssetHandler.js +168 -52
- package/src/export.js +59 -22
- package/src/filterDocumentTypes.js +1 -1
- package/src/filterDrafts.js +8 -7
- package/src/filterSystemDocuments.js +9 -8
- package/src/getDocumentsStream.js +3 -2
- package/src/logFirstChunk.js +15 -0
- package/src/rejectOnApiError.js +13 -7
- package/src/requestStream.js +54 -5
- package/src/stringifyStream.js +2 -3
- package/src/tryParseJson.js +21 -0
- package/src/validateOptions.js +7 -3
package/lib/export.js CHANGED

@@ -1,9 +1,5 @@
 "use strict";
 
-function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
-
-function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
-
 const os = require('os');
 
 const path = require('path');

@@ -36,6 +32,10 @@ const filterDocumentTypes = require('./filterDocumentTypes');
 
 const filterDrafts = require('./filterDrafts');
 
+const logFirstChunk = require('./logFirstChunk');
+
+const tryParseJson = require('./tryParseJson');
+
 const noop = () => null;
 
 function exportDataset(opts) {

@@ -48,168 +48,173 @@ function exportDataset(opts) {
     }
   });
   const slugDate = new Date().toISOString().replace(/[^a-z0-9]/gi, '-').toLowerCase();
-  const prefix =
+  const prefix = "".concat(opts.dataset, "-export-").concat(slugDate);
   const tmpDir = path.join(os.tmpdir(), prefix);
+
+  const cleanup = () => fse.remove(tmpDir).catch(err => {
+    debug("Error while cleaning up temporary files: ".concat(err.message));
+  });
+
   const assetHandler = new AssetHandler({
     client: options.client,
     tmpDir,
-    prefix
+    prefix,
+    concurrency: options.assetConcurrency
   });
   debug('Outputting assets (temporarily) to %s', tmpDir);
   debug('Outputting to %s', options.outputPath === '-' ? 'stdout' : options.outputPath);
-
+  let outputStream;
+
+  if (isWritableStream(options.outputPath)) {
+    outputStream = options.outputPath;
+  } else {
+    outputStream = options.outputPath === '-' ? process.stdout : fse.createWriteStream(options.outputPath);
+  }
+
   let assetStreamHandler = assetHandler.noop;
 
   if (!options.raw) {
     assetStreamHandler = options.assets ? assetHandler.rewriteAssets : assetHandler.stripAssets;
   }
 
-  return new Promise(
-
-
-
-
-
-
-
-
-
+  return new Promise(async (resolve, reject) => {
+    miss.finished(archive, async archiveErr => {
+      if (archiveErr) {
+        debug('Archiving errored! %s', archiveErr.stack);
+        await cleanup();
+        reject(archiveErr);
+        return;
+      }
+
+      debug('Archive finished!');
+    });
+    debug('Getting dataset export stream');
+    onProgress({
+      step: 'Exporting documents...'
+    });
+    let documentCount = 0;
+    let lastReported = Date.now();
+
+    const reportDocumentCount = (chunk, enc, cb) => {
+      ++documentCount;
+      const now = Date.now();
+
+      if (now - lastReported > 50) {
+        onProgress({
+          step: 'Exporting documents...',
+          current: documentCount,
+          total: '?',
+          update: true
+        });
+        lastReported = now;
+      }
+
+      cb(null, chunk);
+    };
+
+    const inputStream = await getDocumentsStream(options.client, options.dataset);
+    debug('Got HTTP %d', inputStream.statusCode);
+    debug('Response headers: %o', inputStream.headers);
+    const jsonStream = miss.pipeline(inputStream, logFirstChunk(), split(tryParseJson), rejectOnApiError(), filterSystemDocuments(), assetStreamHandler, filterDocumentTypes(options.types), options.drafts ? miss.through.obj() : filterDrafts(), stringifyStream(), miss.through(reportDocumentCount));
+    miss.finished(jsonStream, async err => {
+      if (err) {
+        return;
+      }
 
-      debug('Archive finished!');
-    });
-    debug('Getting dataset export stream');
       onProgress({
-        step: 'Exporting documents...'
+        step: 'Exporting documents...',
+        current: documentCount,
+        total: documentCount,
+        update: true
       });
-
-
-
-
-
-
-
-
-
-
-
-
-
-      });
-      lastReported = now;
+
+      if (!options.raw && options.assets) {
+        onProgress({
+          step: 'Downloading assets...'
+        });
+      }
+
+      let prevCompleted = 0;
+      const progressInterval = setInterval(() => {
+        const completed = assetHandler.queueSize - assetHandler.queue.size - assetHandler.queue.pending;
+
+        if (prevCompleted === completed) {
+          return;
        }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        step: 'Downloading assets...'
-      });
-    }
-
-    let prevCompleted = 0;
-    const progressInterval = setInterval(() => {
-      const completed = assetHandler.queueSize - assetHandler.queue.size;
-
-      if (prevCompleted === completed) {
-        return;
-      }
-
-      prevCompleted = completed;
-      onProgress({
-        step: 'Downloading assets...',
-        current: completed,
-        total: assetHandler.queueSize,
-        update: true
-      });
-    }, 500);
-    debug('Waiting for asset handler to complete downloads');
-
-    try {
-      const assetMap = yield assetHandler.finish();
-      archive.append(JSON.stringify(assetMap), {
-        name: 'assets.json',
-        prefix
-      });
-      clearInterval(progressInterval);
-    } catch (assetErr) {
-      clearInterval(progressInterval);
-      reject(assetErr);
-      return;
-    } // Add all downloaded assets to archive
-
-
-    archive.directory(path.join(tmpDir, 'files'), `${prefix}/files`, {
-      store: true
-    });
-    archive.directory(path.join(tmpDir, 'images'), `${prefix}/images`, {
-      store: true
-    });
-    debug('Finalizing archive, flushing streams');
-    onProgress({
-      step: 'Adding assets to archive...'
-    });
-    archive.finalize();
+        prevCompleted = completed;
+        onProgress({
+          step: 'Downloading assets...',
+          current: completed,
+          total: assetHandler.queueSize,
+          update: true
+        });
+      }, 500);
+      debug('Waiting for asset handler to complete downloads');
+
+      try {
+        const assetMap = await assetHandler.finish(); // Make sure we mark the progress as done (eg 100/100 instead of 99/100)
+
+        onProgress({
+          step: 'Downloading assets...',
+          current: assetHandler.queueSize,
+          total: assetHandler.queueSize,
+          update: true
+        });
+        archive.append(JSON.stringify(assetMap), {
+          name: 'assets.json',
+          prefix
        });
+        clearInterval(progressInterval);
+      } catch (assetErr) {
+        clearInterval(progressInterval);
+        await cleanup();
+        reject(assetErr);
+        return;
+      } // Add all downloaded assets to archive
+
 
-
-
-
-
-
-
+      archive.directory(path.join(tmpDir, 'files'), "".concat(prefix, "/files"), {
+        store: true
+      });
+      archive.directory(path.join(tmpDir, 'images'), "".concat(prefix, "/images"), {
+        store: true
+      });
+      debug('Finalizing archive, flushing streams');
+      onProgress({
+        step: 'Adding assets to archive...'
      });
-    archive.
-
-
+      archive.finalize();
+    });
+    archive.on('warning', err => {
+      debug('Archive warning: %s', err.message);
+    });
+    archive.append(jsonStream, {
+      name: 'data.ndjson',
+      prefix
+    });
+    miss.pipe(archive, outputStream, onComplete);
+
+    async function onComplete(err) {
+      onProgress({
+        step: 'Clearing temporary files...'
      });
-
+      await cleanup();
 
-
-
+      if (!err) {
+        resolve();
+        return;
      }
 
-
-
-
-
-
-
-
-    if (!err) {
-      resolve();
-      return;
-    }
-
-    debug('Error during streaming: %s', err.stack);
-    assetHandler.clear();
-    reject(err);
-  });
-  return _onComplete.apply(this, arguments);
-}
-});
+      debug('Error during streaming: %s', err.stack);
+      assetHandler.clear();
+      reject(err);
+    }
+  });
+}
 
-
-
-};
-}());
+function isWritableStream(val) {
+  return val !== null && typeof val === 'object' && typeof val.pipe === 'function' && typeof val._write === 'function' && typeof val._writableState === 'object';
}
 
 module.exports = exportDataset;
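Net effect of the lib/export.js rewrite: the Babel async-generator helpers are gone in favor of native async/await, `outputPath` may now be an already-open writable stream (via the new `isWritableStream` check) in addition to a file path or `-` for stdout, asset downloads accept an `assetConcurrency` option, and temporary files are cleaned up on error paths as well as on completion. A minimal usage sketch; the client configuration values are illustrative, not taken from the diff:

```js
const fs = require('fs');
const sanityClient = require('@sanity/client');
const exportDataset = require('@sanity/export');

const client = sanityClient({
  projectId: 'abc123',             // illustrative project id
  dataset: 'production',
  token: process.env.SANITY_TOKEN, // needed for private datasets/drafts
  useCdn: false,
});

exportDataset({
  client,
  dataset: 'production',
  // New in this version: a writable stream works here, alongside
  // a filesystem path or '-' for stdout.
  outputPath: fs.createWriteStream('/tmp/production-export.tar.gz'),
  assets: true,
  raw: false,
  drafts: false,
  assetConcurrency: 8, // new option; validateOptions enforces 1-24
  onProgress: ({step, current, total, update}) => {
    console.log(update ? `${step} ${current}/${total}` : step);
  },
}).then(() => console.log('Export complete'));
```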
package/lib/filterDrafts.js CHANGED

@@ -4,7 +4,7 @@ const miss = require('mississippi');
 
 const isDraft = doc => doc && doc._id && doc._id.indexOf('drafts.') === 0;
 
-module.exports = miss.through.obj((doc, enc, callback) => {
+module.exports = () => miss.through.obj((doc, enc, callback) => {
   if (isDraft(doc)) {
     return callback();
   }
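Note the shape of this change: the module now exports a factory instead of a single shared `miss.through.obj(...)` instance. Node transform streams are one-shot, so a module-level stream would be consumed (or left in an errored state) by the first export and break every subsequent one; the same factory conversion appears in filterSystemDocuments.js and rejectOnApiError.js below. A sketch of the pattern, with hypothetical names:

```js
const miss = require('mississippi');

// Before: one stream instance shared by every require() caller.
// Once a pipeline using it finishes or errors, it cannot be reused.
const sharedFilter = miss.through.obj((doc, enc, cb) => cb(null, doc));

// After: a factory; each export builds its own fresh transform.
const createFilter = () => miss.through.obj((doc, enc, cb) => cb(null, doc));

// Call sites change accordingly, e.g. miss.pipeline(..., createFilter(), ...)
```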
package/lib/filterSystemDocuments.js CHANGED

@@ -6,7 +6,7 @@ const debug = require('./debug');
 
 const isSystemDocument = doc => doc && doc._id && doc._id.indexOf('_.') === 0;
 
-module.exports = miss.through.obj((doc, enc, callback) => {
+module.exports = () => miss.through.obj((doc, enc, callback) => {
   if (isSystemDocument(doc)) {
     debug('%s is a system document, skipping', doc && doc._id);
     return callback();
package/lib/getDocumentsStream.js CHANGED

@@ -7,10 +7,13 @@ const requestStream = require('./requestStream');
 module.exports = (client, dataset) => {
   // Sanity client doesn't handle streams natively since we want to support node/browser
   // with same API. We're just using it here to get hold of URLs and tokens.
-  const url = client.getUrl(
+  const url = client.getUrl("/data/export/".concat(dataset));
+  const token = client.config().token;
   const headers = {
-
-
+    'User-Agent': "".concat(pkg.name, "@").concat(pkg.version),
+    ...(token ? {
+      Authorization: "Bearer ".concat(token)
+    } : {})
   };
   return requestStream({
     url,
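The rebuilt getDocumentsStream derives the export URL and auth header itself: it asks the client for the `/data/export/<dataset>` endpoint and attaches an `Authorization` header only when the client was configured with a token. In uncompiled form the request shape is roughly this (a sketch; `pkg` is the module's own package.json, as in the source):

```js
const url = client.getUrl(`/data/export/${dataset}`);
// e.g. https://<projectId>.api.sanity.io/v1/data/export/production

const token = client.config().token;
const headers = {
  'User-Agent': `${pkg.name}@${pkg.version}`,
  // Only present for authenticated clients; public datasets need no token.
  ...(token ? {Authorization: `Bearer ${token}`} : {}),
};
```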
package/lib/logFirstChunk.js ADDED

@@ -0,0 +1,18 @@
+"use strict";
+
+const miss = require('mississippi');
+
+const debug = require('./debug');
+
+module.exports = () => {
+  let firstChunk = true;
+  return miss.through((chunk, enc, callback) => {
+    if (firstChunk) {
+      const string = chunk.toString('utf8').split('\n')[0];
+      debug('First chunk received: %s', string.slice(0, 300));
+      firstChunk = false;
+    }
+
+    callback(null, chunk);
+  });
+};
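logFirstChunk is a pure pass-through: data flows unchanged, and only the first line of the first chunk (capped at 300 characters) is logged through the debug helper. That makes a misbehaving endpoint, say an HTML error page arriving where ndjson was expected, visible immediately. A quick demo, assuming the module is required by its lib path and debug output is enabled for the package's namespace:

```js
const toStream = require('string-to-stream'); // already a devDependency
const logFirstChunk = require('@sanity/export/lib/logFirstChunk');

// The transform passes every byte through; it only *logs* the first line.
toStream('{"_id":"a"}\n{"_id":"b"}\n')
  .pipe(logFirstChunk())
  .pipe(process.stdout);
```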
package/lib/rejectOnApiError.js CHANGED

@@ -2,11 +2,16 @@
 
 const miss = require('mississippi');
 
-module.exports = miss.through.obj((doc, enc, callback) => {
+module.exports = () => miss.through.obj((doc, enc, callback) => {
   if (doc.error && doc.statusCode) {
     callback(new Error([doc.statusCode, doc.error].join(': ')));
     return;
   }
 
+  if (!doc._id && doc.error) {
+    callback(new Error(doc.error.description || doc.error.message || JSON.stringify(doc)));
+    return;
+  }
+
   callback(null, doc);
 });
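The added branch widens the failure detection: besides the original `{statusCode, error}` pair, any object that has an `error` property but no `_id` is now treated as an API error envelope rather than a document, while real documents that merely contain an `error` field still pass through. A sketch with hypothetical payloads (the require path is assumed):

```js
const rejectOnApiError = require('@sanity/export/lib/rejectOnApiError');

const guard = rejectOnApiError();
guard.on('data', doc => console.log('passed:', doc._id));
guard.on('error', err => console.error('rejected:', err.message));

// Passes: a document that happens to have an `error` field (it has an _id).
guard.write({_id: 'incident1', _type: 'incident', error: 'disk full'});

// Rejects: an error envelope without an _id, e.g. from a mid-stream failure.
guard.write({error: {description: 'Dataset "foo" not found'}});
```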
package/lib/requestStream.js CHANGED

@@ -1,7 +1,63 @@
 "use strict";
 
-const
+const getIt = require('get-it');
 
-
-
-
+const {
+  keepAlive,
+  promise
+} = require('get-it/middleware');
+
+const debug = require('./debug');
+
+const request = getIt([keepAlive(), promise({
+  onlyBody: true
+})]);
+const socketsWithTimeout = new WeakSet();
+const CONNECTION_TIMEOUT = 15 * 1000; // 15 seconds
+
+const READ_TIMEOUT = 3 * 60 * 1000; // 3 minutes
+
+const MAX_RETRIES = 5;
+
+function delay(ms) {
+  return new Promise(resolve => setTimeout(resolve, ms));
+}
+/* eslint-disable no-await-in-loop, max-depth */
+
+
+module.exports = async options => {
+  let error;
+
+  for (let i = 0; i < MAX_RETRIES; i++) {
+    try {
+      const response = await request({ ...options,
+        stream: true,
+        maxRedirects: 0,
+        timeout: {
+          connect: CONNECTION_TIMEOUT,
+          socket: READ_TIMEOUT
+        }
+      });
+
+      if (response.connection && typeof response.connection.setTimeout === 'function' && !socketsWithTimeout.has(response.connection)) {
+        socketsWithTimeout.add(response.connection);
+        response.connection.setTimeout(READ_TIMEOUT, () => {
+          response.destroy(new Error("Read timeout: No data received on socket for ".concat(READ_TIMEOUT, " ms")));
+        });
+      }
+
+      return response;
+    } catch (err) {
+      error = err;
+
+      if (err.response && err.response.statusCode && err.response.statusCode < 500) {
+        break;
+      }
+
+      debug('Error, retrying after 1500ms: %s', err.message);
+      await delay(1500);
+    }
+  }
+
+  throw error;
+};
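The retry policy embedded in this rewrite is: up to 5 attempts, a flat 1500 ms pause between attempts, an immediate break (no retry) when the server answered with anything below HTTP 500, and two layers of read-timeout protection (get-it's `timeout` option plus a socket-level `setTimeout` guard). Stripped of the HTTP specifics, the control flow reduces to a generic pattern like this sketch:

```js
// Generic shape of requestStream's retry loop (not the module's API).
async function retrying(attempt, {maxRetries = 5, delayMs = 1500} = {}) {
  let lastError;

  for (let i = 0; i < maxRetries; i++) {
    try {
      return await attempt();
    } catch (err) {
      lastError = err;
      const status = err.response && err.response.statusCode;

      // 4xx answers are definitive (auth, not-found, ...); retrying won't help.
      if (status && status < 500) break;

      await new Promise(resolve => setTimeout(resolve, delayMs));
    }
  }

  throw lastError;
}
```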
package/lib/tryParseJson.js ADDED

@@ -0,0 +1,25 @@
+"use strict";
+
+module.exports = line => {
+  try {
+    return JSON.parse(line);
+  } catch (err) {
+    // Catch half-done lines with an error at the end
+    const errorPosition = line.lastIndexOf('{"error":');
+
+    if (errorPosition === -1) {
+      err.message = "".concat(err.message, " (").concat(line, ")");
+      throw err;
+    }
+
+    const errorJson = line.slice(errorPosition);
+    const errorLine = JSON.parse(errorJson);
+    const error = errorLine && errorLine.error;
+
+    if (error && error.description) {
+      throw new Error("Error streaming dataset: ".concat(error.description, "\n\n").concat(errorJson, "\n"));
+    }
+
+    throw err;
+  }
+};
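tryParseJson is the mapper handed to split2 in the export pipeline. It covers an awkward failure mode of the export endpoint: when the backend dies mid-stream, the last ndjson line can be a truncated document with a JSON error object appended to its tail. The parser digs that trailing `{"error": ...}` out and rethrows with its description. For instance (hypothetical input; require path assumed):

```js
const tryParseJson = require('@sanity/export/lib/tryParseJson');

// A well-formed line parses as usual:
tryParseJson('{"_id":"movie_1","_type":"movie"}');
// => { _id: 'movie_1', _type: 'movie' }

// A half-written document with an error payload glued onto its end:
const line = '{"_id":"movie_2","title":"Ali{"error":{"description":"stream aborted"}}';

try {
  tryParseJson(line);
} catch (err) {
  console.error(err.message);
  // => Error streaming dataset: stream aborted
  //    (followed by the raw error JSON)
}
```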
package/lib/validateOptions.js CHANGED

@@ -15,11 +15,11 @@ function validateOptions(opts) {
   const options = defaults({}, opts, exportDefaults);
 
   if (typeof options.dataset !== 'string' || options.dataset.length < 1) {
-    throw new Error(
+    throw new Error("options.dataset must be a valid dataset name");
   }
 
   if (options.onProgress && typeof options.onProgress !== 'function') {
-    throw new Error(
+    throw new Error("options.onProgress must be a function");
   }
 
   if (!options.client) {

@@ -29,7 +29,7 @@ function validateOptions(opts) {
   const missing = clientMethods.find(key => typeof options.client[key] !== 'function');
 
   if (missing) {
-    throw new Error(
+    throw new Error("`options.client` is not a valid @sanity/client instance - no \"".concat(missing, "\" method found"));
   }
 
   const clientConfig = options.client.config();

@@ -40,7 +40,7 @@ function validateOptions(opts) {
 
   booleanFlags.forEach(flag => {
     if (typeof options[flag] !== 'boolean') {
-      throw new Error(
+      throw new Error("Flag ".concat(flag, " must be a boolean (true/false)"));
     }
   });
 

@@ -48,6 +48,10 @@ function validateOptions(opts) {
     throw new Error('outputPath must be specified (- for stdout)');
   }
 
+  if (options.assetConcurrency && (options.assetConcurrency < 1 || options.assetConcurrency > 24)) {
+    throw new Error('`assetConcurrency` must be between 1 and 24');
+  }
+
   return options;
 }
 
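The new guard only runs when assetConcurrency is actually set, so omitting it preserves the old behavior, and any value outside 1 to 24 fails fast before an export starts. A sketch (assuming a valid client instance in scope and that the module's exportDefaults fills in the unset boolean flags):

```js
const validateOptions = require('@sanity/export/lib/validateOptions');

// Throws: `assetConcurrency` must be between 1 and 24
validateOptions({client, dataset: 'production', outputPath: '-', assetConcurrency: 64});

// OK: option omitted entirely, so the guard is skipped.
// Note that 0 is falsy and would also slip past the truthiness check.
validateOptions({client, dataset: 'production', outputPath: '-'});
```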
package/package.json CHANGED

@@ -1,16 +1,19 @@
 {
   "name": "@sanity/export",
-  "version": "0.136.3-
+  "version": "0.136.3-purple-unicorn-patch.5627+c8b4b71ac",
   "description": "Export Sanity documents and assets",
-  "main": "lib/export.js",
+  "main": "./lib/export.js",
+  "types": "./lib/dts/src/export.d.ts",
   "engines": {
-    "node": ">=
+    "node": ">=12.0.0"
   },
   "author": "Sanity.io <hello@sanity.io>",
   "license": "MIT",
   "scripts": {
+    "build": "../../../bin/pkg-utils transpile --target node",
     "clean": "rimraf lib",
-    "test": "jest"
+    "test": "jest",
+    "watch": "../../../bin/pkg-utils transpile --target node --watch"
   },
   "keywords": [
     "sanity",

@@ -22,20 +25,17 @@
     "ndjson"
   ],
   "dependencies": {
-    "archiver": "^
-    "debug": "^3.
-    "fs-extra": "^
-    "
-    "
+    "archiver": "^5.0.0",
+    "debug": "^3.2.7",
+    "fs-extra": "^7.0.0",
+    "get-it": "^5.2.1",
+    "lodash": "^4.17.21",
+    "mississippi": "^4.0.0",
     "p-queue": "^2.3.0",
-    "
-    "split2": "^2.1.1"
+    "split2": "^3.2.2"
   },
   "devDependencies": {
-    "
-    "decompress": "^4.2.0",
-    "jest": "^23.6.0",
-    "rimraf": "^2.6.2",
+    "rimraf": "^3.0.2",
     "string-to-stream": "^1.1.0"
   },
   "publishConfig": {

@@ -43,11 +43,12 @@
   },
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/sanity-io/sanity.git"
+    "url": "git+https://github.com/sanity-io/sanity.git",
+    "directory": "packages/@sanity/export"
   },
   "bugs": {
     "url": "https://github.com/sanity-io/sanity/issues"
   },
   "homepage": "https://www.sanity.io/",
-  "gitHead": "
+  "gitHead": "c8b4b71acd7cf035f6f0a2efd19c39ced849811f"
 }