@ardrive/turbo-sdk 1.30.0-alpha.1 → 1.31.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bundles/web.bundle.min.js +404 -16
- package/lib/cjs/cli/commands/uploadFile.js +1 -0
- package/lib/cjs/cli/commands/uploadFolder.js +4 -1
- package/lib/cjs/cli/options.js +17 -1
- package/lib/cjs/cli/types.js +0 -15
- package/lib/cjs/cli/utils.js +13 -0
- package/lib/cjs/common/chunked.js +359 -0
- package/lib/cjs/common/http.js +1 -0
- package/lib/cjs/common/turbo.js +10 -2
- package/lib/cjs/common/upload.js +39 -9
- package/lib/cjs/types.js +2 -1
- package/lib/cjs/version.js +1 -1
- package/lib/esm/cli/commands/uploadFile.js +2 -1
- package/lib/esm/cli/commands/uploadFolder.js +4 -1
- package/lib/esm/cli/options.js +17 -1
- package/lib/esm/cli/types.js +0 -15
- package/lib/esm/cli/utils.js +12 -0
- package/lib/esm/common/chunked.js +352 -0
- package/lib/esm/common/http.js +1 -0
- package/lib/esm/common/turbo.js +10 -2
- package/lib/esm/common/upload.js +39 -9
- package/lib/esm/types.js +1 -0
- package/lib/esm/version.js +1 -1
- package/lib/types/cli/commands/uploadFile.d.ts.map +1 -1
- package/lib/types/cli/commands/uploadFolder.d.ts.map +1 -1
- package/lib/types/cli/options.d.ts +45 -2
- package/lib/types/cli/options.d.ts.map +1 -1
- package/lib/types/cli/types.d.ts +4 -0
- package/lib/types/cli/types.d.ts.map +1 -1
- package/lib/types/cli/utils.d.ts +9 -1
- package/lib/types/cli/utils.d.ts.map +1 -1
- package/lib/types/common/chunked.d.ts +44 -0
- package/lib/types/common/chunked.d.ts.map +1 -0
- package/lib/types/common/http.d.ts +1 -1
- package/lib/types/common/http.d.ts.map +1 -1
- package/lib/types/common/turbo.d.ts +2 -2
- package/lib/types/common/turbo.d.ts.map +1 -1
- package/lib/types/common/upload.d.ts +3 -3
- package/lib/types/common/upload.d.ts.map +1 -1
- package/lib/types/types.d.ts +16 -4
- package/lib/types/types.d.ts.map +1 -1
- package/lib/types/version.d.ts +1 -1
- package/package.json +9 -5
package/lib/esm/common/chunked.js
ADDED
@@ -0,0 +1,352 @@
+/**
+ * Copyright (C) 2022-2024 Permanent Data Solutions, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import { CanceledError } from 'axios';
+import { pLimit } from 'plimit-lit';
+import { validChunkingModes, } from '../types.js';
+import { TurboEventEmitter, createStreamWithUploadEvents } from './events.js';
+import { TurboWinstonLogger } from './logger.js';
+const fiveMiB = 5 * 1024 * 1024; // 5 MiB
+const fiveHundredMiB = fiveMiB * 100; // 500 MiB
+export const defaultMaxChunkConcurrency = 5;
+export const maxChunkByteCount = fiveHundredMiB;
+export const minChunkByteCount = fiveMiB;
+export const defaultChunkByteCount = minChunkByteCount;
+const backlogQueueFactor = 2;
+const chunkingHeader = { 'x-chunking-version': '2' };
+/**
+ * Performs a chunked upload by splitting the stream into fixed-size buffers,
+ * uploading them in parallel, and emitting progress/error events.
+ */
+export class ChunkedUploader {
+    constructor({ http, token, maxChunkConcurrency = defaultMaxChunkConcurrency, chunkByteCount = defaultChunkByteCount, logger = TurboWinstonLogger.default, chunkingMode = 'auto', dataItemByteCount, }) {
+        this.chunkByteCount = chunkByteCount;
+        this.maxChunkConcurrency = maxChunkConcurrency;
+        this.http = http;
+        this.token = token;
+        this.logger = logger;
+        this.assertChunkParams({
+            chunkByteCount,
+            chunkingMode,
+            maxChunkConcurrency,
+        });
+        this.shouldUseChunkUploader = this.shouldChunkUpload({
+            chunkByteCount,
+            chunkingMode,
+            dataItemByteCount,
+        });
+        this.maxBacklogQueue = this.maxChunkConcurrency * backlogQueueFactor;
+    }
+    shouldChunkUpload({ chunkByteCount, chunkingMode, dataItemByteCount, }) {
+        if (chunkingMode === 'disabled') {
+            return false;
+        }
+        if (chunkingMode === 'force') {
+            return true;
+        }
+        const isMoreThanTwoChunksOfData = dataItemByteCount > chunkByteCount * 2;
+        return isMoreThanTwoChunksOfData;
+    }
+    assertChunkParams({ chunkByteCount, chunkingMode, maxChunkConcurrency, }) {
+        if (Number.isNaN(maxChunkConcurrency) ||
+            !Number.isInteger(maxChunkConcurrency) ||
+            maxChunkConcurrency < 1) {
+            throw new Error('Invalid max chunk concurrency. Must be an integer of at least 1.');
+        }
+        if (Number.isNaN(chunkByteCount) ||
+            !Number.isInteger(chunkByteCount) ||
+            chunkByteCount < fiveMiB ||
+            chunkByteCount > fiveHundredMiB) {
+            throw new Error('Invalid chunk size. Must be an integer between 5 MiB and 500 MiB.');
+        }
+        if (typeof chunkingMode !== 'string' ||
+            !validChunkingModes.includes(chunkingMode)) {
+            throw new Error(`Invalid chunking mode. Must be one of: ${validChunkingModes.join(', ')}`);
+        }
+    }
+    /**
+     * Initialize or resume an upload session, returning the upload ID.
+     */
+    async initUpload() {
+        const res = await this.http.get({
+            endpoint: `/chunks/${this.token}/-1/-1?chunkSize=${this.chunkByteCount}`,
+            headers: chunkingHeader,
+        });
+        if (res.chunkSize !== this.chunkByteCount) {
+            this.logger.warn('Chunk size mismatch! Overriding with server value.', {
+                expected: this.chunkByteCount,
+                actual: res.chunkSize,
+            });
+            this.chunkByteCount = res.chunkSize;
+        }
+        return res.id;
+    }
+    async upload({ dataItemSizeFactory, dataItemStreamFactory, dataItemOpts, signal, events, }) {
+        const uploadId = await this.initUpload();
+        const dataItemByteCount = dataItemSizeFactory();
+        const emitter = new TurboEventEmitter(events);
+        const { stream, resume } = createStreamWithUploadEvents({
+            data: dataItemStreamFactory(),
+            dataSize: dataItemByteCount,
+            emitter,
+        });
+        this.logger.debug(`Starting chunked upload`, {
+            token: this.token,
+            uploadId,
+            totalSize: dataItemByteCount,
+            chunkByteCount: this.chunkByteCount,
+            maxChunkConcurrency: this.maxChunkConcurrency,
+            inputStreamType: isReadableStream(stream) ? 'ReadableStream' : 'Readable',
+        });
+        const inFlight = new Set();
+        const internalAbort = new AbortController();
+        const combinedSignal = combineAbortSignals([internalAbort.signal, signal]);
+        const limit = pLimit(this.maxChunkConcurrency);
+        let currentOffset = 0;
+        let currentChunkPartNumber = 0;
+        let firstError;
+        let uploadedBytes = 0;
+        const chunks = splitIntoChunks(stream, this.chunkByteCount);
+        resume();
+        for await (const chunk of chunks) {
+            if (combinedSignal?.aborted) {
+                internalAbort.abort();
+                await Promise.allSettled(inFlight);
+                firstError ??= new CanceledError();
+                break;
+            }
+            const chunkPartNumber = ++currentChunkPartNumber;
+            const chunkByteCount = chunk.length;
+            const chunkOffset = currentOffset;
+            currentOffset += chunkByteCount;
+            this.logger.debug('Queueing chunk', {
+                chunkPartNumber,
+                chunkOffset,
+                chunkByteCount,
+            });
+            const promise = limit(async () => {
+                if (firstError !== undefined) {
+                    return;
+                }
+                this.logger.debug('Uploading chunk', {
+                    chunkPartNumber,
+                    chunkOffset,
+                    chunkByteCount,
+                });
+                await this.http.post({
+                    endpoint: `/chunks/${this.token}/${uploadId}/${chunkOffset}`,
+                    data: chunk,
+                    headers: {
+                        'Content-Type': 'application/octet-stream',
+                        ...chunkingHeader,
+                    },
+                    signal: combinedSignal,
+                });
+                uploadedBytes += chunkByteCount;
+                this.logger.debug('Chunk uploaded', {
+                    chunkPartNumber,
+                    chunkOffset,
+                    chunkByteCount,
+                });
+                emitter.emit('upload-progress', {
+                    processedBytes: uploadedBytes,
+                    totalBytes: dataItemByteCount,
+                });
+            }).catch((err) => {
+                this.logger.error('Chunk upload failed', {
+                    id: chunkPartNumber,
+                    offset: chunkOffset,
+                    size: chunkByteCount,
+                    err,
+                });
+                emitter.emit('upload-error', err);
+                internalAbort.abort(err);
+                firstError = firstError ?? err;
+            });
+            inFlight.add(promise);
+            promise.finally(() => inFlight.delete(promise));
+            if (inFlight.size >= this.maxBacklogQueue) {
+                await Promise.race(inFlight);
+                if (combinedSignal?.aborted) {
+                    internalAbort.abort();
+                    await Promise.allSettled(inFlight);
+                    firstError ??= new CanceledError();
+                    break;
+                }
+            }
+        }
+        await Promise.all(inFlight);
+        if (firstError !== undefined) {
+            throw firstError;
+        }
+        const paidByHeader = {};
+        if (dataItemOpts?.paidBy !== undefined) {
+            paidByHeader['x-paid-by'] = Array.isArray(dataItemOpts.paidBy)
+                ? dataItemOpts.paidBy.join(',')
+                : dataItemOpts.paidBy;
+        }
+        // TODO: Async Finalize
+        // Finalize and reconstruct server-side
+        const finalizeResponse = await this.http.post({
+            endpoint: `/chunks/${this.token}/${uploadId}/-1`,
+            data: Buffer.alloc(0),
+            headers: {
+                'Content-Type': 'application/octet-stream',
+                ...paidByHeader,
+                ...chunkingHeader,
+            },
+            signal: combinedSignal,
+        });
+        emitter.emit('upload-success');
+        return finalizeResponse;
+    }
+}
+/**
+ * Yield Buffers of up to `chunkByteCount`, coalescing whatever small pieces
+ * the source produces into proper slices.
+ */
+export async function* splitIntoChunks(source, chunkByteCount) {
+    if (isReadableStream(source)) {
+        yield* splitReadableStreamIntoChunks(source, chunkByteCount);
+    }
+    else {
+        yield* splitReadableIntoChunks(source, chunkByteCount);
+    }
+}
+export async function* splitReadableIntoChunks(source, chunkByteCount) {
+    const queue = [];
+    let total = 0;
+    let encoder;
+    for await (const piece of source) {
+        const u8 = piece instanceof Uint8Array
+            ? new Uint8Array(piece.buffer, piece.byteOffset, piece.byteLength)
+            : (encoder ??= new TextEncoder()).encode(String(piece));
+        queue.push(u8);
+        total += u8.length;
+        // Emit full chunks
+        while (total >= chunkByteCount) {
+            const out = new Uint8Array(chunkByteCount);
+            let remaining = out.length;
+            let off = 0;
+            while (remaining > 0) {
+                const head = queue[0];
+                const take = Math.min(remaining, head.length);
+                out.set(head.subarray(0, take), off);
+                off += take;
+                remaining -= take;
+                if (take === head.length) {
+                    queue.shift();
+                }
+                else {
+                    queue[0] = head.subarray(take);
+                }
+            }
+            total -= chunkByteCount;
+            // Yield a Buffer view (no copy)
+            yield Buffer.from(out.buffer, out.byteOffset, out.byteLength);
+        }
+    }
+    // Remainder
+    if (total > 0) {
+        const out = new Uint8Array(total);
+        let off = 0;
+        while (queue.length > 0) {
+            const head = queue.shift(); // safe due to loop condition
+            out.set(head, off);
+            off += head.length;
+        }
+        yield Buffer.from(out.buffer, out.byteOffset, out.byteLength);
+    }
+}
+export async function* splitReadableStreamIntoChunks(source, chunkByteCount) {
+    const reader = source.getReader();
+    const queue = [];
+    let total = 0;
+    try {
+        while (true) {
+            const { value, done } = await reader.read();
+            if (done)
+                break;
+            // Ensure we keep a plain view (avoids surprises if the producer reuses buffers)
+            const u8 = new Uint8Array(value.buffer, value.byteOffset, value.byteLength);
+            queue.push(u8);
+            total += u8.length;
+            while (total >= chunkByteCount) {
+                const out = new Uint8Array(chunkByteCount);
+                let remaining = out.length;
+                let off = 0;
+                while (remaining > 0) {
+                    const head = queue[0];
+                    const take = Math.min(remaining, head.length);
+                    out.set(head.subarray(0, take), off);
+                    off += take;
+                    remaining -= take;
+                    if (take === head.length) {
+                        queue.shift();
+                    }
+                    else {
+                        queue[0] = head.subarray(take);
+                    }
+                }
+                total -= chunkByteCount;
+                yield Buffer.from(out.buffer, out.byteOffset, out.byteLength);
+            }
+        }
+        if (total > 0) {
+            const out = new Uint8Array(total);
+            let off = 0;
+            while (queue.length > 0) {
+                const head = queue.shift(); // safe due to loop condition
+                out.set(head, off);
+                off += head.length;
+            }
+            yield Buffer.from(out.buffer, out.byteOffset, out.byteLength);
+        }
+    }
+    finally {
+        reader.releaseLock();
+    }
+}
+function isReadableStream(source) {
+    // Prefer instanceof if available, otherwise use a safe duck-typing check
+    if (typeof ReadableStream !== 'undefined' &&
+        source instanceof ReadableStream) {
+        return true;
+    }
+    return (source !== null &&
+        typeof source === 'object' &&
+        'getReader' in source &&
+        typeof source.getReader === 'function');
+}
+function combineAbortSignals(signals) {
+    const real = signals.filter(Boolean);
+    if (real.length === 0)
+        return undefined;
+    const anyFn = AbortSignal.any;
+    if (typeof anyFn === 'function') {
+        return anyFn(real);
+    }
+    const controller = new AbortController();
+    for (const s of real) {
+        const sig = s;
+        if (sig.aborted) {
+            controller.abort(sig.reason);
+            break;
+        }
+        const onAbort = () => controller.abort(sig.reason);
+        s.addEventListener('abort', onAbort, { once: true });
+    }
+    return controller.signal;
+}
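The heart of the new file is the pair of chunk-splitting generators: whatever piece sizes the source stream produces, they coalesce into exact `chunkByteCount` buffers plus one remainder. A minimal sketch of that behavior, assuming a deep import of the compiled ESM file above (not a documented package entry point):

```ts
import { Readable } from 'node:stream';
// Assumed deep import of the built file shown above; not a public export.
import { splitIntoChunks } from '@ardrive/turbo-sdk/lib/esm/common/chunked.js';

const fiveMiB = 5 * 1024 * 1024;

// A source that emits 16 MiB as 2048 small 8 KiB pieces.
const source = Readable.from(
  (function* () {
    for (let i = 0; i < 2048; i++) {
      yield Buffer.alloc(8 * 1024, i % 256);
    }
  })(),
);

// Pieces are coalesced into exact 5 MiB Buffers, then the remainder:
// logs 5 MiB, 5 MiB, 5 MiB, 1 MiB.
for await (const chunk of splitIntoChunks(source, fiveMiB)) {
  console.log(chunk.byteLength);
}
```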
package/lib/esm/common/http.js
CHANGED
@@ -42,6 +42,7 @@ export class TurboHTTPService {
         // See: https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API#body
         const { body, duplex } = await toFetchBody(data);
         try {
+            this.logger.debug('Posting data via fetch', { endpoint, headers });
             const res = await fetch(this.axios.defaults.baseURL + endpoint, {
                 method: 'POST',
                 headers,
package/lib/esm/common/turbo.js
CHANGED
@@ -156,8 +156,16 @@ export class TurboAuthenticatedClient extends TurboUnauthenticatedClient {
     /**
      * Signs and uploads raw data to the Turbo Upload Service.
      */
-    upload({ data, dataItemOpts, signal, events, }) {
-        return this.uploadService.upload({
+    upload({ data, dataItemOpts, signal, events, chunkByteCount, chunkingMode, maxChunkConcurrency, }) {
+        return this.uploadService.upload({
+            data,
+            dataItemOpts,
+            signal,
+            events,
+            chunkByteCount,
+            chunkingMode,
+            maxChunkConcurrency,
+        });
     }
     uploadFile(params) {
         return this.uploadService.uploadFile(params);
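The client now forwards the three chunking options straight through to the upload service. A hedged usage sketch: the factory and wallet setup follow the SDK's existing README conventions and are shown only for context, while the three chunking options are the new surface from this diff:

```ts
import { readFileSync } from 'node:fs';
import { TurboFactory } from '@ardrive/turbo-sdk/node';

const jwk = JSON.parse(readFileSync('./wallet.json', 'utf-8')); // illustrative key path
const turbo = TurboFactory.authenticated({ privateKey: jwk });

const result = await turbo.upload({
  data: readFileSync('./large-file.bin'),
  chunkingMode: 'force',            // 'auto' (default) | 'force' | 'disabled'
  chunkByteCount: 10 * 1024 * 1024, // assertChunkParams allows 5 MiB to 500 MiB
  maxChunkConcurrency: 8,           // chunks in flight at once for this upload
});
console.log('uploaded data item:', result.id);
```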
package/lib/esm/common/upload.js
CHANGED
@@ -18,6 +18,7 @@ import { pLimit } from 'plimit-lit';
 import { defaultRetryConfig } from '../utils/axiosClient.js';
 import { isBlob, sleep } from '../utils/common.js';
 import { FailedRequestError } from '../utils/errors.js';
+import { ChunkedUploader } from './chunked.js';
 import { TurboEventEmitter, createStreamWithUploadEvents } from './events.js';
 import { TurboHTTPService } from './http.js';
 import { TurboWinstonLogger } from './logger.js';
@@ -91,7 +92,7 @@ export class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUpl
     /**
      * Signs and uploads raw data to the Turbo Upload Service.
      */
-    upload({ data, dataItemOpts, signal, events, }) {
+    upload({ data, dataItemOpts, signal, events, chunkByteCount, chunkingMode, maxChunkConcurrency, }) {
         // This function is intended to be usable in both Node and browser environments.
         if (isBlob(data)) {
             const streamFactory = () => data.stream();
@@ -119,6 +120,9 @@ export class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUpl
             signal,
             dataItemOpts,
             events,
+            chunkByteCount,
+            chunkingMode,
+            maxChunkConcurrency,
         });
     }
     resolveUploadFileConfig(params) {
@@ -159,22 +163,42 @@ export class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUpl
         let lastStatusCode = undefined; // Store the last status code for throwing
         const emitter = new TurboEventEmitter(events);
         // avoid duplicating signing on failures here - these errors will immediately be thrown
-        // TODO: create a SigningError class and throw that instead of the generic Error
-        const { dataItemStreamFactory, dataItemSizeFactory } = await this.signer.signDataItem({
-            fileStreamFactory,
-            fileSizeFactory,
-            dataItemOpts,
-            emitter,
-        });
         // TODO: move the retry implementation to the http class, and avoid awaiting here. This will standardize the retry logic across all upload methods.
         while (retries < maxRetries) {
             if (signal?.aborted) {
                 throw new CanceledError();
             }
+            // TODO: create a SigningError class and throw that instead of the generic Error
+            const { dataItemStreamFactory, dataItemSizeFactory } = await this.signer.signDataItem({
+                fileStreamFactory,
+                fileSizeFactory,
+                dataItemOpts,
+                emitter,
+            });
             // Now that we have the signed data item, we can upload it using the uploadSignedDataItem method
             // which will create a new emitter with upload events. We await
             // this result due to the wrapped retry logic of this method.
             try {
+                const { chunkByteCount, maxChunkConcurrency } = params;
+                const chunkedUploader = new ChunkedUploader({
+                    http: this.httpService,
+                    token: this.token,
+                    maxChunkConcurrency,
+                    chunkByteCount,
+                    logger: this.logger,
+                    dataItemByteCount: dataItemSizeFactory(),
+                    chunkingMode: params.chunkingMode,
+                });
+                if (chunkedUploader.shouldUseChunkUploader) {
+                    const response = await chunkedUploader.upload({
+                        dataItemStreamFactory,
+                        dataItemSizeFactory,
+                        dataItemOpts,
+                        signal,
+                        events,
+                    });
+                    return response;
+                }
                 const response = await this.uploadSignedDataItem({
                     dataItemStreamFactory,
                     dataItemSizeFactory,
@@ -263,7 +287,7 @@ export class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUpl
      */
     async uploadFolder(params) {
         this.logger.debug('Uploading folder...', { params });
-        const { dataItemOpts, signal, manifestOptions = {}, maxConcurrentUploads = 1, throwOnFailure = true, } = params;
+        const { dataItemOpts, signal, manifestOptions = {}, maxConcurrentUploads = 1, throwOnFailure = true, maxChunkConcurrency, chunkByteCount, chunkingMode, } = params;
         const { disableManifest, indexFile, fallbackFile } = manifestOptions;
         const paths = {};
         const response = {
@@ -287,6 +311,9 @@ export class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUpl
                 fileSizeFactory: () => this.getFileSize(file),
                 signal,
                 dataItemOpts: dataItemOptsWithContentType,
+                chunkByteCount,
+                maxChunkConcurrency,
+                chunkingMode,
             });
             const relativePath = this.getRelativePath(file, params);
             paths[relativePath] = { id: result.id };
@@ -332,6 +359,9 @@ export class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUpl
             fileSizeFactory: () => manifestBuffer.byteLength,
             signal,
             dataItemOpts: { ...dataItemOpts, tags: tagsWithManifestContentType },
+            chunkByteCount,
+            maxChunkConcurrency,
+            chunkingMode,
         });
         return {
             ...response,
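Note how the gate in the `try` block works: in 'auto' mode, `shouldUseChunkUploader` routes a signed data item through `ChunkedUploader` only when it spans more than two chunks, so with the 5 MiB default anything over 10 MiB is chunked and everything smaller keeps using the single-request `uploadSignedDataItem` path. Restated standalone:

```ts
// The 'auto' decision rule from ChunkedUploader.shouldChunkUpload, restated.
const defaultChunkByteCount = 5 * 1024 * 1024; // 5 MiB

function wouldChunk(
  dataItemByteCount: number,
  chunkByteCount = defaultChunkByteCount,
): boolean {
  return dataItemByteCount > chunkByteCount * 2;
}

console.log(wouldChunk(8 * 1024 * 1024));  // false: fits within two chunks
console.log(wouldChunk(64 * 1024 * 1024)); // true: takes the chunked path
```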
package/lib/esm/types.js
CHANGED
package/lib/esm/version.js
CHANGED
package/lib/types/cli/commands/uploadFile.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"uploadFile.d.ts","sourceRoot":"","sources":["../../../../src/cli/commands/uploadFile.ts"],"names":[],"mappings":"AAkBA,OAAO,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;
+{"version":3,"file":"uploadFile.d.ts","sourceRoot":"","sources":["../../../../src/cli/commands/uploadFile.ts"],"names":[],"mappings":"AAkBA,OAAO,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAQhD,wBAAsB,UAAU,CAAC,OAAO,EAAE,iBAAiB,GAAG,OAAO,CAAC,IAAI,CAAC,CAmB1E"}
package/lib/types/cli/commands/uploadFolder.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"uploadFolder.d.ts","sourceRoot":"","sources":["../../../../src/cli/commands/uploadFolder.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,mBAAmB,EAAE,MAAM,aAAa,CAAC;AAQlD,wBAAsB,YAAY,CAChC,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,IAAI,CAAC,
+{"version":3,"file":"uploadFolder.d.ts","sourceRoot":"","sources":["../../../../src/cli/commands/uploadFolder.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,mBAAmB,EAAE,MAAM,aAAa,CAAC;AAQlD,wBAAsB,YAAY,CAChC,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,IAAI,CAAC,CAgCf"}
package/lib/types/cli/options.d.ts
CHANGED
@@ -131,7 +131,7 @@ export declare const optionMap: {
     };
     readonly maxConcurrency: {
         readonly alias: "--max-concurrency <maxConcurrency>";
-        readonly description: "Maximum number of concurrent uploads";
+        readonly description: "Maximum number of concurrent file uploads";
     };
     readonly paidBy: {
         readonly alias: "--paid-by <paidBy...>";
@@ -156,6 +156,19 @@ export declare const optionMap: {
         readonly alias: "--byte-count <byteCount>";
         readonly description: "Number of bytes to use for the action";
     };
+    readonly maxChunkConcurrency: {
+        readonly alias: "--max-chunk-concurrency <maxChunkConcurrency>";
+        readonly description: "Maximum number of concurrent chunks to upload per file";
+    };
+    readonly chunkByteCount: {
+        readonly alias: "--chunk-byte-count <chunkByteCount>";
+        readonly description: "Size of each chunk in bytes";
+    };
+    readonly chunkingMode: {
+        readonly alias: "--chunking-mode <chunkingMode>";
+        readonly description: "Chunking mode to use for the upload. Can be \"auto\", \"force\" or \"disabled\". Defaults to \"auto\".";
+        readonly default: "auto";
+    };
 };
 export declare const walletOptions: ({
     readonly alias: "-w, --wallet-file <filePath>";
@@ -229,6 +242,16 @@ export declare const uploadOptions: ({
     readonly alias: "--use-signer-balance-first";
     readonly description: "Use the signer balance first before using credit share approvals";
     readonly default: false;
+} | {
+    readonly alias: "--max-chunk-concurrency <maxChunkConcurrency>";
+    readonly description: "Maximum number of concurrent chunks to upload per file";
+} | {
+    readonly alias: "--chunk-byte-count <chunkByteCount>";
+    readonly description: "Size of each chunk in bytes";
+} | {
+    readonly alias: "--chunking-mode <chunkingMode>";
+    readonly description: "Chunking mode to use for the upload. Can be \"auto\", \"force\" or \"disabled\". Defaults to \"auto\".";
+    readonly default: "auto";
 })[];
 export declare const uploadFolderOptions: ({
     readonly description: "An array of additional tags for the write action, in \"--tags name1 value1 name2 value2\" format";
@@ -258,7 +281,7 @@ export declare const uploadFolderOptions: ({
     readonly default: true;
 } | {
     readonly alias: "--max-concurrency <maxConcurrency>";
-    readonly description: "Maximum number of concurrent uploads";
+    readonly description: "Maximum number of concurrent file uploads";
 } | {
     readonly alias: "--paid-by <paidBy...>";
     readonly description: "Address to pay for the upload";
@@ -271,6 +294,16 @@ export declare const uploadFolderOptions: ({
     readonly alias: "--use-signer-balance-first";
     readonly description: "Use the signer balance first before using credit share approvals";
     readonly default: false;
+} | {
+    readonly alias: "--max-chunk-concurrency <maxChunkConcurrency>";
+    readonly description: "Maximum number of concurrent chunks to upload per file";
+} | {
+    readonly alias: "--chunk-byte-count <chunkByteCount>";
+    readonly description: "Size of each chunk in bytes";
+} | {
+    readonly alias: "--chunking-mode <chunkingMode>";
+    readonly description: "Chunking mode to use for the upload. Can be \"auto\", \"force\" or \"disabled\". Defaults to \"auto\".";
+    readonly default: "auto";
 })[];
 export declare const uploadFileOptions: ({
     readonly description: "An array of additional tags for the write action, in \"--tags name1 value1 name2 value2\" format";
@@ -300,6 +333,16 @@ export declare const uploadFileOptions: ({
     readonly alias: "--use-signer-balance-first";
     readonly description: "Use the signer balance first before using credit share approvals";
     readonly default: false;
+} | {
+    readonly alias: "--max-chunk-concurrency <maxChunkConcurrency>";
+    readonly description: "Maximum number of concurrent chunks to upload per file";
+} | {
+    readonly alias: "--chunk-byte-count <chunkByteCount>";
+    readonly description: "Size of each chunk in bytes";
+} | {
+    readonly alias: "--chunking-mode <chunkingMode>";
+    readonly description: "Chunking mode to use for the upload. Can be \"auto\", \"force\" or \"disabled\". Defaults to \"auto\".";
+    readonly default: "auto";
 })[];
 export declare const shareCreditsOptions: ({
     readonly alias: "-a, --address <nativeAddress>";
package/lib/types/cli/options.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"options.d.ts","sourceRoot":"","sources":["../../../src/cli/options.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,eAAO,MAAM,SAAS
+{"version":3,"file":"options.d.ts","sourceRoot":"","sources":["../../../src/cli/options.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,eAAO,MAAM,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAoKZ,CAAC;AAEX,eAAO,MAAM,aAAa;;;;;;;;;IAIzB,CAAC;AAEF,eAAO,MAAM,aAAa;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IAUzB,CAAC;AAEF,eAAO,MAAM,aAAa;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IASzB,CAAC;AAEF,eAAO,MAAM,mBAAmB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IAO/B,CAAC;AAEF,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IAAyC,CAAC;AAExE,eAAO,MAAM,mBAAmB;;;;;;;;;;;;;;;;;;IAK/B,CAAC;AAEF,eAAO,MAAM,oBAAoB;;;;;;;;;;;;IAAwC,CAAC;AAE1E,eAAO,MAAM,iBAAiB;;;;;;;;;;;;IAAuB,CAAC"}
package/lib/types/cli/types.d.ts
CHANGED
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+import { TurboChunkingMode } from '../types.js';
 export type GlobalOptions = {
     dev: boolean;
     local: boolean;
@@ -43,6 +44,9 @@ export type UploadOptions = WalletOptions & {
     ignoreApprovals: boolean;
     useSignerBalanceFirst: boolean;
     tags: string[] | undefined;
+    maxChunkConcurrency: string | undefined;
+    chunkByteCount: string | undefined;
+    chunkingMode: TurboChunkingMode | undefined;
 };
 export type UploadFolderOptions = UploadOptions & {
     folderPath: string | undefined;
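The `TurboChunkingMode` type itself is not shown in this diff, but the CLI description above ("auto", "force" or "disabled") and the `validChunkingModes` runtime check in chunked.js pin down what it must be; presumably:

```ts
// Inferred from the option description and the runtime validation;
// the actual declaration in src/types.ts is not part of this diff.
export type TurboChunkingMode = 'auto' | 'force' | 'disabled';
```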
package/lib/types/cli/types.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../src/cli/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../src/cli/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,OAAO,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAEhD,MAAM,MAAM,aAAa,GAAG;IAC1B,GAAG,EAAE,OAAO,CAAC;IACb,KAAK,EAAE,OAAO,CAAC;IACf,OAAO,EAAE,MAAM,GAAG,SAAS,CAAC;IAC5B,KAAK,EAAE,OAAO,CAAC;IACf,KAAK,EAAE,OAAO,CAAC;IACf,gBAAgB,EAAE,OAAO,CAAC;IAC1B,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,EAAE,MAAM,GAAG,SAAS,CAAC;IAC/B,SAAS,EAAE,MAAM,GAAG,SAAS,CAAC;IAC9B,SAAS,EAAE,MAAM,GAAG,SAAS,CAAC;IAC9B,KAAK,EAAE,MAAM,GAAG,SAAS,CAAC;CAC3B,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG,aAAa,GAAG;IAC1C,UAAU,EAAE,MAAM,GAAG,SAAS,CAAC;IAC/B,QAAQ,EAAE,MAAM,GAAG,SAAS,CAAC;IAC7B,UAAU,EAAE,MAAM,GAAG,SAAS,CAAC;CAChC,CAAC;AAEF,MAAM,MAAM,cAAc,GAAG,aAAa,GAAG;IAC3C,OAAO,EAAE,MAAM,GAAG,SAAS,CAAC;CAC7B,CAAC;AAEF,MAAM,MAAM,YAAY,GAAG,cAAc,GAAG;IAC1C,KAAK,EAAE,MAAM,GAAG,SAAS,CAAC;IAC1B,QAAQ,EAAE,MAAM,GAAG,SAAS,CAAC;CAC9B,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG,aAAa,GAAG;IAC1C,MAAM,EAAE,MAAM,EAAE,CAAC;IACjB,eAAe,EAAE,OAAO,CAAC;IACzB,qBAAqB,EAAE,OAAO,CAAC;IAC/B,IAAI,EAAE,MAAM,EAAE,GAAG,SAAS,CAAC;IAC3B,mBAAmB,EAAE,MAAM,GAAG,SAAS,CAAC;IACxC,cAAc,EAAE,MAAM,GAAG,SAAS,CAAC;IACnC,YAAY,EAAE,iBAAiB,GAAG,SAAS,CAAC;CAC7C,CAAC;AAEF,MAAM,MAAM,mBAAmB,GAAG,aAAa,GAAG;IAChD,UAAU,EAAE,MAAM,GAAG,SAAS,CAAC;IAC/B,SAAS,EAAE,MAAM,GAAG,SAAS,CAAC;IAC9B,YAAY,EAAE,MAAM,GAAG,SAAS,CAAC;IACjC,QAAQ,EAAE,OAAO,CAAC;IAClB,cAAc,EAAE,MAAM,GAAG,SAAS,CAAC;CACpC,CAAC;AAEF,MAAM,MAAM,iBAAiB,GAAG,aAAa,GAAG;IAC9C,QAAQ,EAAE,MAAM,GAAG,SAAS,CAAC;IAC7B,IAAI,EAAE,MAAM,EAAE,GAAG,SAAS,CAAC;CAC5B,CAAC;AAEF,MAAM,MAAM,iBAAiB,GAAG,aAAa,GAAG;IAC9C,SAAS,EAAE,MAAM,GAAG,SAAS,CAAC;CAC/B,CAAC;AAEF,MAAM,MAAM,mBAAmB,GAAG,iBAAiB,GAAG;IACpD,QAAQ,EAAE,MAAM,GAAG,SAAS,CAAC;CAC9B,CAAC;AAEF,MAAM,MAAM,YAAY,GAAG,iBAAiB,GAAG;IAC7C,KAAK,EAAE,MAAM,GAAG,SAAS,CAAC;IAC1B,QAAQ,EAAE,MAAM,GAAG,SAAS,CAAC;IAC7B,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC;CAC1B,CAAC;AAEF,MAAM,MAAM,iBAAiB,GAAG,aAAa,GAAG;IAC9C,KAAK,EAAE,MAAM,GAAG,SAAS,CAAC;IAC1B,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC;CAC1B,CAAC;AAEF,MAAM,MAAM,mBAAmB,GAAG,aAAa,GAAG;IAChD,OAAO,EAAE,MAAM,GAAG,SAAS,CAAC;IAC5B,KAAK,EAAE,MAAM,GAAG,SAAS,CAAC;IAC1B,gBAAgB,EAAE,MAAM,GAAG,SAAS,CAAC;CACtC,CAAC;AAEF,MAAM,MAAM,oBAAoB,GAAG,aAAa,GAAG;IACjD,OAAO,EAAE,MAAM,GAAG,SAAS,CAAC;CAC7B,CAAC;AAEF,MAAM,MAAM,iBAAiB,GAAG,oBAAoB,CAAC"}
package/lib/types/cli/utils.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import { Command, OptionValues } from 'commander';
-import { Currency, TokenType, TurboAuthenticatedClient, TurboUnauthenticatedConfiguration } from '../node/index.js';
+import { Currency, TokenType, TurboAuthenticatedClient, TurboChunkingMode, TurboUnauthenticatedConfiguration } from '../node/index.js';
 import { AddressOptions, GlobalOptions, TokenPriceOptions, UploadFolderOptions, UploadOptions, WalletOptions } from './types.js';
 export declare function exitWithErrorLog(error: unknown): void;
 export declare function runCommand<T extends OptionValues>(command: Command, action: (options: T) => Promise<void>): Promise<void>;
@@ -27,6 +27,9 @@ export declare function getUploadFolderOptions(options: UploadFolderOptions): {
     fallbackFile: string | undefined;
     disableManifest: boolean;
     maxConcurrentUploads: number;
+    maxChunkConcurrency?: number;
+    chunkByteCount?: number;
+    chunkingMode?: TurboChunkingMode;
 };
 /**
  * Parse tags array from CLI input into Tag array
@@ -46,5 +49,10 @@ export declare function currencyFromOptions<T extends GlobalOptions & {
     currency?: string;
 }>(options: T): Currency | undefined;
 export declare function requiredByteCountFromOptions({ byteCount, }: TokenPriceOptions): number;
+export declare function getChunkingOptions<O extends UploadOptions>(options: O): {
+    chunkingMode?: TurboChunkingMode;
+    chunkByteCount?: number;
+    maxChunkConcurrency?: number;
+};
 export {};
 //# sourceMappingURL=utils.d.ts.map
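Only the signature of the new `getChunkingOptions` helper appears here; the CLI types declare the raw option values as strings, so the helper presumably converts them to the numbers the upload service expects. A hypothetical sketch consistent with the declarations (the real body is not shown in this diff):

```ts
// Minimal local stand-ins mirroring the declarations above.
type TurboChunkingMode = 'auto' | 'force' | 'disabled';
type UploadOptions = {
  chunkingMode: TurboChunkingMode | undefined;
  chunkByteCount: string | undefined;
  maxChunkConcurrency: string | undefined;
};

// Hypothetical implementation: narrows string CLI values to numbers.
function getChunkingOptions<O extends UploadOptions>(options: O): {
  chunkingMode?: TurboChunkingMode;
  chunkByteCount?: number;
  maxChunkConcurrency?: number;
} {
  return {
    chunkingMode: options.chunkingMode,
    chunkByteCount:
      options.chunkByteCount !== undefined
        ? Number(options.chunkByteCount)
        : undefined,
    maxChunkConcurrency:
      options.maxChunkConcurrency !== undefined
        ? Number(options.maxChunkConcurrency)
        : undefined,
  };
}
```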