@ardrive/turbo-sdk 1.30.0-alpha.1 → 1.31.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/bundles/web.bundle.min.js +404 -16
  2. package/lib/cjs/cli/commands/uploadFile.js +1 -0
  3. package/lib/cjs/cli/commands/uploadFolder.js +4 -1
  4. package/lib/cjs/cli/options.js +17 -1
  5. package/lib/cjs/cli/types.js +0 -15
  6. package/lib/cjs/cli/utils.js +13 -0
  7. package/lib/cjs/common/chunked.js +359 -0
  8. package/lib/cjs/common/http.js +1 -0
  9. package/lib/cjs/common/turbo.js +10 -2
  10. package/lib/cjs/common/upload.js +39 -9
  11. package/lib/cjs/types.js +2 -1
  12. package/lib/cjs/version.js +1 -1
  13. package/lib/esm/cli/commands/uploadFile.js +2 -1
  14. package/lib/esm/cli/commands/uploadFolder.js +4 -1
  15. package/lib/esm/cli/options.js +17 -1
  16. package/lib/esm/cli/types.js +0 -15
  17. package/lib/esm/cli/utils.js +12 -0
  18. package/lib/esm/common/chunked.js +352 -0
  19. package/lib/esm/common/http.js +1 -0
  20. package/lib/esm/common/turbo.js +10 -2
  21. package/lib/esm/common/upload.js +39 -9
  22. package/lib/esm/types.js +1 -0
  23. package/lib/esm/version.js +1 -1
  24. package/lib/types/cli/commands/uploadFile.d.ts.map +1 -1
  25. package/lib/types/cli/commands/uploadFolder.d.ts.map +1 -1
  26. package/lib/types/cli/options.d.ts +45 -2
  27. package/lib/types/cli/options.d.ts.map +1 -1
  28. package/lib/types/cli/types.d.ts +4 -0
  29. package/lib/types/cli/types.d.ts.map +1 -1
  30. package/lib/types/cli/utils.d.ts +9 -1
  31. package/lib/types/cli/utils.d.ts.map +1 -1
  32. package/lib/types/common/chunked.d.ts +44 -0
  33. package/lib/types/common/chunked.d.ts.map +1 -0
  34. package/lib/types/common/http.d.ts +1 -1
  35. package/lib/types/common/http.d.ts.map +1 -1
  36. package/lib/types/common/turbo.d.ts +2 -2
  37. package/lib/types/common/turbo.d.ts.map +1 -1
  38. package/lib/types/common/upload.d.ts +3 -3
  39. package/lib/types/common/upload.d.ts.map +1 -1
  40. package/lib/types/types.d.ts +16 -4
  41. package/lib/types/types.d.ts.map +1 -1
  42. package/lib/types/version.d.ts +1 -1
  43. package/package.json +9 -5
package/lib/cjs/common/chunked.js ADDED
@@ -0,0 +1,359 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ChunkedUploader = exports.defaultChunkByteCount = exports.minChunkByteCount = exports.maxChunkByteCount = exports.defaultMaxChunkConcurrency = void 0;
+ exports.splitIntoChunks = splitIntoChunks;
+ exports.splitReadableIntoChunks = splitReadableIntoChunks;
+ exports.splitReadableStreamIntoChunks = splitReadableStreamIntoChunks;
+ /**
+  * Copyright (C) 2022-2024 Permanent Data Solutions, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  * http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+ const axios_1 = require("axios");
+ const plimit_lit_1 = require("plimit-lit");
+ const types_js_1 = require("../types.js");
+ const events_js_1 = require("./events.js");
+ const logger_js_1 = require("./logger.js");
+ const fiveMiB = 5 * 1024 * 1024; // 5 MiB
+ const fiveHundredMiB = fiveMiB * 100; // 500 MiB
+ exports.defaultMaxChunkConcurrency = 5;
+ exports.maxChunkByteCount = fiveHundredMiB;
+ exports.minChunkByteCount = fiveMiB;
+ exports.defaultChunkByteCount = exports.minChunkByteCount;
+ const backlogQueueFactor = 2;
+ const chunkingHeader = { 'x-chunking-version': '2' };
+ /**
+  * Performs a chunked upload by splitting the stream into fixed-size buffers,
+  * uploading them in parallel, and emitting progress/error events.
+  */
+ class ChunkedUploader {
+     constructor({ http, token, maxChunkConcurrency = exports.defaultMaxChunkConcurrency, chunkByteCount = exports.defaultChunkByteCount, logger = logger_js_1.TurboWinstonLogger.default, chunkingMode = 'auto', dataItemByteCount, }) {
+         this.chunkByteCount = chunkByteCount;
+         this.maxChunkConcurrency = maxChunkConcurrency;
+         this.http = http;
+         this.token = token;
+         this.logger = logger;
+         this.assertChunkParams({
+             chunkByteCount,
+             chunkingMode,
+             maxChunkConcurrency,
+         });
+         this.shouldUseChunkUploader = this.shouldChunkUpload({
+             chunkByteCount,
+             chunkingMode,
+             dataItemByteCount,
+         });
+         this.maxBacklogQueue = this.maxChunkConcurrency * backlogQueueFactor;
+     }
+     shouldChunkUpload({ chunkByteCount, chunkingMode, dataItemByteCount, }) {
+         if (chunkingMode === 'disabled') {
+             return false;
+         }
+         if (chunkingMode === 'force') {
+             return true;
+         }
+         const isMoreThanTwoChunksOfData = dataItemByteCount > chunkByteCount * 2;
+         return isMoreThanTwoChunksOfData;
+     }
+     assertChunkParams({ chunkByteCount, chunkingMode, maxChunkConcurrency, }) {
+         if (Number.isNaN(maxChunkConcurrency) ||
+             !Number.isInteger(maxChunkConcurrency) ||
+             maxChunkConcurrency < 1) {
+             throw new Error('Invalid max chunk concurrency. Must be an integer of at least 1.');
+         }
+         if (Number.isNaN(chunkByteCount) ||
+             !Number.isInteger(chunkByteCount) ||
+             chunkByteCount < fiveMiB ||
+             chunkByteCount > fiveHundredMiB) {
+             throw new Error('Invalid chunk size. Must be an integer between 5 MiB and 500 MiB.');
+         }
+         if (typeof chunkingMode !== 'string' ||
+             !types_js_1.validChunkingModes.includes(chunkingMode)) {
+             throw new Error(`Invalid chunking mode. Must be one of: ${types_js_1.validChunkingModes.join(', ')}`);
+         }
+     }
+     /**
+      * Initialize or resume an upload session, returning the upload ID.
+      */
+     async initUpload() {
+         const res = await this.http.get({
+             endpoint: `/chunks/${this.token}/-1/-1?chunkSize=${this.chunkByteCount}`,
+             headers: chunkingHeader,
+         });
+         if (res.chunkSize !== this.chunkByteCount) {
+             this.logger.warn('Chunk size mismatch! Overriding with server value.', {
+                 expected: this.chunkByteCount,
+                 actual: res.chunkSize,
+             });
+             this.chunkByteCount = res.chunkSize;
+         }
+         return res.id;
+     }
+     async upload({ dataItemSizeFactory, dataItemStreamFactory, dataItemOpts, signal, events, }) {
+         const uploadId = await this.initUpload();
+         const dataItemByteCount = dataItemSizeFactory();
+         const emitter = new events_js_1.TurboEventEmitter(events);
+         const { stream, resume } = (0, events_js_1.createStreamWithUploadEvents)({
+             data: dataItemStreamFactory(),
+             dataSize: dataItemByteCount,
+             emitter,
+         });
+         this.logger.debug(`Starting chunked upload`, {
+             token: this.token,
+             uploadId,
+             totalSize: dataItemByteCount,
+             chunkByteCount: this.chunkByteCount,
+             maxChunkConcurrency: this.maxChunkConcurrency,
+             inputStreamType: isReadableStream(stream) ? 'ReadableStream' : 'Readable',
+         });
+         const inFlight = new Set();
+         const internalAbort = new AbortController();
+         const combinedSignal = combineAbortSignals([internalAbort.signal, signal]);
+         const limit = (0, plimit_lit_1.pLimit)(this.maxChunkConcurrency);
+         let currentOffset = 0;
+         let currentChunkPartNumber = 0;
+         let firstError;
+         let uploadedBytes = 0;
+         const chunks = splitIntoChunks(stream, this.chunkByteCount);
+         resume();
+         for await (const chunk of chunks) {
+             if (combinedSignal?.aborted) {
+                 internalAbort.abort();
+                 await Promise.allSettled(inFlight);
+                 firstError ??= new axios_1.CanceledError();
+                 break;
+             }
+             const chunkPartNumber = ++currentChunkPartNumber;
+             const chunkByteCount = chunk.length;
+             const chunkOffset = currentOffset;
+             currentOffset += chunkByteCount;
+             this.logger.debug('Queueing chunk', {
+                 chunkPartNumber,
+                 chunkOffset,
+                 chunkByteCount,
+             });
+             const promise = limit(async () => {
+                 if (firstError !== undefined) {
+                     return;
+                 }
+                 this.logger.debug('Uploading chunk', {
+                     chunkPartNumber,
+                     chunkOffset,
+                     chunkByteCount,
+                 });
+                 await this.http.post({
+                     endpoint: `/chunks/${this.token}/${uploadId}/${chunkOffset}`,
+                     data: chunk,
+                     headers: {
+                         'Content-Type': 'application/octet-stream',
+                         ...chunkingHeader,
+                     },
+                     signal: combinedSignal,
+                 });
+                 uploadedBytes += chunkByteCount;
+                 this.logger.debug('Chunk uploaded', {
+                     chunkPartNumber,
+                     chunkOffset,
+                     chunkByteCount,
+                 });
+                 emitter.emit('upload-progress', {
+                     processedBytes: uploadedBytes,
+                     totalBytes: dataItemByteCount,
+                 });
+             }).catch((err) => {
+                 this.logger.error('Chunk upload failed', {
+                     id: chunkPartNumber,
+                     offset: chunkOffset,
+                     size: chunkByteCount,
+                     err,
+                 });
+                 emitter.emit('upload-error', err);
+                 internalAbort.abort(err);
+                 firstError = firstError ?? err;
+             });
+             inFlight.add(promise);
+             promise.finally(() => inFlight.delete(promise));
+             if (inFlight.size >= this.maxBacklogQueue) {
+                 await Promise.race(inFlight);
+                 if (combinedSignal?.aborted) {
+                     internalAbort.abort();
+                     await Promise.allSettled(inFlight);
+                     firstError ??= new axios_1.CanceledError();
+                     break;
+                 }
+             }
+         }
+         await Promise.all(inFlight);
+         if (firstError !== undefined) {
+             throw firstError;
+         }
+         const paidByHeader = {};
+         if (dataItemOpts?.paidBy !== undefined) {
+             paidByHeader['x-paid-by'] = Array.isArray(dataItemOpts.paidBy)
+                 ? dataItemOpts.paidBy.join(',')
+                 : dataItemOpts.paidBy;
+         }
+         // TODO: Async Finalize
+         // Finalize and reconstruct server-side
+         const finalizeResponse = await this.http.post({
+             endpoint: `/chunks/${this.token}/${uploadId}/-1`,
+             data: Buffer.alloc(0),
+             headers: {
+                 'Content-Type': 'application/octet-stream',
+                 ...paidByHeader,
+                 ...chunkingHeader,
+             },
+             signal: combinedSignal,
+         });
+         emitter.emit('upload-success');
+         return finalizeResponse;
+     }
+ }
+ exports.ChunkedUploader = ChunkedUploader;
+ /**
+  * Yield Buffers of up to `chunkByteCount`, coalescing whatever small pieces
+  * the source produces into proper slices.
+  */
+ async function* splitIntoChunks(source, chunkByteCount) {
+     if (isReadableStream(source)) {
+         yield* splitReadableStreamIntoChunks(source, chunkByteCount);
+     }
+     else {
+         yield* splitReadableIntoChunks(source, chunkByteCount);
+     }
+ }
+ async function* splitReadableIntoChunks(source, chunkByteCount) {
+     const queue = [];
+     let total = 0;
+     let encoder;
+     for await (const piece of source) {
+         const u8 = piece instanceof Uint8Array
+             ? new Uint8Array(piece.buffer, piece.byteOffset, piece.byteLength)
+             : (encoder ??= new TextEncoder()).encode(String(piece));
+         queue.push(u8);
+         total += u8.length;
+         // Emit full chunks
+         while (total >= chunkByteCount) {
+             const out = new Uint8Array(chunkByteCount);
+             let remaining = out.length;
+             let off = 0;
+             while (remaining > 0) {
+                 const head = queue[0];
+                 const take = Math.min(remaining, head.length);
+                 out.set(head.subarray(0, take), off);
+                 off += take;
+                 remaining -= take;
+                 if (take === head.length) {
+                     queue.shift();
+                 }
+                 else {
+                     queue[0] = head.subarray(take);
+                 }
+             }
+             total -= chunkByteCount;
+             // Yield a Buffer view (no copy)
+             yield Buffer.from(out.buffer, out.byteOffset, out.byteLength);
+         }
+     }
+     // Remainder
+     if (total > 0) {
+         const out = new Uint8Array(total);
+         let off = 0;
+         while (queue.length > 0) {
+             const head = queue.shift(); // safe due to loop condition
+             out.set(head, off);
+             off += head.length;
+         }
+         yield Buffer.from(out.buffer, out.byteOffset, out.byteLength);
+     }
+ }
+ async function* splitReadableStreamIntoChunks(source, chunkByteCount) {
+     const reader = source.getReader();
+     const queue = [];
+     let total = 0;
+     try {
+         while (true) {
+             const { value, done } = await reader.read();
+             if (done)
+                 break;
+             // Ensure we keep a plain view (avoids surprises if the producer reuses buffers)
+             const u8 = new Uint8Array(value.buffer, value.byteOffset, value.byteLength);
+             queue.push(u8);
+             total += u8.length;
+             while (total >= chunkByteCount) {
+                 const out = new Uint8Array(chunkByteCount);
+                 let remaining = out.length;
+                 let off = 0;
+                 while (remaining > 0) {
+                     const head = queue[0];
+                     const take = Math.min(remaining, head.length);
+                     out.set(head.subarray(0, take), off);
+                     off += take;
+                     remaining -= take;
+                     if (take === head.length) {
+                         queue.shift();
+                     }
+                     else {
+                         queue[0] = head.subarray(take);
+                     }
+                 }
+                 total -= chunkByteCount;
+                 yield Buffer.from(out.buffer, out.byteOffset, out.byteLength);
+             }
+         }
+         if (total > 0) {
+             const out = new Uint8Array(total);
+             let off = 0;
+             while (queue.length > 0) {
+                 const head = queue.shift(); // safe due to loop condition
+                 out.set(head, off);
+                 off += head.length;
+             }
+             yield Buffer.from(out.buffer, out.byteOffset, out.byteLength);
+         }
+     }
+     finally {
+         reader.releaseLock();
+     }
+ }
+ function isReadableStream(source) {
+     // Prefer instanceof if available, otherwise use a safe duck-typing check
+     if (typeof ReadableStream !== 'undefined' &&
+         source instanceof ReadableStream) {
+         return true;
+     }
+     return (source !== null &&
+         typeof source === 'object' &&
+         'getReader' in source &&
+         typeof source.getReader === 'function');
+ }
+ function combineAbortSignals(signals) {
+     const real = signals.filter(Boolean);
+     if (real.length === 0)
+         return undefined;
+     const anyFn = AbortSignal.any;
+     if (typeof anyFn === 'function') {
+         return anyFn(real);
+     }
+     const controller = new AbortController();
+     for (const s of real) {
+         const sig = s;
+         if (sig.aborted) {
+             controller.abort(sig.reason);
+             break;
+         }
+         const onAbort = () => controller.abort(sig.reason);
+         s.addEventListener('abort', onAbort, { once: true });
+     }
+     return controller.signal;
+ }
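
Aside (not part of the diff): a minimal sketch of what the new splitter yields, assuming the compiled CJS module is reachable at the deep path shown above (the package's exports map may not expose it; the path and sizes are illustrative). With the 5 MiB default chunk size, 12 MiB of input coalesces into two full chunks plus a remainder:

```js
// Sketch: pipe a Node Readable through splitIntoChunks.
const { Readable } = require('node:stream');
const {
  splitIntoChunks,
  defaultChunkByteCount, // 5 MiB
} = require('@ardrive/turbo-sdk/lib/cjs/common/chunked.js');

(async () => {
  const source = Readable.from([Buffer.alloc(12 * 1024 * 1024)]);
  for await (const chunk of splitIntoChunks(source, defaultChunkByteCount)) {
    console.log(chunk.length); // 5242880, 5242880, then 2097152
  }
})();
```

Note the 'auto' heuristic in shouldChunkUpload: a data item only chunks when it exceeds two chunks' worth of bytes, so this 12 MiB example would qualify (12 MiB > 10 MiB), while a 10 MiB item would fall back to the single-request path.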
package/lib/cjs/common/http.js CHANGED
@@ -45,6 +45,7 @@ class TurboHTTPService {
      // See: https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API#body
      const { body, duplex } = await toFetchBody(data);
      try {
+         this.logger.debug('Posting data via fetch', { endpoint, headers });
          const res = await fetch(this.axios.defaults.baseURL + endpoint, {
              method: 'POST',
              headers,
package/lib/cjs/common/turbo.js CHANGED
@@ -160,8 +160,16 @@ class TurboAuthenticatedClient extends TurboUnauthenticatedClient {
      /**
       * Signs and uploads raw data to the Turbo Upload Service.
       */
-     upload({ data, dataItemOpts, signal, events, }) {
-         return this.uploadService.upload({ data, dataItemOpts, signal, events });
+     upload({ data, dataItemOpts, signal, events, chunkByteCount, chunkingMode, maxChunkConcurrency, }) {
+         return this.uploadService.upload({
+             data,
+             dataItemOpts,
+             signal,
+             events,
+             chunkByteCount,
+             chunkingMode,
+             maxChunkConcurrency,
+         });
      }
      uploadFile(params) {
          return this.uploadService.uploadFile(params);
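
From the consumer side, the widened signature just threads three optional knobs through to the upload service; a hedged sketch using the SDK's existing TurboFactory entry point (the wallet and payload are placeholders):

```js
import { TurboFactory } from '@ardrive/turbo-sdk';

const turbo = TurboFactory.authenticated({ privateKey: jwk }); // jwk: placeholder wallet
const result = await turbo.upload({
  data: largeBuffer,                // placeholder payload
  chunkingMode: 'auto',             // 'auto' | 'force' | 'disabled'
  chunkByteCount: 10 * 1024 * 1024, // 5 MiB-500 MiB, per assertChunkParams
  maxChunkConcurrency: 4,           // integer >= 1
});
```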
package/lib/cjs/common/upload.js CHANGED
@@ -21,6 +21,7 @@ const plimit_lit_1 = require("plimit-lit");
  const axiosClient_js_1 = require("../utils/axiosClient.js");
  const common_js_1 = require("../utils/common.js");
  const errors_js_1 = require("../utils/errors.js");
+ const chunked_js_1 = require("./chunked.js");
  const events_js_1 = require("./events.js");
  const http_js_1 = require("./http.js");
  const logger_js_1 = require("./logger.js");
@@ -95,7 +96,7 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
      /**
       * Signs and uploads raw data to the Turbo Upload Service.
       */
-     upload({ data, dataItemOpts, signal, events, }) {
+     upload({ data, dataItemOpts, signal, events, chunkByteCount, chunkingMode, maxChunkConcurrency, }) {
          // This function is intended to be usable in both Node and browser environments.
          if ((0, common_js_1.isBlob)(data)) {
              const streamFactory = () => data.stream();
@@ -123,6 +124,9 @@
              signal,
              dataItemOpts,
              events,
+             chunkByteCount,
+             chunkingMode,
+             maxChunkConcurrency,
          });
      }
      resolveUploadFileConfig(params) {
@@ -163,22 +167,42 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
          let lastStatusCode = undefined; // Store the last status code for throwing
          const emitter = new events_js_1.TurboEventEmitter(events);
          // avoid duplicating signing on failures here - these errors will immediately be thrown
-         // TODO: create a SigningError class and throw that instead of the generic Error
-         const { dataItemStreamFactory, dataItemSizeFactory } = await this.signer.signDataItem({
-             fileStreamFactory,
-             fileSizeFactory,
-             dataItemOpts,
-             emitter,
-         });
          // TODO: move the retry implementation to the http class, and avoid awaiting here. This will standardize the retry logic across all upload methods.
          while (retries < maxRetries) {
              if (signal?.aborted) {
                  throw new axios_1.CanceledError();
              }
+             // TODO: create a SigningError class and throw that instead of the generic Error
+             const { dataItemStreamFactory, dataItemSizeFactory } = await this.signer.signDataItem({
+                 fileStreamFactory,
+                 fileSizeFactory,
+                 dataItemOpts,
+                 emitter,
+             });
              // Now that we have the signed data item, we can upload it using the uploadSignedDataItem method
              // which will create a new emitter with upload events. We await
              // this result due to the wrapped retry logic of this method.
              try {
+                 const { chunkByteCount, maxChunkConcurrency } = params;
+                 const chunkedUploader = new chunked_js_1.ChunkedUploader({
+                     http: this.httpService,
+                     token: this.token,
+                     maxChunkConcurrency,
+                     chunkByteCount,
+                     logger: this.logger,
+                     dataItemByteCount: dataItemSizeFactory(),
+                     chunkingMode: params.chunkingMode,
+                 });
+                 if (chunkedUploader.shouldUseChunkUploader) {
+                     const response = await chunkedUploader.upload({
+                         dataItemStreamFactory,
+                         dataItemSizeFactory,
+                         dataItemOpts,
+                         signal,
+                         events,
+                     });
+                     return response;
+                 }
                  const response = await this.uploadSignedDataItem({
                      dataItemStreamFactory,
                      dataItemSizeFactory,
@@ -267,7 +291,7 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
      */
      async uploadFolder(params) {
          this.logger.debug('Uploading folder...', { params });
-         const { dataItemOpts, signal, manifestOptions = {}, maxConcurrentUploads = 1, throwOnFailure = true, } = params;
+         const { dataItemOpts, signal, manifestOptions = {}, maxConcurrentUploads = 1, throwOnFailure = true, maxChunkConcurrency, chunkByteCount, chunkingMode, } = params;
          const { disableManifest, indexFile, fallbackFile } = manifestOptions;
          const paths = {};
          const response = {
@@ -291,6 +315,9 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
                  fileSizeFactory: () => this.getFileSize(file),
                  signal,
                  dataItemOpts: dataItemOptsWithContentType,
+                 chunkByteCount,
+                 maxChunkConcurrency,
+                 chunkingMode,
              });
              const relativePath = this.getRelativePath(file, params);
              paths[relativePath] = { id: result.id };
@@ -336,6 +363,9 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
              fileSizeFactory: () => manifestBuffer.byteLength,
              signal,
              dataItemOpts: { ...dataItemOpts, tags: tagsWithManifestContentType },
+             chunkByteCount,
+             maxChunkConcurrency,
+             chunkingMode,
          });
          return {
              ...response,
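
Two behavioral consequences of these hunks are worth flagging: signing now happens inside the retry loop, so each attempt re-signs the data item from fresh stream factories, and when shouldUseChunkUploader is true the chunked path returns before uploadSignedDataItem is ever reached. At the folder level the new knobs ride alongside the existing per-file concurrency; a sketch with illustrative values (assuming an authenticated `turbo` client as above):

```js
// Sketch: chunking options thread through uploadFolder to every file upload.
const result = await turbo.uploadFolder({
  folderPath: './public',          // placeholder path
  maxConcurrentUploads: 3,         // files uploaded in parallel
  chunkingMode: 'auto',
  chunkByteCount: 5 * 1024 * 1024, // the minimum (and default) chunk size
  maxChunkConcurrency: 5,          // chunks in parallel per file
});
```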
package/lib/cjs/types.js CHANGED
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.isJWK = exports.isWebUploadFolderParams = exports.isNodeUploadFolderParams = exports.tokenTypes = exports.fiatCurrencyTypes = void 0;
+ exports.validChunkingModes = exports.isJWK = exports.isWebUploadFolderParams = exports.isNodeUploadFolderParams = exports.tokenTypes = exports.fiatCurrencyTypes = void 0;
  exports.isCurrency = isCurrency;
  exports.isKyvePrivateKey = isKyvePrivateKey;
  exports.isEthPrivateKey = isEthPrivateKey;
@@ -54,3 +54,4 @@ function isSolanaWalletAdapter(walletAdapter) {
  function isEthereumWalletAdapter(walletAdapter) {
      return 'getSigner' in walletAdapter;
  }
+ exports.validChunkingModes = ['force', 'disabled', 'auto'];
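
The new validChunkingModes export is the same array assertChunkParams validates against; a small sketch of the equivalent guard in consumer code (the deep CJS import path is illustrative):

```js
const { validChunkingModes } = require('@ardrive/turbo-sdk/lib/cjs/types.js');

const mode = process.env.CHUNKING_MODE ?? 'auto';
if (!validChunkingModes.includes(mode)) {
  throw new Error(`Invalid chunking mode. Must be one of: ${validChunkingModes.join(', ')}`);
}
```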
package/lib/cjs/version.js CHANGED
@@ -17,4 +17,4 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.version = void 0;
  // AUTOMATICALLY GENERATED FILE - DO NOT TOUCH
- exports.version = '1.30.0-alpha.1';
+ exports.version = '1.31.0-alpha.1';
package/lib/esm/cli/commands/uploadFile.js CHANGED
@@ -15,7 +15,7 @@
  */
  import { createReadStream, statSync } from 'fs';
  import { turboCliTags } from '../constants.js';
- import { getTagsFromOptions, paidByFromOptions, turboFromOptions, } from '../utils.js';
+ import { getChunkingOptions, getTagsFromOptions, paidByFromOptions, turboFromOptions, } from '../utils.js';
  export async function uploadFile(options) {
      const { filePath } = options;
      if (filePath === undefined) {
@@ -29,6 +29,7 @@ export async function uploadFile(options) {
          fileStreamFactory: () => createReadStream(filePath),
          fileSizeFactory: () => fileSize,
          dataItemOpts: { tags: [...turboCliTags, ...customTags], paidBy },
+         ...getChunkingOptions(options),
      });
      console.log('Uploaded file:', JSON.stringify(result, null, 2));
  }
package/lib/esm/cli/commands/uploadFolder.js CHANGED
@@ -18,7 +18,7 @@ import { getTagsFromOptions, getUploadFolderOptions, paidByFromOptions, turboFro
  export async function uploadFolder(options) {
      const turbo = await turboFromOptions(options);
      const paidBy = await paidByFromOptions(options, turbo);
-     const { disableManifest, fallbackFile, folderPath, indexFile, maxConcurrentUploads, } = getUploadFolderOptions(options);
+     const { disableManifest, fallbackFile, folderPath, indexFile, maxConcurrentUploads, chunkByteCount, chunkingMode, maxChunkConcurrency, } = getUploadFolderOptions(options);
      const customTags = getTagsFromOptions(options);
      const result = await turbo.uploadFolder({
          folderPath: folderPath,
@@ -29,6 +29,9 @@ export async function uploadFolder(options) {
              fallbackFile,
          },
          maxConcurrentUploads,
+         chunkByteCount,
+         chunkingMode,
+         maxChunkConcurrency,
      });
      console.log('Uploaded folder:', JSON.stringify(result, null, 2));
  }
package/lib/esm/cli/options.js CHANGED
@@ -131,7 +131,7 @@ export const optionMap = {
      },
      maxConcurrency: {
          alias: '--max-concurrency <maxConcurrency>',
-         description: 'Maximum number of concurrent uploads',
+         description: 'Maximum number of concurrent file uploads',
      },
      paidBy: {
          alias: '--paid-by <paidBy...>',
@@ -156,6 +156,19 @@ export const optionMap = {
          alias: '--byte-count <byteCount>',
          description: 'Number of bytes to use for the action',
      },
+     maxChunkConcurrency: {
+         alias: '--max-chunk-concurrency <maxChunkConcurrency>',
+         description: 'Maximum number of concurrent chunks to upload per file',
+     },
+     chunkByteCount: {
+         alias: '--chunk-byte-count <chunkByteCount>',
+         description: 'Size of each chunk in bytes',
+     },
+     chunkingMode: {
+         alias: '--chunking-mode <chunkingMode>',
+         description: 'Chunking mode to use for the upload. Can be "auto", "force" or "disabled". Defaults to "auto".',
+         default: 'auto',
+     },
  };
  export const walletOptions = [
      optionMap.walletFile,
@@ -179,6 +192,9 @@ export const uploadOptions = [
      optionMap.ignoreApprovals,
      optionMap.useSignerBalanceFirst,
      optionMap.tags,
+     optionMap.maxChunkConcurrency,
+     optionMap.chunkByteCount,
+     optionMap.chunkingMode,
  ];
  export const uploadFolderOptions = [
      ...uploadOptions,
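
Since the three options are appended to uploadOptions (and uploadFolderOptions spreads uploadOptions), they surface on both upload commands. A hypothetical invocation, with the command shape assumed from the existing CLI and 10485760 bytes = 10 MiB:

```sh
turbo upload-file --file-path ./large-file.bin \
  --chunking-mode force \
  --chunk-byte-count 10485760 \
  --max-chunk-concurrency 4
```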
package/lib/esm/cli/types.js CHANGED
@@ -1,16 +1 @@
- /**
-  * Copyright (C) 2022-2024 Permanent Data Solutions, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
  export {};
package/lib/esm/cli/utils.js CHANGED
@@ -214,6 +214,7 @@ export function getUploadFolderOptions(options) {
          fallbackFile: options.fallbackFile,
          disableManifest: !options.manifest,
          maxConcurrentUploads: +(options.maxConcurrency ?? 1),
+         ...getChunkingOptions(options),
      };
  }
  /**
@@ -261,3 +262,14 @@ export function requiredByteCountFromOptions({ byteCount, }) {
      }
      return byteCountValue;
  }
+ export function getChunkingOptions(options) {
+     return {
+         chunkingMode: options.chunkingMode,
+         chunkByteCount: options.chunkByteCount !== undefined
+             ? +options.chunkByteCount
+             : undefined,
+         maxChunkConcurrency: options.maxChunkConcurrency !== undefined
+             ? +options.maxChunkConcurrency
+             : undefined,
+     };
+ }
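
One subtlety: the CLI hands option values over as strings, and getChunkingOptions coerces the numeric ones with unary + while passing chunkingMode through untouched. A sketch of the expected mapping (values illustrative):

```js
getChunkingOptions({
  chunkingMode: 'force',
  chunkByteCount: '10485760',  // string from the CLI -> 10485760
  maxChunkConcurrency: '4',    // string from the CLI -> 4
});
// => { chunkingMode: 'force', chunkByteCount: 10485760, maxChunkConcurrency: 4 }
```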