@ardrive/turbo-sdk 1.30.0 → 1.31.0-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. package/README.md +36 -2
  2. package/bundles/web.bundle.min.js +490 -17
  3. package/lib/cjs/cli/commands/uploadFile.js +1 -0
  4. package/lib/cjs/cli/commands/uploadFolder.js +5 -1
  5. package/lib/cjs/cli/options.js +22 -1
  6. package/lib/cjs/cli/types.js +0 -15
  7. package/lib/cjs/cli/utils.js +14 -0
  8. package/lib/cjs/common/chunked.js +414 -0
  9. package/lib/cjs/common/http.js +1 -0
  10. package/lib/cjs/common/turbo.js +10 -2
  11. package/lib/cjs/common/upload.js +42 -10
  12. package/lib/cjs/types.js +14 -1
  13. package/lib/cjs/version.js +1 -1
  14. package/lib/esm/cli/commands/uploadFile.js +2 -1
  15. package/lib/esm/cli/commands/uploadFolder.js +5 -1
  16. package/lib/esm/cli/options.js +22 -1
  17. package/lib/esm/cli/types.js +0 -15
  18. package/lib/esm/cli/utils.js +13 -0
  19. package/lib/esm/common/chunked.js +407 -0
  20. package/lib/esm/common/http.js +1 -0
  21. package/lib/esm/common/turbo.js +10 -2
  22. package/lib/esm/common/upload.js +42 -10
  23. package/lib/esm/types.js +13 -0
  24. package/lib/esm/version.js +1 -1
  25. package/lib/types/cli/commands/uploadFile.d.ts.map +1 -1
  26. package/lib/types/cli/commands/uploadFolder.d.ts.map +1 -1
  27. package/lib/types/cli/options.d.ts +58 -2
  28. package/lib/types/cli/options.d.ts.map +1 -1
  29. package/lib/types/cli/types.d.ts +5 -0
  30. package/lib/types/cli/types.d.ts.map +1 -1
  31. package/lib/types/cli/utils.d.ts +3 -2
  32. package/lib/types/cli/utils.d.ts.map +1 -1
  33. package/lib/types/common/chunked.d.ts +48 -0
  34. package/lib/types/common/chunked.d.ts.map +1 -0
  35. package/lib/types/common/http.d.ts +1 -1
  36. package/lib/types/common/http.d.ts.map +1 -1
  37. package/lib/types/common/turbo.d.ts +2 -2
  38. package/lib/types/common/turbo.d.ts.map +1 -1
  39. package/lib/types/common/upload.d.ts +3 -3
  40. package/lib/types/common/upload.d.ts.map +1 -1
  41. package/lib/types/types.d.ts +48 -4
  42. package/lib/types/types.d.ts.map +1 -1
  43. package/lib/types/version.d.ts +1 -1
  44. package/package.json +9 -5
package/README.md CHANGED
@@ -625,6 +625,40 @@ const uploadResult = await turbo.uploadFile({
625
625
  });
626
626
  ```
627
627
 
628
+ ##### Customize Multi-Part Upload Behavior
629
+
630
+ By default, the Turbo upload methods will split files that are larger than 10 MiB into chunks and send these chunks to the upload service multi-part endpoints. This behavior can be customized with the following inputs:
631
+
632
+ - `chunkByteCount`: The maximum size in bytes for each chunk. Must be between 5 MiB and 500 MiB. Defaults to 5 MiB.
633
+ - `maxChunkConcurrency`: The maximum number of chunks to upload concurrently. Defaults to 5. Reducing concurrency will slow down uploads but reduces memory utilization and serializes network calls. Increasing it will upload faster but can strain available resources.
634
+ - `chunkingMode`: The chunking mode to use. Can be 'auto', 'force', or 'disabled'. Defaults to 'auto'. Auto behavior means chunking is enabled if the file would be split into at least three chunks.
635
+ - `maxFinalizeMs`: The maximum time in milliseconds to wait for the finalization of all chunks after the last chunk is uploaded. Defaults to 1 minute per GiB of the total file size.
636
+
637
+ ```typescript
638
+ // Customize chunking behavior
639
+ await turbo.upload({
640
+ ...params,
641
+ chunkByteCount: 1024 * 1024 * 500, // Max chunk size
642
+ maxChunkConcurrency: 1, // Minimize concurrency
643
+ });
644
+ ```
645
+
646
+ ```typescript
647
+ // Disable chunking behavior
648
+ await turbo.upload({
649
+ ...params,
650
+ chunkingMode: 'disabled',
651
+ });
652
+ ```
653
+
654
+ ```typescript
655
+ // Force chunking behavior
656
+ await turbo.upload({
657
+ ...params,
658
+ chunkingMode: 'force',
659
+ });
660
+ ```
661
+
628
662
  #### `uploadFolder({ folderPath, files, dataItemOpts, signal, maxConcurrentUploads, throwOnFailure, manifestOptions })`
629
663
 
630
664
  Signs and uploads a folder of files. For NodeJS, the `folderPath` of the folder to upload is required. For the browser, an array of `files` is required. The `dataItemOpts` is an optional object that can be used to configure tags, target, and anchor for the data item upload. The `signal` is an optional [AbortSignal] that can be used to cancel the upload or timeout the request. The `maxConcurrentUploads` is an optional number that can be used to limit the number of concurrent uploads. The `throwOnFailure` is an optional boolean that can be used to throw an error if any upload fails. The `manifestOptions` is an optional object that can be used to configure the manifest file, including a custom index file, fallback file, or whether to disable manifests altogether. Manifests are enabled by default.
@@ -765,8 +799,8 @@ Shares credits from the connected wallet to the provided native address and appr
765
799
  ```typescript
766
800
  const { approvalDataItemId, approvedWincAmount } = await turbo.shareCredits({
767
801
  approvedAddress: '2cor...VUa',
768
- approvedWincAmount: 0.08315565032,
769
- expiresBySeconds: 3600,
802
+ approvedWincAmount: 800_000_000_000, // 0.8 Credits
803
+ expiresBySeconds: 3600, // Credits will expire back to original wallet in 1 hour
770
804
  });
771
805
  ```
772
806
 
@@ -197377,7 +197377,7 @@ var require_axios = __commonJS({
197377
197377
  kind === "object" && isFunction4(thing.toString) && thing.toString() === "[object FormData]"));
197378
197378
  };
197379
197379
  var isURLSearchParams2 = kindOfTest2("URLSearchParams");
197380
- var [isReadableStream2, isRequest2, isResponse2, isHeaders2] = ["ReadableStream", "Request", "Response", "Headers"].map(kindOfTest2);
197380
+ var [isReadableStream3, isRequest2, isResponse2, isHeaders2] = ["ReadableStream", "Request", "Response", "Headers"].map(kindOfTest2);
197381
197381
  var trim2 = (str) => str.trim ? str.trim() : str.replace(/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, "");
197382
197382
  function forEach3(obj, fn2, { allOwnKeys = false } = {}) {
197383
197383
  if (obj === null || typeof obj === "undefined") {
@@ -197662,7 +197662,7 @@ var require_axios = __commonJS({
197662
197662
  isBoolean: isBoolean4,
197663
197663
  isObject: isObject5,
197664
197664
  isPlainObject: isPlainObject2,
197665
- isReadableStream: isReadableStream2,
197665
+ isReadableStream: isReadableStream3,
197666
197666
  isRequest: isRequest2,
197667
197667
  isResponse: isResponse2,
197668
197668
  isHeaders: isHeaders2,
@@ -220929,6 +220929,18 @@ var tokenTypes = [
220929
220929
  "pol",
220930
220930
  "base-eth"
220931
220931
  ];
220932
+ var multipartPendingStatus = [
220933
+ "ASSEMBLING",
220934
+ "VALIDATING",
220935
+ "FINALIZING"
220936
+ ];
220937
+ var multipartFailedStatus = [
220938
+ "UNDERFUNDED",
220939
+ "INVALID",
220940
+ "APPROVAL_FAILED",
220941
+ "REVOKE_FAILED"
220942
+ ];
220943
+ var multipartFinalizedStatus = ["FINALIZED"];
220932
220944
  var isNodeUploadFolderParams = (p9) => p9.folderPath !== void 0;
220933
220945
  var isWebUploadFolderParams = (p9) => p9.files !== void 0;
220934
220946
  function isKyvePrivateKey(wallet) {
@@ -220948,6 +220960,7 @@ function isSolanaWalletAdapter(walletAdapter) {
220948
220960
  function isEthereumWalletAdapter(walletAdapter) {
220949
220961
  return "getSigner" in walletAdapter;
220950
220962
  }
220963
+ var validChunkingModes = ["force", "disabled", "auto"];
220951
220964
 
220952
220965
  // src/common/logger.ts
220953
220966
  init_dirname();
@@ -220959,7 +220972,7 @@ var import_winston = __toESM(require_winston(), 1);
220959
220972
  init_dirname();
220960
220973
  init_buffer2();
220961
220974
  init_process2();
220962
- var version21 = "1.30.0-alpha.1";
220975
+ var version21 = "1.31.0-alpha.1";
220963
220976
 
220964
220977
  // src/common/logger.ts
220965
220978
  var TurboWinstonLogger = class _TurboWinstonLogger {
@@ -225543,6 +225556,7 @@ var TurboHTTPService = class {
225543
225556
  }
225544
225557
  const { body, duplex } = await toFetchBody(data);
225545
225558
  try {
225559
+ this.logger.debug("Posting data via fetch", { endpoint, headers });
225546
225560
  const res = await fetch(this.axios.defaults.baseURL + endpoint, {
225547
225561
  method: "POST",
225548
225562
  headers,
@@ -274373,6 +274387,11 @@ function pLimit(concurrency) {
274373
274387
  return generator;
274374
274388
  }
274375
274389
 
274390
+ // src/common/chunked.ts
274391
+ init_dirname();
274392
+ init_buffer2();
274393
+ init_process2();
274394
+
274376
274395
  // src/common/events.ts
274377
274396
  init_dirname();
274378
274397
  init_buffer2();
@@ -274595,6 +274614,407 @@ function createStreamWithSigningEvents({
274595
274614
  });
274596
274615
  }
274597
274616
 
274617
+ // src/common/chunked.ts
274618
+ var fiveMiB = 5 * 1024 * 1024;
274619
+ var fiveHundredMiB = fiveMiB * 100;
274620
+ var defaultMaxChunkConcurrency = 5;
274621
+ var minChunkByteCount = fiveMiB;
274622
+ var defaultChunkByteCount = minChunkByteCount;
274623
+ var backlogQueueFactor = 2;
274624
+ var chunkingHeader = { "x-chunking-version": "2" };
274625
+ var ChunkedUploader = class {
274626
+ constructor({
274627
+ http,
274628
+ token,
274629
+ maxChunkConcurrency = defaultMaxChunkConcurrency,
274630
+ maxFinalizeMs,
274631
+ chunkByteCount = defaultChunkByteCount,
274632
+ logger: logger19 = TurboWinstonLogger.default,
274633
+ chunkingMode = "auto",
274634
+ dataItemByteCount
274635
+ }) {
274636
+ this.assertChunkParams({
274637
+ chunkByteCount,
274638
+ chunkingMode,
274639
+ maxChunkConcurrency,
274640
+ maxFinalizeMs
274641
+ });
274642
+ this.chunkByteCount = chunkByteCount;
274643
+ this.maxChunkConcurrency = maxChunkConcurrency;
274644
+ this.maxFinalizeMs = maxFinalizeMs;
274645
+ this.http = http;
274646
+ this.token = token;
274647
+ this.logger = logger19;
274648
+ this.shouldUseChunkUploader = this.shouldChunkUpload({
274649
+ chunkByteCount,
274650
+ chunkingMode,
274651
+ dataItemByteCount
274652
+ });
274653
+ this.maxBacklogQueue = this.maxChunkConcurrency * backlogQueueFactor;
274654
+ }
274655
+ shouldChunkUpload({
274656
+ chunkByteCount,
274657
+ chunkingMode,
274658
+ dataItemByteCount
274659
+ }) {
274660
+ if (chunkingMode === "disabled") {
274661
+ return false;
274662
+ }
274663
+ if (chunkingMode === "force") {
274664
+ return true;
274665
+ }
274666
+ const isMoreThanTwoChunksOfData = dataItemByteCount > chunkByteCount * 2;
274667
+ return isMoreThanTwoChunksOfData;
274668
+ }
274669
+ assertChunkParams({
274670
+ chunkByteCount,
274671
+ chunkingMode,
274672
+ maxChunkConcurrency,
274673
+ maxFinalizeMs
274674
+ }) {
274675
+ if (maxFinalizeMs !== void 0 && (Number.isNaN(maxFinalizeMs) || !Number.isInteger(maxFinalizeMs) || maxFinalizeMs < 0)) {
274676
+ throw new Error(
274677
+ "Invalid max finalization wait time. Must be a non-negative integer."
274678
+ );
274679
+ }
274680
+ if (Number.isNaN(maxChunkConcurrency) || !Number.isInteger(maxChunkConcurrency) || maxChunkConcurrency < 1) {
274681
+ throw new Error(
274682
+ "Invalid max chunk concurrency. Must be an integer of at least 1."
274683
+ );
274684
+ }
274685
+ if (Number.isNaN(chunkByteCount) || !Number.isInteger(chunkByteCount) || chunkByteCount < fiveMiB || chunkByteCount > fiveHundredMiB) {
274686
+ throw new Error(
274687
+ "Invalid chunk size. Must be an integer between 5 MiB and 500 MiB."
274688
+ );
274689
+ }
274690
+ if (typeof chunkingMode !== "string" || !validChunkingModes.includes(chunkingMode)) {
274691
+ throw new Error(
274692
+ `Invalid chunking mode. Must be one of: ${validChunkingModes.join(
274693
+ ", "
274694
+ )}`
274695
+ );
274696
+ }
274697
+ }
274698
+ /**
274699
+ * Initialize or resume an upload session, returning the upload ID.
274700
+ */
274701
+ async initUpload() {
274702
+ const res = await this.http.get({
274703
+ endpoint: `/chunks/${this.token}/-1/-1?chunkSize=${this.chunkByteCount}`,
274704
+ headers: chunkingHeader
274705
+ });
274706
+ if (res.chunkSize !== this.chunkByteCount) {
274707
+ this.logger.warn("Chunk size mismatch! Overriding with server value.", {
274708
+ clientExpected: this.chunkByteCount,
274709
+ serverReturned: res.chunkSize
274710
+ });
274711
+ this.chunkByteCount = res.chunkSize;
274712
+ }
274713
+ return res.id;
274714
+ }
274715
+ async upload({
274716
+ dataItemSizeFactory,
274717
+ dataItemStreamFactory,
274718
+ dataItemOpts,
274719
+ signal,
274720
+ events
274721
+ }) {
274722
+ const uploadId = await this.initUpload();
274723
+ const dataItemByteCount = dataItemSizeFactory();
274724
+ const emitter = new TurboEventEmitter(events);
274725
+ const { stream: stream2, resume } = createStreamWithUploadEvents({
274726
+ data: dataItemStreamFactory(),
274727
+ dataSize: dataItemByteCount,
274728
+ emitter
274729
+ });
274730
+ this.logger.debug(`Starting chunked upload`, {
274731
+ token: this.token,
274732
+ uploadId,
274733
+ totalSize: dataItemByteCount,
274734
+ chunkByteCount: this.chunkByteCount,
274735
+ maxChunkConcurrency: this.maxChunkConcurrency,
274736
+ inputStreamType: isReadableStream2(stream2) ? "ReadableStream" : "Readable"
274737
+ });
274738
+ const inFlight = /* @__PURE__ */ new Set();
274739
+ const internalAbort = new AbortController();
274740
+ const combinedSignal = combineAbortSignals([internalAbort.signal, signal]);
274741
+ const limit = pLimit(this.maxChunkConcurrency);
274742
+ let currentOffset = 0;
274743
+ let currentChunkPartNumber = 0;
274744
+ let firstError;
274745
+ let uploadedBytes = 0;
274746
+ const chunks = splitIntoChunks(stream2, this.chunkByteCount);
274747
+ resume();
274748
+ for await (const chunk of chunks) {
274749
+ if (combinedSignal?.aborted) {
274750
+ internalAbort.abort();
274751
+ await Promise.allSettled(inFlight);
274752
+ firstError ??= new CanceledError2();
274753
+ break;
274754
+ }
274755
+ const chunkPartNumber = ++currentChunkPartNumber;
274756
+ const chunkByteCount = chunk.length;
274757
+ const chunkOffset = currentOffset;
274758
+ currentOffset += chunkByteCount;
274759
+ const promise = limit(async () => {
274760
+ if (firstError !== void 0) {
274761
+ return;
274762
+ }
274763
+ this.logger.debug("Uploading chunk", {
274764
+ chunkPartNumber,
274765
+ chunkOffset,
274766
+ chunkByteCount
274767
+ });
274768
+ await this.http.post({
274769
+ endpoint: `/chunks/${this.token}/${uploadId}/${chunkOffset}`,
274770
+ data: chunk,
274771
+ headers: {
274772
+ "Content-Type": "application/octet-stream",
274773
+ ...chunkingHeader
274774
+ },
274775
+ signal: combinedSignal
274776
+ });
274777
+ uploadedBytes += chunkByteCount;
274778
+ this.logger.debug("Chunk uploaded", {
274779
+ chunkPartNumber,
274780
+ chunkOffset,
274781
+ chunkByteCount
274782
+ });
274783
+ emitter.emit("upload-progress", {
274784
+ processedBytes: uploadedBytes,
274785
+ totalBytes: dataItemByteCount
274786
+ });
274787
+ }).catch((err) => {
274788
+ this.logger.error("Chunk upload failed", {
274789
+ id: chunkPartNumber,
274790
+ offset: chunkOffset,
274791
+ size: chunkByteCount,
274792
+ err
274793
+ });
274794
+ emitter.emit("upload-error", err);
274795
+ internalAbort.abort(err);
274796
+ firstError = firstError ?? err;
274797
+ });
274798
+ inFlight.add(promise);
274799
+ promise.finally(() => inFlight.delete(promise));
274800
+ if (inFlight.size >= this.maxBacklogQueue) {
274801
+ await Promise.race(inFlight);
274802
+ if (combinedSignal?.aborted) {
274803
+ internalAbort.abort();
274804
+ await Promise.allSettled(inFlight);
274805
+ firstError ??= new CanceledError2();
274806
+ break;
274807
+ }
274808
+ }
274809
+ }
274810
+ await Promise.all(inFlight);
274811
+ if (firstError !== void 0) {
274812
+ throw firstError;
274813
+ }
274814
+ const finalizeResponse = await this.finalizeUpload(
274815
+ uploadId,
274816
+ dataItemByteCount,
274817
+ dataItemOpts?.paidBy,
274818
+ combinedSignal
274819
+ );
274820
+ emitter.emit("upload-success");
274821
+ return finalizeResponse;
274822
+ }
274823
+ toGiB(bytes4) {
274824
+ return bytes4 / 1024 ** 3;
274825
+ }
274826
+ async finalizeUpload(uploadId, dataItemByteCount, paidBy, signal) {
274827
+ const fileSizeInGiB = Math.ceil(this.toGiB(dataItemByteCount));
274828
+ const defaultMaxWaitTimeMins = fileSizeInGiB;
274829
+ const maxWaitTimeMs = this.maxFinalizeMs ?? defaultMaxWaitTimeMins * 60 * 1e3;
274830
+ const minimumWaitPerStepMs = (
274831
+ // Per step, files smaller than 100MB will wait 2 second,
274832
+ dataItemByteCount < 1024 * 1024 * 100 ? 2e3 : (
274833
+ // files smaller than 3 GiB will wait 3 seconds,
274834
+ dataItemByteCount < 1024 * 1024 * 1024 * 3 ? 3e3 : (
274835
+ // and larger files will wait 1 second per GiB with max of 10 seconds
274836
+ Math.max(1e3 * fileSizeInGiB, 1e4)
274837
+ )
274838
+ )
274839
+ );
274840
+ const paidByHeader = {};
274841
+ if (paidBy !== void 0) {
274842
+ paidByHeader["x-paid-by"] = Array.isArray(paidBy) ? paidBy.join(",") : paidBy;
274843
+ }
274844
+ await this.http.post({
274845
+ endpoint: `/chunks/${this.token}/${uploadId}/finalize`,
274846
+ data: Buffer2.alloc(0),
274847
+ headers: {
274848
+ "Content-Type": "application/octet-stream",
274849
+ ...paidByHeader,
274850
+ ...chunkingHeader
274851
+ },
274852
+ signal
274853
+ });
274854
+ this.logger.debug(
274855
+ `Confirming upload to Turbo with uploadId ${uploadId} for up to ${defaultMaxWaitTimeMins} minutes.`
274856
+ );
274857
+ const startTime = Date.now();
274858
+ const cutoffTime = startTime + maxWaitTimeMs;
274859
+ let attempts = 0;
274860
+ while (Date.now() < cutoffTime) {
274861
+ const waitTimeMs = Math.min(
274862
+ Math.floor((cutoffTime - Date.now()) * (3 / 4)),
274863
+ minimumWaitPerStepMs
274864
+ );
274865
+ await sleep(waitTimeMs);
274866
+ if (signal?.aborted) {
274867
+ this.logger.warn(`Upload finalization aborted by signal.`);
274868
+ throw new CanceledError2();
274869
+ }
274870
+ const response = await this.http.get({
274871
+ endpoint: `/chunks/${this.token}/${uploadId}/status`,
274872
+ signal
274873
+ });
274874
+ this.logger.debug(`Upload status found: ${response.status}`, {
274875
+ status: response.status,
274876
+ attempts: attempts++,
274877
+ maxWaitTimeMs,
274878
+ minimumWaitPerStepMs,
274879
+ waitTimeMs,
274880
+ elapsedMs: Date.now() - startTime
274881
+ });
274882
+ if (response.status === "FINALIZED") {
274883
+ this.logger.debug(`Upload finalized successfully.`);
274884
+ return response.receipt;
274885
+ }
274886
+ if (response.status === "UNDERFUNDED") {
274887
+ throw new FailedRequestError(`Insufficient balance`, 402);
274888
+ }
274889
+ }
274890
+ throw new Error(
274891
+ `Upload multi-part finalization has timed out for Upload ID ${uploadId}`
274892
+ );
274893
+ }
274894
+ };
274895
+ async function* splitIntoChunks(source, chunkByteCount) {
274896
+ if (isReadableStream2(source)) {
274897
+ yield* splitReadableStreamIntoChunks(source, chunkByteCount);
274898
+ } else {
274899
+ yield* splitReadableIntoChunks(source, chunkByteCount);
274900
+ }
274901
+ }
274902
+ async function* splitReadableIntoChunks(source, chunkByteCount) {
274903
+ const queue2 = [];
274904
+ let total = 0;
274905
+ let encoder;
274906
+ for await (const piece of source) {
274907
+ const u82 = piece instanceof Uint8Array ? new Uint8Array(piece.buffer, piece.byteOffset, piece.byteLength) : (encoder ??= new TextEncoder()).encode(String(piece));
274908
+ queue2.push(u82);
274909
+ total += u82.length;
274910
+ while (total >= chunkByteCount) {
274911
+ const out = new Uint8Array(chunkByteCount);
274912
+ let remaining = out.length;
274913
+ let off2 = 0;
274914
+ while (remaining > 0) {
274915
+ const head = queue2[0];
274916
+ const take = Math.min(remaining, head.length);
274917
+ out.set(head.subarray(0, take), off2);
274918
+ off2 += take;
274919
+ remaining -= take;
274920
+ if (take === head.length) {
274921
+ queue2.shift();
274922
+ } else {
274923
+ queue2[0] = head.subarray(take);
274924
+ }
274925
+ }
274926
+ total -= chunkByteCount;
274927
+ yield Buffer2.from(out.buffer, out.byteOffset, out.byteLength);
274928
+ }
274929
+ }
274930
+ if (total > 0) {
274931
+ const out = new Uint8Array(total);
274932
+ let off2 = 0;
274933
+ while (queue2.length > 0) {
274934
+ const head = queue2.shift();
274935
+ out.set(head, off2);
274936
+ off2 += head.length;
274937
+ }
274938
+ yield Buffer2.from(out.buffer, out.byteOffset, out.byteLength);
274939
+ }
274940
+ }
274941
+ async function* splitReadableStreamIntoChunks(source, chunkByteCount) {
274942
+ const reader = source.getReader();
274943
+ const queue2 = [];
274944
+ let total = 0;
274945
+ try {
274946
+ while (true) {
274947
+ const { value, done } = await reader.read();
274948
+ if (done)
274949
+ break;
274950
+ const u82 = new Uint8Array(
274951
+ value.buffer,
274952
+ value.byteOffset,
274953
+ value.byteLength
274954
+ );
274955
+ queue2.push(u82);
274956
+ total += u82.length;
274957
+ while (total >= chunkByteCount) {
274958
+ const out = new Uint8Array(chunkByteCount);
274959
+ let remaining = out.length;
274960
+ let off2 = 0;
274961
+ while (remaining > 0) {
274962
+ const head = queue2[0];
274963
+ const take = Math.min(remaining, head.length);
274964
+ out.set(head.subarray(0, take), off2);
274965
+ off2 += take;
274966
+ remaining -= take;
274967
+ if (take === head.length) {
274968
+ queue2.shift();
274969
+ } else {
274970
+ queue2[0] = head.subarray(take);
274971
+ }
274972
+ }
274973
+ total -= chunkByteCount;
274974
+ yield Buffer2.from(out.buffer, out.byteOffset, out.byteLength);
274975
+ }
274976
+ }
274977
+ if (total > 0) {
274978
+ const out = new Uint8Array(total);
274979
+ let off2 = 0;
274980
+ while (queue2.length > 0) {
274981
+ const head = queue2.shift();
274982
+ out.set(head, off2);
274983
+ off2 += head.length;
274984
+ }
274985
+ yield Buffer2.from(out.buffer, out.byteOffset, out.byteLength);
274986
+ }
274987
+ } finally {
274988
+ reader.releaseLock();
274989
+ }
274990
+ }
274991
+ function isReadableStream2(source) {
274992
+ if (typeof ReadableStream !== "undefined" && source instanceof ReadableStream) {
274993
+ return true;
274994
+ }
274995
+ return source !== null && typeof source === "object" && "getReader" in source && typeof source.getReader === "function";
274996
+ }
274997
+ function combineAbortSignals(signals) {
274998
+ const real = signals.filter(Boolean);
274999
+ if (real.length === 0)
275000
+ return void 0;
275001
+ const anyFn = AbortSignal.any;
275002
+ if (typeof anyFn === "function") {
275003
+ return anyFn(real);
275004
+ }
275005
+ const controller = new AbortController();
275006
+ for (const s7 of real) {
275007
+ const sig = s7;
275008
+ if (sig.aborted) {
275009
+ controller.abort(sig.reason);
275010
+ break;
275011
+ }
275012
+ const onAbort = () => controller.abort(sig.reason);
275013
+ s7.addEventListener("abort", onAbort, { once: true });
275014
+ }
275015
+ return controller.signal;
275016
+ }
275017
+
274598
275018
  // src/common/upload.ts
274599
275019
  function isTurboUploadFileWithStreamFactoryParams(params) {
274600
275020
  return "fileStreamFactory" in params;
@@ -274679,7 +275099,10 @@ var TurboAuthenticatedBaseUploadService = class extends TurboUnauthenticatedUplo
274679
275099
  data,
274680
275100
  dataItemOpts,
274681
275101
  signal,
274682
- events
275102
+ events,
275103
+ chunkByteCount,
275104
+ chunkingMode,
275105
+ maxChunkConcurrency
274683
275106
  }) {
274684
275107
  if (isBlob2(data)) {
274685
275108
  const streamFactory = () => data.stream();
@@ -274705,7 +275128,10 @@ var TurboAuthenticatedBaseUploadService = class extends TurboUnauthenticatedUplo
274705
275128
  fileSizeFactory: () => dataBuffer.byteLength,
274706
275129
  signal,
274707
275130
  dataItemOpts,
274708
- events
275131
+ events,
275132
+ chunkByteCount,
275133
+ chunkingMode,
275134
+ maxChunkConcurrency
274709
275135
  });
274710
275136
  }
274711
275137
  resolveUploadFileConfig(params) {
@@ -274737,17 +275163,38 @@ var TurboAuthenticatedBaseUploadService = class extends TurboUnauthenticatedUplo
274737
275163
  let lastError = void 0;
274738
275164
  let lastStatusCode = void 0;
274739
275165
  const emitter = new TurboEventEmitter(events);
274740
- const { dataItemStreamFactory, dataItemSizeFactory } = await this.signer.signDataItem({
274741
- fileStreamFactory,
274742
- fileSizeFactory,
274743
- dataItemOpts,
274744
- emitter
274745
- });
274746
275166
  while (retries < maxRetries) {
274747
275167
  if (signal?.aborted) {
274748
275168
  throw new CanceledError2();
274749
275169
  }
275170
+ const { dataItemStreamFactory, dataItemSizeFactory } = await this.signer.signDataItem({
275171
+ fileStreamFactory,
275172
+ fileSizeFactory,
275173
+ dataItemOpts,
275174
+ emitter
275175
+ });
274750
275176
  try {
275177
+ const { chunkByteCount, maxChunkConcurrency } = params;
275178
+ const chunkedUploader = new ChunkedUploader({
275179
+ http: this.httpService,
275180
+ token: this.token,
275181
+ maxChunkConcurrency,
275182
+ chunkByteCount,
275183
+ logger: this.logger,
275184
+ dataItemByteCount: dataItemSizeFactory(),
275185
+ chunkingMode: params.chunkingMode,
275186
+ maxFinalizeMs: params.maxFinalizeMs
275187
+ });
275188
+ if (chunkedUploader.shouldUseChunkUploader) {
275189
+ const response2 = await chunkedUploader.upload({
275190
+ dataItemStreamFactory,
275191
+ dataItemSizeFactory,
275192
+ dataItemOpts,
275193
+ signal,
275194
+ events
275195
+ });
275196
+ return response2;
275197
+ }
274751
275198
  const response = await this.uploadSignedDataItem({
274752
275199
  dataItemStreamFactory,
274753
275200
  dataItemSizeFactory,
@@ -274783,7 +275230,7 @@ var TurboAuthenticatedBaseUploadService = class extends TurboUnauthenticatedUplo
274783
275230
  ]);
274784
275231
  }
274785
275232
  }
274786
- const msg = `Failed to upload file after ${maxRetries + 1} attempts
275233
+ const msg = `Failed to upload file after ${retries + 1} attempts
274787
275234
  ${lastError instanceof Error ? lastError.message : lastError}`;
274788
275235
  if (lastError instanceof FailedRequestError) {
274789
275236
  lastError.message = msg;
@@ -274847,7 +275294,11 @@ ${lastError instanceof Error ? lastError.message : lastError}`;
274847
275294
  signal,
274848
275295
  manifestOptions = {},
274849
275296
  maxConcurrentUploads = 1,
274850
- throwOnFailure = true
275297
+ throwOnFailure = true,
275298
+ maxChunkConcurrency,
275299
+ chunkByteCount,
275300
+ chunkingMode,
275301
+ maxFinalizeMs
274851
275302
  } = params;
274852
275303
  const { disableManifest, indexFile, fallbackFile } = manifestOptions;
274853
275304
  const paths = {};
@@ -274873,7 +275324,10 @@ ${lastError instanceof Error ? lastError.message : lastError}`;
274873
275324
  fileStreamFactory: () => this.getFileStreamForFile(file),
274874
275325
  fileSizeFactory: () => this.getFileSize(file),
274875
275326
  signal,
274876
- dataItemOpts: dataItemOptsWithContentType
275327
+ dataItemOpts: dataItemOptsWithContentType,
275328
+ chunkByteCount,
275329
+ maxChunkConcurrency,
275330
+ chunkingMode
274877
275331
  });
274878
275332
  const relativePath = this.getRelativePath(file, params);
274879
275333
  paths[relativePath] = { id: result2.id };
@@ -274916,7 +275370,11 @@ ${lastError instanceof Error ? lastError.message : lastError}`;
274916
275370
  fileStreamFactory: () => this.createManifestStream(manifestBuffer),
274917
275371
  fileSizeFactory: () => manifestBuffer.byteLength,
274918
275372
  signal,
274919
- dataItemOpts: { ...dataItemOpts, tags: tagsWithManifestContentType }
275373
+ dataItemOpts: { ...dataItemOpts, tags: tagsWithManifestContentType },
275374
+ chunkByteCount,
275375
+ maxChunkConcurrency,
275376
+ maxFinalizeMs,
275377
+ chunkingMode
274920
275378
  });
274921
275379
  return {
274922
275380
  ...response,
@@ -275164,9 +275622,20 @@ var TurboAuthenticatedClient = class extends TurboUnauthenticatedClient {
275164
275622
  data,
275165
275623
  dataItemOpts,
275166
275624
  signal,
275167
- events
275625
+ events,
275626
+ chunkByteCount,
275627
+ chunkingMode,
275628
+ maxChunkConcurrency
275168
275629
  }) {
275169
- return this.uploadService.upload({ data, dataItemOpts, signal, events });
275630
+ return this.uploadService.upload({
275631
+ data,
275632
+ dataItemOpts,
275633
+ signal,
275634
+ events,
275635
+ chunkByteCount,
275636
+ chunkingMode,
275637
+ maxChunkConcurrency
275638
+ });
275170
275639
  }
275171
275640
  uploadFile(params) {
275172
275641
  return this.uploadService.uploadFile(params);
@@ -275944,6 +276413,9 @@ export {
275944
276413
  isWebUploadFolderParams,
275945
276414
  lamportToTokenAmount,
275946
276415
  mARIOToTokenAmount,
276416
+ multipartFailedStatus,
276417
+ multipartFinalizedStatus,
276418
+ multipartPendingStatus,
275947
276419
  privateKeyFromKyveMnemonic,
275948
276420
  readableStreamToAsyncIterable,
275949
276421
  signerFromKyveMnemonic,
@@ -275952,6 +276424,7 @@ export {
275952
276424
  tokenToBaseMap,
275953
276425
  tokenTypes,
275954
276426
  ukyveToTokenAmount,
276427
+ validChunkingModes,
275955
276428
  weiToTokenAmount
275956
276429
  };
275957
276430
  /*! Bundled license information:
@@ -32,6 +32,7 @@ async function uploadFile(options) {
32
32
  fileStreamFactory: () => (0, fs_1.createReadStream)(filePath),
33
33
  fileSizeFactory: () => fileSize,
34
34
  dataItemOpts: { tags: [...constants_js_1.turboCliTags, ...customTags], paidBy },
35
+ ...(0, utils_js_1.getChunkingOptions)(options),
35
36
  });
36
37
  console.log('Uploaded file:', JSON.stringify(result, null, 2));
37
38
  }