@ardrive/turbo-sdk 1.26.0 → 1.27.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -114609,7 +114609,7 @@ var require_wrapAsync = __commonJS({
  function isAsyncGenerator(fn2) {
  return fn2[Symbol.toStringTag] === "AsyncGenerator";
  }
- function isAsyncIterable(obj) {
+ function isAsyncIterable2(obj) {
  return typeof obj[Symbol.asyncIterator] === "function";
  }
  function wrapAsync(asyncFn) {
@@ -114620,7 +114620,7 @@ var require_wrapAsync = __commonJS({
  exports15.default = wrapAsync;
  exports15.isAsync = isAsync2;
  exports15.isAsyncGenerator = isAsyncGenerator;
- exports15.isAsyncIterable = isAsyncIterable;
+ exports15.isAsyncIterable = isAsyncIterable2;
  }
  });

@@ -313540,9 +313540,9 @@ var DataItem = class _DataItem {
  return false;
  }
  }
- const Signer3 = indexToType[sigType];
+ const Signer4 = indexToType[sigType];
  const signatureData = await ar_data_base_default(item);
- return await Signer3.verify(item.rawOwner, signatureData, item.rawSignature);
+ return await Signer4.verify(item.rawOwner, signatureData, item.rawSignature);
  }
  async getSignatureData() {
  return ar_data_base_default(this);
@@ -313953,8 +313953,8 @@ async function processStream(stream2) {
  transform.end();
  if (id3 !== (0, import_base64url11.default)(createHash("sha256").update(signature2).digest()))
  throw new Error("ID doesn't match signature");
- const Signer3 = indexToType[signatureType];
- if (!await Signer3.verify(owner, await signatureData, signature2))
+ const Signer4 = indexToType[signatureType];
+ if (!await Signer4.verify(owner, await signatureData, signature2))
  throw new Error("Invalid signature");
  items.push({
  id: id3,
@@ -314072,7 +314072,7 @@ var import_winston = __toESM(require_winston(), 1);
  init_dirname();
  init_buffer2();
  init_process2();
- var version16 = "1.26.0-alpha.1";
+ var version16 = "1.26.0";

  // src/common/logger.ts
  var TurboWinstonLogger = class _TurboWinstonLogger {
@@ -367315,7 +367315,9 @@ function createReadableStreamWithEvents({
  }) {
  const originalStream = data instanceof ReadableStream ? data : new ReadableStream({
  start: (controller) => {
- controller.enqueue(data);
+ controller.enqueue(
+ new Uint8Array(data.buffer, data.byteOffset, data.byteLength)
+ );
  controller.close();
  }
  });
@@ -367333,12 +367335,14 @@ function createReadableStreamWithEvents({
  controller.close();
  return;
  }
- processedBytes += value.length;
+ processedBytes += value.byteLength;
  emitter.emit(eventNamesMap["on-progress"], {
  processedBytes,
  totalBytes: dataSize
  });
- controller.enqueue(value);
+ controller.enqueue(
+ new Uint8Array(value.buffer, value.byteOffset, value.byteLength)
+ );
  } catch (error) {
  emitter.emit(eventNamesMap["on-error"], error);
  controller.error(error);
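Context for the two hunks above: a Node.js Buffer is a Uint8Array view that often sits at a non-zero byteOffset inside a larger pooled ArrayBuffer. Switching from `length` to `byteLength` counts bytes correctly for any ArrayBuffer view, and re-wrapping each chunk as `new Uint8Array(buffer, byteOffset, byteLength)` pins the enqueued view to its own slice of a possibly shared buffer. A minimal TypeScript sketch of the pitfall being avoided, assuming only standard Node.js Buffer pooling behavior:

    // Small Buffers are carved out of a shared allocation pool.
    const chunk = Buffer.from("hello");
    console.log(chunk.byteOffset); // frequently > 0 for pooled Buffers
    // Wrong: views the whole pool, not just the 5 bytes of "hello".
    const bad = new Uint8Array(chunk.buffer);
    // Right: the pattern adopted in this diff, bounded to the chunk's own slice.
    const good = new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);
    console.log(good.byteLength); // 5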
@@ -367366,7 +367370,7 @@ function createReadableWithEvents({
  let processedBytes = 0;
  existingStream.on("data", (chunk) => {
  eventingStream.write(chunk);
- processedBytes += chunk.length;
+ processedBytes += chunk.byteLength;
  emitter.emit(eventNamesMap["on-progress"], {
  processedBytes,
  totalBytes: dataSize
@@ -367495,6 +367499,22 @@ function createStreamWithUploadEvents({
  }
  });
  }
+ function createStreamWithSigningEvents({
+ data,
+ dataSize,
+ emitter = new TurboEventEmitter()
+ }) {
+ return createStreamWithEvents({
+ data,
+ dataSize,
+ emitter,
+ eventNamesMap: {
+ "on-progress": "signing-progress",
+ "on-error": "signing-error",
+ "on-end": "signing-success"
+ }
+ });
+ }

  // src/common/upload.ts
  var creditSharingTagNames = {
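The new `createStreamWithSigningEvents` mirrors `createStreamWithUploadEvents`: it delegates to the shared `createStreamWithEvents` and only remaps the generic callbacks onto the signing-* event names. A hedged TypeScript usage sketch — the `{ stream, resume }` return shape and the `on()` listener API are inferred from how the upload variant is consumed elsewhere in this diff, not stated by this hunk:

    const emitter = new TurboEventEmitter();
    emitter.on("signing-progress", ({ processedBytes, totalBytes }) => {
      console.log(`signed ${processedBytes}/${totalBytes} bytes`);
    });
    const { stream, resume } = createStreamWithSigningEvents({
      data: new Uint8Array(1024),
      dataSize: 1024,
      emitter,
    });
    // resume() is exposed by the upload variant; assumed here to start the tapped flow.
    resume();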
@@ -367528,17 +367548,17 @@ var TurboUnauthenticatedUploadService = class {
  signal,
  events = {}
  }) {
- const fileSize = dataItemSizeFactory();
+ const dataItemSize = dataItemSizeFactory();
  this.logger.debug("Uploading signed data item...");
  const emitter = new TurboEventEmitter(events);
  const { stream: streamWithUploadEvents, resume } = createStreamWithUploadEvents({
  data: dataItemStreamFactory(),
- dataSize: fileSize,
+ dataSize: dataItemSize,
  emitter
  });
  const headers = {
  "content-type": "application/octet-stream",
- "content-length": `${fileSize}`
+ "content-length": `${dataItemSize}`
  };
  if (dataItemOpts !== void 0 && dataItemOpts.paidBy !== void 0) {
  const paidBy = Array.isArray(dataItemOpts.paidBy) ? dataItemOpts.paidBy : [dataItemOpts.paidBy];
@@ -367627,7 +367647,6 @@ var TurboAuthenticatedBaseUploadService = class extends TurboUnauthenticatedUplo
  throw new CanceledError2();
  }
  try {
- this.logger.debug("Uploading signed data item...");
  const response = await this.uploadSignedDataItem({
  dataItemStreamFactory,
  dataItemSizeFactory,
@@ -367747,6 +367766,7 @@ ${lastError instanceof Error ? lastError.message : lastError}`;
  };
  try {
  const result2 = await this.uploadFile({
+ // TODO: can fix this type by passing a class generic and specifying in the node/web abstracts which stream type to use
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  fileStreamFactory: () => this.getFileStreamForFile(file),
  fileSizeFactory: () => this.getFileSize(file),
@@ -367789,6 +367809,7 @@ ${lastError instanceof Error ? lastError.message : lastError}`;
  ];
  const manifestBuffer = Buffer.from(JSON.stringify(manifest));
  const manifestResponse = await this.uploadFile({
+ // TODO: can fix this type by passing a class generic and specifying in the node/web abstracts which stream type to use
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  fileStreamFactory: () => this.createManifestStream(manifestBuffer),
  fileSizeFactory: () => manifestBuffer.byteLength,
@@ -368345,23 +368366,87 @@ var TurboDataItemAbstractSigner = class {
  init_dirname();
  init_buffer2();
  init_process2();
- async function readableStreamToBuffer({
- stream: stream2,
- size
- }) {
- const reader = stream2.getReader();
- const buffer2 = Buffer.alloc(size);
- let offset2 = 0;
- let done = false;
- while (!done) {
- const { done: streamDone, value } = await reader.read();
- done = streamDone;
- if (!done) {
- buffer2.set(value, offset2);
- offset2 += value.byteLength;
+ var DEFAULT_STREAM_CHUNK_SIZE = 20 * 1024 * 1024;
+ function ensureChunkedStream(input, maxChunkSize = DEFAULT_STREAM_CHUNK_SIZE) {
+ const reader = input.getReader();
+ let leftover = null;
+ return new ReadableStream({
+ async pull(controller) {
+ if (leftover) {
+ const chunk = leftover.subarray(0, maxChunkSize);
+ leftover = leftover.subarray(chunk.length);
+ if (leftover.length === 0)
+ leftover = null;
+ controller.enqueue(chunk);
+ return;
+ }
+ const { value, done } = await reader.read();
+ if (done) {
+ controller.close();
+ return;
+ }
+ if (!(value instanceof Uint8Array)) {
+ throw new TypeError("Expected Uint8Array from source stream");
+ }
+ if (value.byteLength <= maxChunkSize) {
+ controller.enqueue(value);
+ } else {
+ controller.enqueue(value.subarray(0, maxChunkSize));
+ leftover = value.subarray(maxChunkSize);
+ }
  }
+ });
+ }
+ function createUint8ArrayReadableStreamFactory({
+ data,
+ maxChunkSize = DEFAULT_STREAM_CHUNK_SIZE
+ }) {
+ if (data instanceof Blob) {
+ return () => ensureChunkedStream(data.stream());
+ }
+ if (data instanceof ReadableStream) {
+ return () => {
+ const reader = data.getReader();
+ const stream2 = new ReadableStream({
+ async pull(controller) {
+ const { value, done } = await reader.read();
+ if (done) {
+ controller.close();
+ return;
+ }
+ if (ArrayBuffer.isView(value)) {
+ controller.enqueue(
+ new Uint8Array(value.buffer, value.byteOffset, value.byteLength)
+ );
+ } else if (value instanceof ArrayBuffer || value instanceof SharedArrayBuffer) {
+ controller.enqueue(new Uint8Array(value));
+ } else {
+ throw new TypeError("Unsupported chunk type in ReadableStream");
+ }
+ }
+ });
+ return ensureChunkedStream(stream2, maxChunkSize);
+ };
  }
- return buffer2;
+ return () => {
+ let uint8;
+ if (typeof data === "string") {
+ uint8 = new TextEncoder().encode(data);
+ } else if (ArrayBuffer.isView(data)) {
+ uint8 = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
+ } else if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) {
+ uint8 = new Uint8Array(data);
+ } else {
+ throw new TypeError("Unsupported input type for stream");
+ }
+ const stream2 = new ReadableStream({
+ start(controller) {
+ controller.enqueue(uint8);
+ controller.close();
+ }
+ });
+ return ensureChunkedStream(stream2, maxChunkSize);
+ };
  }

  // src/web/signer.ts
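Taken together, `ensureChunkedStream` and `createUint8ArrayReadableStreamFactory` normalize string, Blob, ArrayBuffer, typed-array, and ReadableStream inputs into a ReadableStream of Uint8Array chunks that never exceed `maxChunkSize` (20 MiB by default). A TypeScript sketch of the observable behavior, using only the factory defined in this hunk:

    const factory = createUint8ArrayReadableStreamFactory({
      data: new Uint8Array(50 * 1024 * 1024), // one 50 MiB chunk in
      maxChunkSize: 20 * 1024 * 1024,
    });
    const reader = factory().getReader();
    for (let r = await reader.read(); !r.done; r = await reader.read()) {
      // Oversized input is re-sliced: expect 20 MiB, 20 MiB, then 10 MiB.
      console.log(r.value.byteLength);
    }

Because the slicing uses `subarray`, each emitted chunk is a view over the original buffer rather than a copy.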
@@ -368386,58 +368471,21 @@ var TurboWebArweaveSigner = class extends TurboDataItemAbstractSigner {
  }) {
  await this.setPublicKey();
  const fileSize = fileSizeFactory();
- try {
- const fileStream = fileStreamFactory();
- emitter?.emit("signing-progress", {
- processedBytes: 0,
- totalBytes: fileSize
- });
- const buffer2 = fileStream instanceof Buffer ? fileStream : await readableStreamToBuffer({
- stream: fileStream,
- size: fileSize
- });
- emitter?.emit("signing-progress", {
- processedBytes: Math.floor(fileSize / 2),
- totalBytes: fileSize
- });
- let signedDataItem;
- this.logger.debug("Signing data item...");
- if (this.signer instanceof InjectedArweaveSigner) {
- this.logger.debug(
- "Arconnect signer detected, signing with Arconnect signData Item API..."
- );
- const sign6 = Buffer.from(
- await this.signer["signer"].signDataItem({
- data: Uint8Array.from(buffer2),
- tags: dataItemOpts?.tags,
- target: dataItemOpts?.target,
- anchor: dataItemOpts?.anchor
- })
- );
- signedDataItem = new DataItem(sign6);
- } else {
- signedDataItem = createData(
- Uint8Array.from(buffer2),
- this.signer,
- dataItemOpts
- );
- await signedDataItem.sign(this.signer);
- }
- emitter?.emit("signing-progress", {
- processedBytes: fileSize,
- totalBytes: fileSize
- });
- emitter?.emit("signing-success");
- this.logger.debug("Successfully signed data item...");
- return {
- // while this returns a Buffer - it needs to match our return type for uploading
- dataItemStreamFactory: () => signedDataItem.getRaw(),
- dataItemSizeFactory: () => signedDataItem.getRaw().length
- };
- } catch (error) {
- emitter?.emit("signing-error", error);
- throw error;
- }
+ this.logger.debug("Signing data item...");
+ const { signedDataItemFactory, signedDataItemSize } = await streamSignerReadableStream({
+ streamFactory: createUint8ArrayReadableStreamFactory({
+ data: fileStreamFactory()
+ }),
+ signer: this.signer,
+ dataItemOpts,
+ fileSize,
+ emitter
+ });
+ this.logger.debug("Successfully signed data item...");
+ return {
+ dataItemStreamFactory: signedDataItemFactory,
+ dataItemSizeFactory: () => signedDataItemSize
+ };
  }
  async generateSignedRequestHeaders() {
  await this.setPublicKey();
@@ -368448,6 +368496,113 @@ var TurboWebArweaveSigner = class extends TurboDataItemAbstractSigner {
  return super.signData(dataToSign);
  }
  };
+ var readableStreamToAsyncIterable = (stream2) => ({
+ async *[Symbol.asyncIterator]() {
+ const reader = stream2.getReader();
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done)
+ break;
+ if (value !== void 0)
+ yield Buffer.from(value);
+ }
+ } finally {
+ reader.releaseLock();
+ }
+ }
+ });
+ async function streamSignerReadableStream({
+ streamFactory,
+ signer,
+ dataItemOpts,
+ fileSize,
+ emitter
+ }) {
+ try {
+ const header = createData("", signer, dataItemOpts);
+ const headerSize = header.getRaw().byteLength;
+ const totalDataItemSizeWithHeader = fileSize + headerSize;
+ const [stream1, stream2] = streamFactory().tee();
+ const reader1 = stream1.getReader();
+ let bytesProcessed = 0;
+ const eventingStream = new ReadableStream({
+ start() {
+ bytesProcessed = headerSize;
+ emitter?.emit("signing-progress", {
+ processedBytes: bytesProcessed,
+ totalBytes: totalDataItemSizeWithHeader
+ });
+ },
+ async pull(controller) {
+ const { done, value } = await reader1.read();
+ if (done) {
+ controller.close();
+ return;
+ }
+ bytesProcessed += value.byteLength;
+ controller.enqueue(value);
+ emitter?.emit("signing-progress", {
+ processedBytes: bytesProcessed,
+ totalBytes: totalDataItemSizeWithHeader
+ });
+ },
+ cancel() {
+ reader1.cancel();
+ }
+ });
+ const asyncIterableReadableStream = readableStreamToAsyncIterable(eventingStream);
+ const parts = [
+ (0, import_utils.stringToBuffer)("dataitem"),
+ (0, import_utils.stringToBuffer)("1"),
+ (0, import_utils.stringToBuffer)(header.signatureType.toString()),
+ Uint8Array.from(header.rawOwner),
+ Uint8Array.from(header.rawTarget),
+ Uint8Array.from(header.rawAnchor),
+ Uint8Array.from(header.rawTags),
+ asyncIterableReadableStream
+ ];
+ const hash6 = await deepHash(parts);
+ const sigBytes = Buffer.from(await signer.sign(hash6));
+ emitter?.emit("signing-success");
+ header.setSignature(sigBytes);
+ const headerBytes = header.getRaw();
+ const signedDataItemFactory = () => {
+ const reader = stream2.getReader();
+ return new ReadableStream({
+ start(controller) {
+ controller.enqueue(Uint8Array.from(headerBytes));
+ bytesProcessed += headerBytes.byteLength;
+ },
+ async pull(controller) {
+ try {
+ const { done, value } = await reader.read();
+ if (done) {
+ controller.close();
+ return;
+ }
+ controller.enqueue(value);
+ } catch (error) {
+ controller.error(error);
+ }
+ },
+ cancel() {
+ reader.cancel();
+ }
+ });
+ };
+ return {
+ signedDataItemSize: totalDataItemSizeWithHeader,
+ signedDataItemFactory
+ };
+ } catch (error) {
+ emitter?.emit("signing-error", error);
+ throw error;
+ }
+ }
+ function isAsyncIterable(data) {
+ return typeof data[Symbol.asyncIterator] === "function";
+ }

  // src/web/upload.ts
  init_dirname();
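This is the heart of the release: `streamSignerReadableStream` signs without buffering the whole payload. `streamFactory().tee()` yields two branches — one is drained through `deepHash` (via `readableStreamToAsyncIterable`) to compute the signature while emitting signing-progress events, the other is replayed after the signed header bytes to assemble the final data item, so the reported size is always header plus payload. A condensed TypeScript sketch; the `signer` value is an assumption (any arbundles-compatible signer accepted by `createData`):

    const { signedDataItemFactory, signedDataItemSize } =
      await streamSignerReadableStream({
        streamFactory: createUint8ArrayReadableStreamFactory({ data: "hello world" }),
        signer, // assumption: e.g. an ArweaveSigner instance
        dataItemOpts: { tags: [{ name: "Content-Type", value: "text/plain" }] },
        fileSize: 11, // byte length of the raw payload
        emitter: undefined, // progress events are optional
      });
    // signedDataItemSize === header byte length + fileSize
    const body = signedDataItemFactory(); // ReadableStream: header bytes, then payload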
@@ -368649,6 +368804,7 @@ export {
  TurboAuthenticatedClient,
  TurboAuthenticatedPaymentService,
  TurboAuthenticatedUploadService,
+ TurboEventEmitter,
  TurboFactory,
  TurboUnauthenticatedClient,
  TurboUnauthenticatedPaymentService,
@@ -368658,6 +368814,9 @@ export {
  USD,
  WinstonToTokenAmount,
  ZeroDecimalCurrency,
+ createStreamWithEvents,
+ createStreamWithSigningEvents,
+ createStreamWithUploadEvents,
  creditSharingTagNames,
  currencyMap,
  defaultPaymentServiceURL,
@@ -368669,6 +368828,7 @@ export {
  developmentUploadServiceURL,
  exponentMap,
  fiatCurrencyTypes,
+ isAsyncIterable,
  isCurrency,
  isEthPrivateKey,
  isEthereumWalletAdapter,
@@ -368681,8 +368841,10 @@ export {
  lamportToTokenAmount,
  mARIOToTokenAmount,
  privateKeyFromKyveMnemonic,
+ readableStreamToAsyncIterable,
  signerFromKyveMnemonic,
  signerFromKyvePrivateKey2 as signerFromKyvePrivateKey,
+ streamSignerReadableStream,
  tokenToBaseMap,
  tokenTypes,
  ukyveToTokenAmount,
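These export hunks promote the eventing and streaming-signing helpers to public API, which also likely explains the very first hunk: the bundler renames the async library's internal `isAsyncIterable` to `isAsyncIterable2` to avoid colliding with the SDK's newly exported top-level `isAsyncIterable`. A hedged import sketch, assuming the package's usual ESM entry point:

    import {
      TurboEventEmitter,
      createStreamWithSigningEvents,
      isAsyncIterable,
      readableStreamToAsyncIterable,
      streamSignerReadableStream,
    } from "@ardrive/turbo-sdk";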
@@ -46,7 +46,7 @@ function createReadableStreamWithEvents({ data, dataSize, emitter, eventNamesMap
  ? data
  : new ReadableStream({
  start: (controller) => {
- controller.enqueue(data);
+ controller.enqueue(new Uint8Array(data.buffer, data.byteOffset, data.byteLength));
  controller.close();
  },
  });
@@ -64,12 +64,12 @@ function createReadableStreamWithEvents({ data, dataSize, emitter, eventNamesMap
  controller.close();
  return;
  }
- processedBytes += value.length;
+ processedBytes += value.byteLength;
  emitter.emit(eventNamesMap['on-progress'], {
  processedBytes,
  totalBytes: dataSize,
  });
- controller.enqueue(value);
+ controller.enqueue(new Uint8Array(value.buffer, value.byteOffset, value.byteLength));
  }
  catch (error) {
  emitter.emit(eventNamesMap['on-error'], error);
@@ -125,7 +125,7 @@ function createReadableWithEvents({ data, dataSize, emitter, eventNamesMap, }) {
  let processedBytes = 0;
  existingStream.on('data', (chunk) => {
  eventingStream.write(chunk);
- processedBytes += chunk.length;
+ processedBytes += chunk.byteLength;
  emitter.emit(eventNamesMap['on-progress'], {
  processedBytes,
  totalBytes: dataSize,
@@ -44,19 +44,19 @@ class TurboUnauthenticatedUploadService {
  this.retryConfig = retryConfig;
  }
  async uploadSignedDataItem({ dataItemStreamFactory, dataItemSizeFactory, dataItemOpts, signal, events = {}, }) {
- const fileSize = dataItemSizeFactory();
+ const dataItemSize = dataItemSizeFactory();
  this.logger.debug('Uploading signed data item...');
  // create the tapped stream with events
  const emitter = new events_js_1.TurboEventEmitter(events);
  // create the stream with upload events
  const { stream: streamWithUploadEvents, resume } = (0, events_js_1.createStreamWithUploadEvents)({
  data: dataItemStreamFactory(),
- dataSize: fileSize,
+ dataSize: dataItemSize,
  emitter,
  });
  const headers = {
  'content-type': 'application/octet-stream',
- 'content-length': `${fileSize}`,
+ 'content-length': `${dataItemSize}`,
  };
  if (dataItemOpts !== undefined && dataItemOpts.paidBy !== undefined) {
  const paidBy = Array.isArray(dataItemOpts.paidBy)
@@ -140,11 +140,10 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
  if (signal?.aborted) {
  throw new axios_1.CanceledError();
  }
+ // Now that we have the signed data item, we can upload it using the uploadSignedDataItem method
+ // which will create a new emitter with upload events. We await
+ // this result due to the wrapped retry logic of this method.
  try {
- this.logger.debug('Uploading signed data item...');
- // Now that we have the signed data item, we can upload it using the uploadSignedDataItem method
- // which will create a new emitter with upload events. We await
- // this result due to the wrapped retry logic of this method.
  const response = await this.uploadSignedDataItem({
  dataItemStreamFactory,
  dataItemSizeFactory,
@@ -250,6 +249,7 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
  };
  try {
  const result = await this.uploadFile({
+ // TODO: can fix this type by passing a class generic and specifying in the node/web abstracts which stream type to use
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  fileStreamFactory: () => this.getFileStreamForFile(file),
  fileSizeFactory: () => this.getFileSize(file),
@@ -294,6 +294,7 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
  ];
  const manifestBuffer = Buffer.from(JSON.stringify(manifest));
  const manifestResponse = await this.uploadFile({
+ // TODO: can fix this type by passing a class generic and specifying in the node/web abstracts which stream type to use
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  fileStreamFactory: () => this.createManifestStream(manifestBuffer),
  fileSizeFactory: () => manifestBuffer.byteLength,
@@ -1,6 +1,4 @@
  "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.readableStreamToBuffer = readableStreamToBuffer;
  /**
  * Copyright (C) 2022-2024 Permanent Data Solutions, Inc.
  *
@@ -16,6 +14,12 @@ exports.readableStreamToBuffer = readableStreamToBuffer;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.DEFAULT_STREAM_CHUNK_SIZE = void 0;
+ exports.readableStreamToBuffer = readableStreamToBuffer;
+ exports.ensureChunkedStream = ensureChunkedStream;
+ exports.createUint8ArrayReadableStreamFactory = createUint8ArrayReadableStreamFactory;
+ exports.DEFAULT_STREAM_CHUNK_SIZE = 20 * 1024 * 1024; // 20mb
  async function readableStreamToBuffer({ stream, size, }) {
  const reader = stream.getReader();
  const buffer = Buffer.alloc(size);
@@ -31,3 +35,97 @@ async function readableStreamToBuffer({ stream, size, }) {
  }
  return buffer;
  }
+ function ensureChunkedStream(input, maxChunkSize = exports.DEFAULT_STREAM_CHUNK_SIZE) {
+ const reader = input.getReader();
+ let leftover = null;
+ return new ReadableStream({
+ async pull(controller) {
+ // If we have leftover from a previous large chunk, continue slicing it
+ if (leftover) {
+ const chunk = leftover.subarray(0, maxChunkSize);
+ leftover = leftover.subarray(chunk.length);
+ if (leftover.length === 0)
+ leftover = null;
+ controller.enqueue(chunk);
+ return;
+ }
+ const { value, done } = await reader.read();
+ if (done) {
+ controller.close();
+ return;
+ }
+ // Runtime check because ReadableStream defaults to <any> and can be abused
+ if (!(value instanceof Uint8Array)) {
+ throw new TypeError('Expected Uint8Array from source stream');
+ }
+ if (value.byteLength <= maxChunkSize) {
+ controller.enqueue(value);
+ }
+ else {
+ // Slice and enqueue one piece now, keep the rest
+ // subarray is the new view with the same buffer (not copy)
+ controller.enqueue(value.subarray(0, maxChunkSize));
+ leftover = value.subarray(maxChunkSize);
+ }
+ },
+ });
+ }
+ function createUint8ArrayReadableStreamFactory({ data, maxChunkSize = exports.DEFAULT_STREAM_CHUNK_SIZE, }) {
+ // Blob streams are already ReadableStream<Uint8Array>
+ if (data instanceof Blob) {
+ return () => ensureChunkedStream(data.stream());
+ }
+ // We need to handle the case where the data is a ReadableStream that is not a Uint8Array
+ // This is to ensure downstream code can handle the data as a Uint8Array
+ if (data instanceof ReadableStream) {
+ return () => {
+ const reader = data.getReader();
+ const stream = new ReadableStream({
+ async pull(controller) {
+ const { value, done } = await reader.read();
+ if (done) {
+ controller.close();
+ return;
+ }
+ if (ArrayBuffer.isView(value)) {
+ // specifying offset and length is required to ensure chunks remain within their slice of the buffer
+ controller.enqueue(new Uint8Array(value.buffer, value.byteOffset, value.byteLength));
+ }
+ else if (value instanceof ArrayBuffer ||
+ value instanceof SharedArrayBuffer) {
+ controller.enqueue(new Uint8Array(value));
+ }
+ else {
+ throw new TypeError('Unsupported chunk type in ReadableStream');
+ }
+ },
+ });
+ return ensureChunkedStream(stream, maxChunkSize);
+ };
+ }
+ return () => {
+ let uint8;
+ if (typeof data === 'string') {
+ uint8 = new TextEncoder().encode(data);
+ }
+ else if (ArrayBuffer.isView(data)) {
+ // In theory we could use the view directly, but that might allow other typed arrays like BigInt64Array to be used which could behave unexpectedly downstream
+ // specifying offset and length is required to ensure chunks remain within their slice of the buffer
+ uint8 = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
+ }
+ else if (data instanceof ArrayBuffer ||
+ data instanceof SharedArrayBuffer) {
+ uint8 = new Uint8Array(data);
+ }
+ else {
+ throw new TypeError('Unsupported input type for stream');
+ }
+ const stream = new ReadableStream({
+ start(controller) {
+ controller.enqueue(uint8);
+ controller.close();
+ },
+ });
+ return ensureChunkedStream(stream, maxChunkSize);
+ };
+ }
@@ -17,4 +17,4 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.version = void 0;
  // AUTOMATICALLY GENERATED FILE - DO NOT TOUCH
- exports.version = '1.26.0';
+ exports.version = '1.27.0-alpha.1';