@ardrive/turbo-sdk 1.26.0-alpha.1 → 1.27.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +13 -6
- package/bundles/web.bundle.min.js +244 -82
- package/lib/cjs/common/events.js +4 -4
- package/lib/cjs/common/upload.js +8 -7
- package/lib/cjs/node/signer.js +0 -1
- package/lib/cjs/utils/readableStream.js +100 -2
- package/lib/cjs/version.js +1 -1
- package/lib/cjs/web/index.js +1 -0
- package/lib/cjs/web/signer.js +128 -56
- package/lib/esm/common/events.js +4 -4
- package/lib/esm/common/upload.js +8 -7
- package/lib/esm/node/signer.js +0 -1
- package/lib/esm/utils/readableStream.js +95 -0
- package/lib/esm/version.js +1 -1
- package/lib/esm/web/index.js +1 -0
- package/lib/esm/web/signer.js +126 -57
- package/lib/types/common/events.d.ts.map +1 -1
- package/lib/types/common/upload.d.ts.map +1 -1
- package/lib/types/node/signer.d.ts.map +1 -1
- package/lib/types/types.d.ts +1 -1
- package/lib/types/types.d.ts.map +1 -1
- package/lib/types/utils/readableStream.d.ts +6 -0
- package/lib/types/utils/readableStream.d.ts.map +1 -1
- package/lib/types/version.d.ts +1 -1
- package/lib/types/web/index.d.ts +1 -0
- package/lib/types/web/index.d.ts.map +1 -1
- package/lib/types/web/signer.d.ts +16 -6
- package/lib/types/web/signer.d.ts.map +1 -1
- package/package.json +5 -7
- package/lib/cjs/common/events.test.js +0 -470
- package/lib/esm/common/events.test.js +0 -468
- package/lib/types/common/events.test.d.ts +0 -2
- package/lib/types/common/events.test.d.ts.map +0 -1
package/README.md
CHANGED
@@ -95,7 +95,7 @@ async function uploadWithTurbo() {
 onSigningProgress: ({ totalBytes, processedBytes }) => {
 console.log('Signing progress:', { totalBytes, processedBytes });
 },
-onSigningError: (
+onSigningError: (error) => {
 console.log('Signing error:', { error });
 },
 onSigningSuccess: () => {
@@ -386,6 +386,7 @@ const uploadResponse = await turbo.uploadSignedDataItem({
 dataItemSizeFactory: () => dataItemSize,
 signal: AbortSignal.timeout(10_000), // cancel the upload after 10 seconds
 events: {
+// track upload events only
 onUploadProgress: ({ totalBytes, processedBytes }) => {
 console.log('Upload progress:', { totalBytes, processedBytes });
 },
@@ -540,7 +541,7 @@ const uploadResult = await turbo.upload({

 #### `uploadFile({ fileStreamFactory, fileSizeFactory, signal, dataItemOpts, events })`

-Signs and uploads a raw file. The provided `fileStreamFactory` should produce a NEW file data stream each time is
+Signs and uploads a raw file. The provided `fileStreamFactory` should produce a NEW file data stream each time it is invoked. The `fileSizeFactory` is a function that returns the size of the file. The `signal` is an optional [AbortSignal] that can be used to cancel the upload or timeout the request. `dataItemOpts` is an optional object that can be used to configure tags, target, and anchor for the data item upload.

 ```typescript
 const filePath = path.join(__dirname, './my-unsigned-file.txt');
@@ -563,6 +564,7 @@ const uploadResult = await turbo.uploadFile({
 // no timeout or AbortSignal provided
 },
 events: {
+// upload events
 onUploadProgress: ({ totalBytes, processedBytes }) => {
 console.log('Upload progress:', { totalBytes, processedBytes });
 },
@@ -745,10 +747,10 @@ const { givenApprovals, receivedApprovals } =

 ## Events

-The SDK provides events for
+The SDK provides events for tracking the state signing and uploading data to Turbo. You can listen to these events by providing a callback function to the `events` parameter of the `upload`, `uploadFile`, and `uploadSignedDataItem` methods.

-- `onProgress` - emitted when the overall progress changes (includes both upload and signing). Each event
-- `onError` - emitted when the overall upload or signing fails (includes both upload and signing)
+- `onProgress` - emitted when the overall progress changes (includes both upload and signing). Each event consists of the total bytes, processed bytes, and the step (upload or signing)
+- `onError` - emitted when the overall upload or signing fails (includes both upload and signing)
 - `onSuccess` - emitted when the overall upload or signing succeeds (includes both upload and signing) - this is the last event emitted for the upload or signing process
 - `onSigningProgress` - emitted when the signing progress changes.
 - `onSigningError` - emitted when the signing fails.
@@ -765,7 +767,7 @@ const uploadResult = await turbo.upload({
 // optional
 },
 events: {
-// overall
+// overall events (includes signing and upload events)
 onProgress: ({ totalBytes, processedBytes, step }) => {
 const percentComplete = (processedBytes / totalBytes) * 100;
 console.log('Overall progress:', {
@@ -778,6 +780,10 @@ const uploadResult = await turbo.upload({
 onError: (error) => {
 console.log('Overall error:', { error });
 },
+onSuccess: () => {
+console.log('Signed and upload data item!');
+},
+// upload events
 onUploadProgress: ({ totalBytes, processedBytes }) => {
 console.log('Upload progress:', { totalBytes, processedBytes });
 },
@@ -787,6 +793,7 @@ const uploadResult = await turbo.upload({
 onUploadSuccess: () => {
 console.log('Upload success!');
 },
+// signing events
 onSigningProgress: ({ totalBytes, processedBytes }) => {
 console.log('Signing progress:', { totalBytes, processedBytes });
 },
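The README hunks above document the expanded `events` API in prose and in fragments. A condensed sketch of how the documented callbacks fit into a single `uploadFile` call follows; the wallet file and upload file paths are placeholders, and the client setup via `TurboFactory.authenticated` is assumed from the README's earlier sections rather than shown in this diff:

```typescript
import { TurboFactory } from '@ardrive/turbo-sdk';
import fs from 'node:fs';

// Placeholder wallet; any signer supported by the SDK works the same way.
const jwk = JSON.parse(fs.readFileSync('./key-file.json', 'utf-8'));
const turbo = TurboFactory.authenticated({ privateKey: jwk });

const filePath = './my-unsigned-file.txt';
const uploadResult = await turbo.uploadFile({
  fileStreamFactory: () => fs.createReadStream(filePath), // must return a NEW stream each call
  fileSizeFactory: () => fs.statSync(filePath).size,
  events: {
    // overall events (includes signing and upload events)
    onProgress: ({ totalBytes, processedBytes, step }) => {
      console.log('Overall progress:', { totalBytes, processedBytes, step });
    },
    onError: (error) => console.log('Overall error:', { error }),
    onSuccess: () => console.log('Signed and uploaded data item!'),
    // upload events
    onUploadProgress: ({ totalBytes, processedBytes }) => {
      console.log('Upload progress:', { totalBytes, processedBytes });
    },
    // signing events
    onSigningProgress: ({ totalBytes, processedBytes }) => {
      console.log('Signing progress:', { totalBytes, processedBytes });
    },
    onSigningError: (error) => console.log('Signing error:', { error }),
  },
});
console.log('Upload result:', uploadResult);
```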
package/bundles/web.bundle.min.js
CHANGED
@@ -114609,7 +114609,7 @@ var require_wrapAsync = __commonJS({
 function isAsyncGenerator(fn2) {
 return fn2[Symbol.toStringTag] === "AsyncGenerator";
 }
-function
+function isAsyncIterable2(obj) {
 return typeof obj[Symbol.asyncIterator] === "function";
 }
 function wrapAsync(asyncFn) {
@@ -114620,7 +114620,7 @@ var require_wrapAsync = __commonJS({
 exports15.default = wrapAsync;
 exports15.isAsync = isAsync2;
 exports15.isAsyncGenerator = isAsyncGenerator;
-exports15.isAsyncIterable =
+exports15.isAsyncIterable = isAsyncIterable2;
 }
 });

@@ -313540,9 +313540,9 @@ var DataItem = class _DataItem {
 return false;
 }
 }
-const
+const Signer4 = indexToType[sigType];
 const signatureData = await ar_data_base_default(item);
-return await
+return await Signer4.verify(item.rawOwner, signatureData, item.rawSignature);
 }
 async getSignatureData() {
 return ar_data_base_default(this);
@@ -313953,8 +313953,8 @@ async function processStream(stream2) {
 transform.end();
 if (id3 !== (0, import_base64url11.default)(createHash("sha256").update(signature2).digest()))
 throw new Error("ID doesn't match signature");
-const
-if (!await
+const Signer4 = indexToType[signatureType];
+if (!await Signer4.verify(owner, await signatureData, signature2))
 throw new Error("Invalid signature");
 items.push({
 id: id3,
@@ -314072,7 +314072,7 @@ var import_winston = __toESM(require_winston(), 1);
 init_dirname();
 init_buffer2();
 init_process2();
-var version16 = "1.
+var version16 = "1.26.0";

 // src/common/logger.ts
 var TurboWinstonLogger = class _TurboWinstonLogger {
@@ -367315,7 +367315,9 @@ function createReadableStreamWithEvents({
 }) {
 const originalStream = data instanceof ReadableStream ? data : new ReadableStream({
 start: (controller) => {
-controller.enqueue(
+controller.enqueue(
+new Uint8Array(data.buffer, data.byteOffset, data.byteLength)
+);
 controller.close();
 }
 });
@@ -367333,12 +367335,14 @@ function createReadableStreamWithEvents({
 controller.close();
 return;
 }
-processedBytes += value.
+processedBytes += value.byteLength;
 emitter.emit(eventNamesMap["on-progress"], {
 processedBytes,
 totalBytes: dataSize
 });
-controller.enqueue(
+controller.enqueue(
+new Uint8Array(value.buffer, value.byteOffset, value.byteLength)
+);
 } catch (error) {
 emitter.emit(eventNamesMap["on-error"], error);
 controller.error(error);
@@ -367366,7 +367370,7 @@ function createReadableWithEvents({
 let processedBytes = 0;
 existingStream.on("data", (chunk) => {
 eventingStream.write(chunk);
-processedBytes += chunk.
+processedBytes += chunk.byteLength;
 emitter.emit(eventNamesMap["on-progress"], {
 processedBytes,
 totalBytes: dataSize
@@ -367495,6 +367499,22 @@ function createStreamWithUploadEvents({
 }
 });
 }
+function createStreamWithSigningEvents({
+data,
+dataSize,
+emitter = new TurboEventEmitter()
+}) {
+return createStreamWithEvents({
+data,
+dataSize,
+emitter,
+eventNamesMap: {
+"on-progress": "signing-progress",
+"on-error": "signing-error",
+"on-end": "signing-success"
+}
+});
+}

 // src/common/upload.ts
 var creditSharingTagNames = {
@@ -367528,17 +367548,17 @@ var TurboUnauthenticatedUploadService = class {
 signal,
 events = {}
 }) {
-const
+const dataItemSize = dataItemSizeFactory();
 this.logger.debug("Uploading signed data item...");
 const emitter = new TurboEventEmitter(events);
 const { stream: streamWithUploadEvents, resume } = createStreamWithUploadEvents({
 data: dataItemStreamFactory(),
-dataSize:
+dataSize: dataItemSize,
 emitter
 });
 const headers = {
 "content-type": "application/octet-stream",
-"content-length": `${
+"content-length": `${dataItemSize}`
 };
 if (dataItemOpts !== void 0 && dataItemOpts.paidBy !== void 0) {
 const paidBy = Array.isArray(dataItemOpts.paidBy) ? dataItemOpts.paidBy : [dataItemOpts.paidBy];
@@ -367627,7 +367647,6 @@ var TurboAuthenticatedBaseUploadService = class extends TurboUnauthenticatedUplo
 throw new CanceledError2();
 }
 try {
-this.logger.debug("Uploading signed data item...");
 const response = await this.uploadSignedDataItem({
 dataItemStreamFactory,
 dataItemSizeFactory,
@@ -367747,6 +367766,7 @@ ${lastError instanceof Error ? lastError.message : lastError}`;
 };
 try {
 const result2 = await this.uploadFile({
+// TODO: can fix this type by passing a class generic and specifying in the node/web abstracts which stream type to use
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 fileStreamFactory: () => this.getFileStreamForFile(file),
 fileSizeFactory: () => this.getFileSize(file),
@@ -367789,6 +367809,7 @@ ${lastError instanceof Error ? lastError.message : lastError}`;
 ];
 const manifestBuffer = Buffer.from(JSON.stringify(manifest));
 const manifestResponse = await this.uploadFile({
+// TODO: can fix this type by passing a class generic and specifying in the node/web abstracts which stream type to use
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 fileStreamFactory: () => this.createManifestStream(manifestBuffer),
 fileSizeFactory: () => manifestBuffer.byteLength,
@@ -368345,23 +368366,87 @@ var TurboDataItemAbstractSigner = class {
 init_dirname();
 init_buffer2();
 init_process2();
-[368348-368361: removed lines not captured in this diff view]
+var DEFAULT_STREAM_CHUNK_SIZE = 20 * 1024 * 1024;
+function ensureChunkedStream(input, maxChunkSize = DEFAULT_STREAM_CHUNK_SIZE) {
+const reader = input.getReader();
+let leftover = null;
+return new ReadableStream({
+async pull(controller) {
+if (leftover) {
+const chunk = leftover.subarray(0, maxChunkSize);
+leftover = leftover.subarray(chunk.length);
+if (leftover.length === 0)
+leftover = null;
+controller.enqueue(chunk);
+return;
+}
+const { value, done } = await reader.read();
+if (done) {
+controller.close();
+return;
+}
+if (!(value instanceof Uint8Array)) {
+throw new TypeError("Expected Uint8Array from source stream");
+}
+if (value.byteLength <= maxChunkSize) {
+controller.enqueue(value);
+} else {
+controller.enqueue(value.subarray(0, maxChunkSize));
+leftover = value.subarray(maxChunkSize);
+}
 }
+});
+}
+function createUint8ArrayReadableStreamFactory({
+data,
+maxChunkSize = DEFAULT_STREAM_CHUNK_SIZE
+}) {
+if (data instanceof Blob) {
+return () => ensureChunkedStream(data.stream());
+}
+if (data instanceof ReadableStream) {
+return () => {
+const reader = data.getReader();
+const stream2 = new ReadableStream({
+async pull(controller) {
+const { value, done } = await reader.read();
+if (done) {
+controller.close();
+return;
+}
+if (ArrayBuffer.isView(value)) {
+controller.enqueue(
+new Uint8Array(value.buffer, value.byteOffset, value.byteLength)
+);
+} else if (value instanceof ArrayBuffer || value instanceof SharedArrayBuffer) {
+controller.enqueue(new Uint8Array(value));
+} else {
+throw new TypeError("Unsupported chunk type in ReadableStream");
+}
+}
+});
+return ensureChunkedStream(stream2, maxChunkSize);
+};
 }
-return
+return () => {
+let uint8;
+if (typeof data === "string") {
+uint8 = new TextEncoder().encode(data);
+} else if (ArrayBuffer.isView(data)) {
+uint8 = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
+} else if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) {
+uint8 = new Uint8Array(data);
+} else {
+throw new TypeError("Unsupported input type for stream");
+}
+const stream2 = new ReadableStream({
+start(controller) {
+controller.enqueue(uint8);
+controller.close();
+}
+});
+return ensureChunkedStream(stream2, maxChunkSize);
+};
 }

 // src/web/signer.ts
@@ -368386,58 +368471,21 @@ var TurboWebArweaveSigner = class extends TurboDataItemAbstractSigner {
 }) {
 await this.setPublicKey();
 const fileSize = fileSizeFactory();
-[368389-368403: removed lines not captured in this diff view]
-this.logger.debug("Signing data item...");
-if (this.signer instanceof InjectedArweaveSigner) {
-this.logger.debug(
-"Arconnect signer detected, signing with Arconnect signData Item API..."
-);
-const sign6 = Buffer.from(
-await this.signer["signer"].signDataItem({
-data: Uint8Array.from(buffer2),
-tags: dataItemOpts?.tags,
-target: dataItemOpts?.target,
-anchor: dataItemOpts?.anchor
-})
-);
-signedDataItem = new DataItem(sign6);
-} else {
-signedDataItem = createData(
-Uint8Array.from(buffer2),
-this.signer,
-dataItemOpts
-);
-await signedDataItem.sign(this.signer);
-}
-emitter?.emit("signing-progress", {
-processedBytes: fileSize,
-totalBytes: fileSize
-});
-emitter?.emit("signing-success");
-this.logger.debug("Successfully signed data item...");
-return {
-// while this returns a Buffer - it needs to match our return type for uploading
-dataItemStreamFactory: () => signedDataItem.getRaw(),
-dataItemSizeFactory: () => signedDataItem.getRaw().length
-};
-} catch (error) {
-emitter?.emit("signing-error", error);
-throw error;
-}
+this.logger.debug("Signing data item...");
+const { signedDataItemFactory, signedDataItemSize } = await streamSignerReadableStream({
+streamFactory: createUint8ArrayReadableStreamFactory({
+data: fileStreamFactory()
+}),
+signer: this.signer,
+dataItemOpts,
+fileSize,
+emitter
+});
+this.logger.debug("Successfully signed data item...");
+return {
+dataItemStreamFactory: signedDataItemFactory,
+dataItemSizeFactory: () => signedDataItemSize
+};
 }
 async generateSignedRequestHeaders() {
 await this.setPublicKey();
@@ -368448,6 +368496,113 @@ var TurboWebArweaveSigner = class extends TurboDataItemAbstractSigner {
 return super.signData(dataToSign);
 }
 };
+var readableStreamToAsyncIterable = (stream2) => ({
+async *[Symbol.asyncIterator]() {
+const reader = stream2.getReader();
+try {
+while (true) {
+const { done, value } = await reader.read();
+if (done)
+break;
+if (value !== void 0)
+yield Buffer.from(value);
+}
+} finally {
+reader.releaseLock();
+}
+}
+});
+async function streamSignerReadableStream({
+streamFactory,
+signer,
+dataItemOpts,
+fileSize,
+emitter
+}) {
+try {
+const header = createData("", signer, dataItemOpts);
+const headerSize = header.getRaw().byteLength;
+const totalDataItemSizeWithHeader = fileSize + headerSize;
+const [stream1, stream2] = streamFactory().tee();
+const reader1 = stream1.getReader();
+let bytesProcessed = 0;
+const eventingStream = new ReadableStream({
+start() {
+bytesProcessed = headerSize;
+emitter?.emit("signing-progress", {
+processedBytes: bytesProcessed,
+totalBytes: totalDataItemSizeWithHeader
+});
+},
+async pull(controller) {
+const { done, value } = await reader1.read();
+if (done) {
+controller.close();
+return;
+}
+bytesProcessed += value.byteLength;
+controller.enqueue(value);
+emitter?.emit("signing-progress", {
+processedBytes: bytesProcessed,
+totalBytes: totalDataItemSizeWithHeader
+});
+},
+cancel() {
+reader1.cancel();
+}
+});
+const asyncIterableReadableStream = readableStreamToAsyncIterable(eventingStream);
+const parts = [
+(0, import_utils.stringToBuffer)("dataitem"),
+(0, import_utils.stringToBuffer)("1"),
+(0, import_utils.stringToBuffer)(header.signatureType.toString()),
+Uint8Array.from(header.rawOwner),
+Uint8Array.from(header.rawTarget),
+Uint8Array.from(header.rawAnchor),
+Uint8Array.from(header.rawTags),
+asyncIterableReadableStream
+];
+const hash6 = await deepHash(parts);
+const sigBytes = Buffer.from(await signer.sign(hash6));
+emitter?.emit("signing-success");
+header.setSignature(sigBytes);
+const headerBytes = header.getRaw();
+const signedDataItemFactory = () => {
+const reader = stream2.getReader();
+return new ReadableStream({
+start(controller) {
+controller.enqueue(Uint8Array.from(headerBytes));
+bytesProcessed += headerBytes.byteLength;
+},
+async pull(controller) {
+try {
+const { done, value } = await reader.read();
+if (done) {
+controller.close();
+return;
+}
+controller.enqueue(value);
+} catch (error) {
+controller.error(error);
+}
+},
+cancel() {
+reader.cancel();
+}
+});
+};
+return {
+signedDataItemSize: totalDataItemSizeWithHeader,
+signedDataItemFactory
+};
+} catch (error) {
+emitter?.emit("signing-error", error);
+throw error;
+}
+}
+function isAsyncIterable(data) {
+return typeof data[Symbol.asyncIterator] === "function";
+}

 // src/web/upload.ts
 init_dirname();
@@ -368649,6 +368804,7 @@ export {
 TurboAuthenticatedClient,
 TurboAuthenticatedPaymentService,
 TurboAuthenticatedUploadService,
+TurboEventEmitter,
 TurboFactory,
 TurboUnauthenticatedClient,
 TurboUnauthenticatedPaymentService,
@@ -368658,6 +368814,9 @@ export {
 USD,
 WinstonToTokenAmount,
 ZeroDecimalCurrency,
+createStreamWithEvents,
+createStreamWithSigningEvents,
+createStreamWithUploadEvents,
 creditSharingTagNames,
 currencyMap,
 defaultPaymentServiceURL,
@@ -368669,6 +368828,7 @@ export {
 developmentUploadServiceURL,
 exponentMap,
 fiatCurrencyTypes,
+isAsyncIterable,
 isCurrency,
 isEthPrivateKey,
 isEthereumWalletAdapter,
@@ -368681,8 +368841,10 @@ export {
 lamportToTokenAmount,
 mARIOToTokenAmount,
 privateKeyFromKyveMnemonic,
+readableStreamToAsyncIterable,
 signerFromKyveMnemonic,
 signerFromKyvePrivateKey2 as signerFromKyvePrivateKey,
+streamSignerReadableStream,
 tokenToBaseMap,
 tokenTypes,
 ukyveToTokenAmount,
package/lib/cjs/common/events.js
CHANGED
@@ -46,7 +46,7 @@ function createReadableStreamWithEvents({ data, dataSize, emitter, eventNamesMap
 ? data
 : new ReadableStream({
 start: (controller) => {
-controller.enqueue(data);
+controller.enqueue(new Uint8Array(data.buffer, data.byteOffset, data.byteLength));
 controller.close();
 },
 });
@@ -64,12 +64,12 @@ function createReadableStreamWithEvents({ data, dataSize, emitter, eventNamesMap
 controller.close();
 return;
 }
-processedBytes += value.
+processedBytes += value.byteLength;
 emitter.emit(eventNamesMap['on-progress'], {
 processedBytes,
 totalBytes: dataSize,
 });
-controller.enqueue(value);
+controller.enqueue(new Uint8Array(value.buffer, value.byteOffset, value.byteLength));
 }
 catch (error) {
 emitter.emit(eventNamesMap['on-error'], error);
@@ -125,7 +125,7 @@ function createReadableWithEvents({ data, dataSize, emitter, eventNamesMap, }) {
 let processedBytes = 0;
 existingStream.on('data', (chunk) => {
 eventingStream.write(chunk);
-processedBytes += chunk.
+processedBytes += chunk.byteLength;
 emitter.emit(eventNamesMap['on-progress'], {
 processedBytes,
 totalBytes: dataSize,
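In both enqueue fixes above, the chunk is re-wrapped as `new Uint8Array(value.buffer, value.byteOffset, value.byteLength)` rather than being enqueued directly, and progress is counted with `byteLength`. The snippet below is illustrative only (not SDK code): it shows why the explicit offset and length matter when the chunk is a pooled Node `Buffer`, which is a `Uint8Array` view into a larger shared `ArrayBuffer`:

```typescript
import { Buffer } from 'node:buffer';

// Small Buffers are carved out of a shared allocation pool, so the backing
// ArrayBuffer is larger than the Buffer and the view may start at an offset.
const chunk = Buffer.from('hello world'); // 11 bytes
console.log(chunk.byteLength);            // 11
console.log(chunk.buffer.byteLength);     // size of the shared pool (8192 by default)

// Viewing the whole backing buffer would expose unrelated pool bytes:
const naive = new Uint8Array(chunk.buffer);
console.log(naive.byteLength);            // pool size, not 11

// The form used in the diff is a zero-copy view limited to this chunk:
const view = new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);
console.log(view.byteLength);              // 11
console.log(Buffer.from(view).toString()); // 'hello world'
```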
package/lib/cjs/common/upload.js
CHANGED
@@ -44,19 +44,19 @@ class TurboUnauthenticatedUploadService {
 this.retryConfig = retryConfig;
 }
 async uploadSignedDataItem({ dataItemStreamFactory, dataItemSizeFactory, dataItemOpts, signal, events = {}, }) {
-const
+const dataItemSize = dataItemSizeFactory();
 this.logger.debug('Uploading signed data item...');
 // create the tapped stream with events
 const emitter = new events_js_1.TurboEventEmitter(events);
 // create the stream with upload events
 const { stream: streamWithUploadEvents, resume } = (0, events_js_1.createStreamWithUploadEvents)({
 data: dataItemStreamFactory(),
-dataSize:
+dataSize: dataItemSize,
 emitter,
 });
 const headers = {
 'content-type': 'application/octet-stream',
-'content-length': `${
+'content-length': `${dataItemSize}`,
 };
 if (dataItemOpts !== undefined && dataItemOpts.paidBy !== undefined) {
 const paidBy = Array.isArray(dataItemOpts.paidBy)
@@ -140,11 +140,10 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
 if (signal?.aborted) {
 throw new axios_1.CanceledError();
 }
+// Now that we have the signed data item, we can upload it using the uploadSignedDataItem method
+// which will create a new emitter with upload events. We await
+// this result due to the wrapped retry logic of this method.
 try {
-this.logger.debug('Uploading signed data item...');
-// Now that we have the signed data item, we can upload it using the uploadSignedDataItem method
-// which will create a new emitter with upload events. We await
-// this result due to the wrapped retry logic of this method.
 const response = await this.uploadSignedDataItem({
 dataItemStreamFactory,
 dataItemSizeFactory,
@@ -250,6 +249,7 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
 };
 try {
 const result = await this.uploadFile({
+// TODO: can fix this type by passing a class generic and specifying in the node/web abstracts which stream type to use
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 fileStreamFactory: () => this.getFileStreamForFile(file),
 fileSizeFactory: () => this.getFileSize(file),
@@ -294,6 +294,7 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
 ];
 const manifestBuffer = Buffer.from(JSON.stringify(manifest));
 const manifestResponse = await this.uploadFile({
+// TODO: can fix this type by passing a class generic and specifying in the node/web abstracts which stream type to use
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 fileStreamFactory: () => this.createManifestStream(manifestBuffer),
 fileSizeFactory: () => manifestBuffer.byteLength,
package/lib/cjs/node/signer.js
CHANGED
@@ -53,7 +53,6 @@ class TurboNodeSigner extends signer_js_1.TurboDataItemAbstractSigner {
 resume();
 const signedDataItem = await signedDataItemPromise;
 this.logger.debug('Successfully signed data item...');
-// TODO: support target, anchor, and tags
 const signedDataItemSize = this.calculateSignedDataHeadersSize({
 dataSize: fileSizeFactory(),
 dataItemOpts,