@shelby-protocol/cli 0.0.19 → 0.0.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/entry.js +1320 -1295
- package/package.json +2 -2
package/bin/entry.js
CHANGED
|
@@ -505,7 +505,7 @@ import { Command } from "@commander-js/extra-typings";
|
|
|
505
505
|
|
|
506
506
|
// package.json
|
|
507
507
|
var name = "@shelby-protocol/cli";
|
|
508
|
-
var version = "0.0.
|
|
508
|
+
var version = "0.0.20";
|
|
509
509
|
|
|
510
510
|
// src/commands/account.tsx
|
|
511
511
|
import readline from "readline";
|
|
@@ -569,6 +569,93 @@ import {
|
|
|
569
569
|
Hex
|
|
570
570
|
} from "@aptos-labs/ts-sdk";
|
|
571
571
|
|
|
572
|
+
// ../../packages/sdk/dist/chunk-4MG4XGY4.mjs
|
|
573
|
+
import {
|
|
574
|
+
AccountAuthenticator,
|
|
575
|
+
Deserializer,
|
|
576
|
+
Hex as Hex2,
|
|
577
|
+
MultiAgentTransaction,
|
|
578
|
+
Serializer
|
|
579
|
+
} from "@aptos-labs/ts-sdk";
|
|
580
|
+
var StaleChannelStateError = class _StaleChannelStateError extends Error {
|
|
581
|
+
/**
|
|
582
|
+
* The last valid micropayment stored by the server.
|
|
583
|
+
* Clients can use this to reset their local channel state.
|
|
584
|
+
*/
|
|
585
|
+
storedMicropayment;
|
|
586
|
+
constructor(storedMicropayment, message) {
|
|
587
|
+
super(
|
|
588
|
+
message ?? "Client has stale channel state. Use the returned micropayment to reset local state."
|
|
589
|
+
);
|
|
590
|
+
this.name = "StaleChannelStateError";
|
|
591
|
+
this.storedMicropayment = storedMicropayment;
|
|
592
|
+
}
|
|
593
|
+
/**
|
|
594
|
+
* Returns the stored micropayment as a base64-encoded string.
|
|
595
|
+
*/
|
|
596
|
+
toBase64() {
|
|
597
|
+
const bytes = this.storedMicropayment.bcsToBytes();
|
|
598
|
+
const binaryString = Array.from(
|
|
599
|
+
bytes,
|
|
600
|
+
(byte) => String.fromCharCode(byte)
|
|
601
|
+
).join("");
|
|
602
|
+
return btoa(binaryString);
|
|
603
|
+
}
|
|
604
|
+
/**
|
|
605
|
+
* Creates a StaleChannelStateError from a base64-encoded micropayment string.
|
|
606
|
+
*/
|
|
607
|
+
static fromBase64(base64, message) {
|
|
608
|
+
const binaryString = atob(base64);
|
|
609
|
+
const bytes = Uint8Array.from(binaryString, (char) => char.charCodeAt(0));
|
|
610
|
+
const micropayment = SenderBuiltMicropayment.deserialize(bytes);
|
|
611
|
+
return new _StaleChannelStateError(micropayment, message);
|
|
612
|
+
}
|
|
613
|
+
};
|
|
614
|
+
var SenderBuiltMicropayment = class _SenderBuiltMicropayment {
|
|
615
|
+
/**
|
|
616
|
+
* The actual micropayment transaction. It is built with the receiver address as fee payer and also requires a signature from the receiver to submit.
|
|
617
|
+
*/
|
|
618
|
+
micropayment;
|
|
619
|
+
/**
|
|
620
|
+
* The sender's signature.
|
|
621
|
+
*/
|
|
622
|
+
senderSignature;
|
|
623
|
+
constructor(micropayment, senderSignature) {
|
|
624
|
+
this.micropayment = micropayment;
|
|
625
|
+
this.senderSignature = senderSignature;
|
|
626
|
+
}
|
|
627
|
+
serialize(serializer) {
|
|
628
|
+
this.micropayment.serialize(serializer);
|
|
629
|
+
this.senderSignature.serialize(serializer);
|
|
630
|
+
}
|
|
631
|
+
bcsToBytes() {
|
|
632
|
+
const serializer = new Serializer();
|
|
633
|
+
this.serialize(serializer);
|
|
634
|
+
return serializer.toUint8Array();
|
|
635
|
+
}
|
|
636
|
+
bcsToHex() {
|
|
637
|
+
return Hex2.fromHexInput(this.bcsToBytes());
|
|
638
|
+
}
|
|
639
|
+
toStringWithoutPrefix() {
|
|
640
|
+
return this.bcsToHex().toStringWithoutPrefix();
|
|
641
|
+
}
|
|
642
|
+
toString() {
|
|
643
|
+
return this.bcsToHex().toString();
|
|
644
|
+
}
|
|
645
|
+
/**
|
|
646
|
+
* Deserializes a SenderBuiltMicropayment from BCS bytes.
|
|
647
|
+
* @param bytes - The bytes to deserialize from (Uint8Array or hex string).
|
|
648
|
+
* @returns A new SenderBuiltMicropayment instance.
|
|
649
|
+
*/
|
|
650
|
+
static deserialize(bytes) {
|
|
651
|
+
const bytesArray = typeof bytes === "string" ? Hex2.fromHexInput(bytes).toUint8Array() : bytes;
|
|
652
|
+
const deserializer = new Deserializer(bytesArray);
|
|
653
|
+
const micropayment = MultiAgentTransaction.deserialize(deserializer);
|
|
654
|
+
const senderSignature = AccountAuthenticator.deserialize(deserializer);
|
|
655
|
+
return new _SenderBuiltMicropayment(micropayment, senderSignature);
|
|
656
|
+
}
|
|
657
|
+
};
|
|
658
|
+
|
|
572
659
|
// ../../node_modules/.pnpm/tslib@2.8.1/node_modules/tslib/tslib.es6.mjs
|
|
573
660
|
var __assign = function() {
|
|
574
661
|
__assign = Object.assign || function __assign2(t) {
|
|
@@ -4388,14 +4475,96 @@ function getShelbyIndexerClient(config) {
|
|
|
4388
4475
|
});
|
|
4389
4476
|
}
|
|
4390
4477
|
|
|
4391
|
-
// ../../packages/sdk/dist/chunk-
|
|
4392
|
-
|
|
4393
|
-
|
|
4394
|
-
|
|
4478
|
+
// ../../packages/sdk/dist/chunk-67F5YZ25.mjs
|
|
4479
|
+
var CHUNK_SIZE_PARAMS = {
|
|
4480
|
+
[
|
|
4481
|
+
"ChunkSet10MiB_Chunk1MiB"
|
|
4482
|
+
/* ChunkSet10MiB_Chunk1MiB */
|
|
4483
|
+
]: {
|
|
4484
|
+
// 1MiB
|
|
4485
|
+
chunkSizeBytes: 1 * 1024 * 1024,
|
|
4486
|
+
// 10MiB
|
|
4487
|
+
chunksetSizeBytes: 10 * 1024 * 1024
|
|
4488
|
+
}
|
|
4489
|
+
};
|
|
4490
|
+
var DEFAULT_CHUNK_SIZE_BYTES = CHUNK_SIZE_PARAMS[
|
|
4491
|
+
"ChunkSet10MiB_Chunk1MiB"
|
|
4492
|
+
/* ChunkSet10MiB_Chunk1MiB */
|
|
4493
|
+
].chunkSizeBytes;
|
|
4494
|
+
var DEFAULT_CHUNKSET_SIZE_BYTES = CHUNK_SIZE_PARAMS[
|
|
4495
|
+
"ChunkSet10MiB_Chunk1MiB"
|
|
4496
|
+
/* ChunkSet10MiB_Chunk1MiB */
|
|
4497
|
+
].chunksetSizeBytes;
|
|
4498
|
+
var ERASURE_CODE_AND_CHUNK_MAPPING = {
|
|
4499
|
+
[
|
|
4500
|
+
"ClayCode_16Total_10Data_13Helper"
|
|
4501
|
+
/* ClayCode_16Total_10Data_13Helper */
|
|
4502
|
+
]: {
|
|
4503
|
+
...CHUNK_SIZE_PARAMS.ChunkSet10MiB_Chunk1MiB
|
|
4504
|
+
}
|
|
4505
|
+
};
|
|
4506
|
+
|
|
4507
|
+
// ../../packages/sdk/dist/chunk-LZSIZJYR.mjs
|
|
4508
|
+
var ERASURE_CODE_PARAMS = {
|
|
4509
|
+
[
|
|
4510
|
+
"ClayCode_16Total_10Data_13Helper"
|
|
4511
|
+
/* ClayCode_16Total_10Data_13Helper */
|
|
4512
|
+
]: {
|
|
4513
|
+
// total chunks (data + parity)
|
|
4514
|
+
erasure_n: 16,
|
|
4515
|
+
// data chunks
|
|
4516
|
+
erasure_k: 10,
|
|
4517
|
+
// helper nodes
|
|
4518
|
+
erasure_d: 13
|
|
4519
|
+
}
|
|
4395
4520
|
};
|
|
4521
|
+
var DEFAULT_ERASURE_N = ERASURE_CODE_PARAMS[
|
|
4522
|
+
"ClayCode_16Total_10Data_13Helper"
|
|
4523
|
+
/* ClayCode_16Total_10Data_13Helper */
|
|
4524
|
+
].erasure_n;
|
|
4525
|
+
var DEFAULT_ERASURE_K = ERASURE_CODE_PARAMS[
|
|
4526
|
+
"ClayCode_16Total_10Data_13Helper"
|
|
4527
|
+
/* ClayCode_16Total_10Data_13Helper */
|
|
4528
|
+
].erasure_k;
|
|
4529
|
+
var DEFAULT_ERASURE_D = ERASURE_CODE_PARAMS[
|
|
4530
|
+
"ClayCode_16Total_10Data_13Helper"
|
|
4531
|
+
/* ClayCode_16Total_10Data_13Helper */
|
|
4532
|
+
].erasure_d;
|
|
4533
|
+
var DEFAULT_ERASURE_M = DEFAULT_ERASURE_N - DEFAULT_ERASURE_K;
|
|
4534
|
+
|
|
4535
|
+
// ../../packages/sdk/dist/chunk-W5NRGZEP.mjs
|
|
4536
|
+
import { AccountAddress as AccountAddress2 } from "@aptos-labs/ts-sdk";
|
|
4537
|
+
import { z } from "zod";
|
|
4538
|
+
var BlobNameSchema = z.string().min(1, "Blob name path parameter cannot be empty.").max(1024, "Blob name cannot exceed 1024 characters.").refine((name2) => !name2.endsWith("/"), {
|
|
4539
|
+
message: "Blob name cannot end with a slash"
|
|
4540
|
+
});
|
|
4541
|
+
|
|
4542
|
+
// ../../packages/sdk/dist/chunk-I6NG5GNL.mjs
|
|
4543
|
+
function sleep(ms) {
|
|
4544
|
+
return new Promise((resolve3) => setTimeout(resolve3, ms));
|
|
4545
|
+
}
|
|
4546
|
+
|
|
4547
|
+
// ../../packages/sdk/dist/chunk-IE6LYVIA.mjs
|
|
4548
|
+
import { z as z2 } from "zod";
|
|
4549
|
+
var StartMultipartUploadResponseSchema = z2.object({
|
|
4550
|
+
uploadId: z2.string()
|
|
4551
|
+
});
|
|
4552
|
+
var UploadPartResponseSchema = z2.object({
|
|
4553
|
+
success: z2.literal(true)
|
|
4554
|
+
});
|
|
4555
|
+
var CompleteMultipartUploadResponseSchema = z2.object({
|
|
4556
|
+
success: z2.literal(true)
|
|
4557
|
+
});
|
|
4558
|
+
var RPCErrorResponseSchema = z2.object({
|
|
4559
|
+
error: z2.string()
|
|
4560
|
+
});
|
|
4561
|
+
var StaleMicropaymentErrorResponseSchema = z2.object({
|
|
4562
|
+
error: z2.string().optional(),
|
|
4563
|
+
storedMicropayment: z2.string().optional()
|
|
4564
|
+
});
|
|
4396
4565
|
|
|
4397
4566
|
// ../../packages/sdk/dist/chunk-4JZO2D7T.mjs
|
|
4398
|
-
import { Hex as
|
|
4567
|
+
import { Hex as Hex3 } from "@aptos-labs/ts-sdk";
|
|
4399
4568
|
async function* readInChunks(input, chunkSize) {
|
|
4400
4569
|
let idx = 0;
|
|
4401
4570
|
if (isReadableStream(input)) {
|
|
@@ -4455,7 +4624,7 @@ function zeroPadBytes(buffer, desiredLength) {
|
|
|
4455
4624
|
return paddedBuffer;
|
|
4456
4625
|
}
|
|
4457
4626
|
async function concatHashes(parts) {
|
|
4458
|
-
const chunks = parts.map((part) =>
|
|
4627
|
+
const chunks = parts.map((part) => Hex3.fromHexInput(part).toUint8Array());
|
|
4459
4628
|
const totalLength = chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0);
|
|
4460
4629
|
const combined = new Uint8Array(totalLength);
|
|
4461
4630
|
let offset = 0;
|
|
@@ -4463,7 +4632,7 @@ async function concatHashes(parts) {
|
|
|
4463
4632
|
combined.set(chunk, offset);
|
|
4464
4633
|
offset += chunk.byteLength;
|
|
4465
4634
|
}
|
|
4466
|
-
return
|
|
4635
|
+
return Hex3.fromHexInput(
|
|
4467
4636
|
new Uint8Array(await crypto.subtle.digest("SHA-256", combined))
|
|
4468
4637
|
);
|
|
4469
4638
|
}
|
|
@@ -4484,496 +4653,759 @@ function getBlobNameSuffix(blobName) {
|
|
|
4484
4653
|
return parts.slice(1).join("/") || "";
|
|
4485
4654
|
}
|
|
4486
4655
|
|
|
4487
|
-
// ../../packages/sdk/dist/chunk-
|
|
4488
|
-
|
|
4489
|
-
|
|
4490
|
-
|
|
4491
|
-
|
|
4492
|
-
|
|
4493
|
-
|
|
4494
|
-
|
|
4495
|
-
|
|
4496
|
-
|
|
4497
|
-
|
|
4498
|
-
|
|
4499
|
-
|
|
4500
|
-
|
|
4501
|
-
|
|
4502
|
-
|
|
4503
|
-
|
|
4504
|
-
|
|
4505
|
-
|
|
4506
|
-
|
|
4507
|
-
|
|
4508
|
-
|
|
4509
|
-
|
|
4510
|
-
|
|
4511
|
-
|
|
4512
|
-
|
|
4656
|
+
// ../../packages/sdk/dist/chunk-PUYAZBYR.mjs
|
|
4657
|
+
import { AccountAddress as AccountAddress3 } from "@aptos-labs/ts-sdk";
|
|
4658
|
+
var MICROPAYMENT_HEADER = "X-Shelby-Micropayment";
|
|
4659
|
+
function encodeURIComponentKeepSlashes(str) {
|
|
4660
|
+
return encodeURIComponent(str).replace(/%2F/g, "/");
|
|
4661
|
+
}
|
|
4662
|
+
var ShelbyRPCClient = class {
|
|
4663
|
+
baseUrl;
|
|
4664
|
+
apiKey;
|
|
4665
|
+
rpcConfig;
|
|
4666
|
+
indexer;
|
|
4667
|
+
/**
|
|
4668
|
+
* Creates a new ShelbyRPCClient for interacting with Shelby RPC nodes.
|
|
4669
|
+
* This client handles blob storage operations including upload and download.
|
|
4670
|
+
*
|
|
4671
|
+
* @param config - The client configuration object.
|
|
4672
|
+
* @param config.network - The Shelby network to use.
|
|
4673
|
+
*
|
|
4674
|
+
* @example
|
|
4675
|
+
* ```typescript
|
|
4676
|
+
* const client = new ShelbyRPCClient({
|
|
4677
|
+
* network: Network.SHELBYNET,
|
|
4678
|
+
* apiKey: "AG-***",
|
|
4679
|
+
* });
|
|
4680
|
+
* ```
|
|
4681
|
+
*/
|
|
4682
|
+
constructor(config) {
|
|
4683
|
+
this.baseUrl = config.rpc?.baseUrl ?? NetworkToShelbyRPCBaseUrl.shelbynet;
|
|
4684
|
+
this.apiKey = config.apiKey ?? config.rpc?.apiKey;
|
|
4685
|
+
this.rpcConfig = config.rpc ?? {};
|
|
4686
|
+
this.indexer = getShelbyIndexerClient(config);
|
|
4513
4687
|
}
|
|
4514
|
-
|
|
4515
|
-
|
|
4516
|
-
|
|
4517
|
-
|
|
4518
|
-
|
|
4519
|
-
|
|
4520
|
-
|
|
4521
|
-
|
|
4522
|
-
|
|
4523
|
-
|
|
4524
|
-
|
|
4525
|
-
|
|
4526
|
-
|
|
4527
|
-
|
|
4688
|
+
async #uploadPart(uploadId, partIdx, partData) {
|
|
4689
|
+
const nRetries = 5;
|
|
4690
|
+
for (let i = 0; i < nRetries; ++i) {
|
|
4691
|
+
const partResponse = await fetch(
|
|
4692
|
+
buildRequestUrl(
|
|
4693
|
+
`/v1/multipart-uploads/${uploadId}/parts/${partIdx}`,
|
|
4694
|
+
this.baseUrl
|
|
4695
|
+
),
|
|
4696
|
+
{
|
|
4697
|
+
method: "PUT",
|
|
4698
|
+
headers: {
|
|
4699
|
+
"Content-Type": "application/octet-stream",
|
|
4700
|
+
...this.apiKey ? { Authorization: `Bearer ${this.apiKey}` } : {}
|
|
4701
|
+
},
|
|
4702
|
+
body: partData
|
|
4703
|
+
}
|
|
4704
|
+
);
|
|
4705
|
+
if (partResponse.ok) return;
|
|
4706
|
+
if (i < nRetries - 1) {
|
|
4707
|
+
const delay = 2 ** i * 100;
|
|
4708
|
+
await sleep(delay);
|
|
4709
|
+
}
|
|
4710
|
+
}
|
|
4711
|
+
throw new Error(`Failed to upload part ${partIdx}.`);
|
|
4528
4712
|
}
|
|
4529
|
-
|
|
4530
|
-
|
|
4531
|
-
|
|
4532
|
-
|
|
4533
|
-
|
|
4534
|
-
|
|
4535
|
-
|
|
4536
|
-
|
|
4537
|
-
|
|
4538
|
-
|
|
4539
|
-
|
|
4540
|
-
|
|
4541
|
-
|
|
4542
|
-
|
|
4543
|
-
|
|
4544
|
-
// ../../packages/sdk/dist/chunk-RLRI2533.mjs
|
|
4545
|
-
import { Hex as Hex3 } from "@aptos-labs/ts-sdk";
|
|
4546
|
-
import { z } from "zod";
|
|
4547
|
-
var COMMITMENT_SCHEMA_VERSION = "1.3";
|
|
4548
|
-
var ChunksetCommitmentSchema = z.object({
|
|
4549
|
-
// Chunkset root (vector commitment of child chunks)
|
|
4550
|
-
chunkset_root: z.string(),
|
|
4551
|
-
// the size is known statically from the current configuration
|
|
4552
|
-
chunk_commitments: z.array(z.string())
|
|
4553
|
-
}).refine(
|
|
4554
|
-
(data) => {
|
|
4555
|
-
return data.chunk_commitments.length === DEFAULT_ERASURE_K + DEFAULT_ERASURE_M;
|
|
4556
|
-
},
|
|
4557
|
-
{
|
|
4558
|
-
message: `Chunkset must have exactly ${DEFAULT_ERASURE_K + DEFAULT_ERASURE_M} chunks (ERASURE_K + ERASURE_M = ${DEFAULT_ERASURE_K} + ${DEFAULT_ERASURE_M})`,
|
|
4559
|
-
path: ["chunk_commitments"]
|
|
4560
|
-
}
|
|
4561
|
-
);
|
|
4562
|
-
function expectedTotalChunksets(rawSize, chunksetSize = DEFAULT_CHUNKSET_SIZE_BYTES) {
|
|
4563
|
-
if (chunksetSize <= 0) {
|
|
4564
|
-
throw new Error("chunksetSize must be positive");
|
|
4565
|
-
}
|
|
4566
|
-
if (rawSize === 0) return 1;
|
|
4567
|
-
return Math.ceil(rawSize / chunksetSize);
|
|
4568
|
-
}
|
|
4569
|
-
var BlobCommitmentsSchema = z.object({
|
|
4570
|
-
schema_version: z.string(),
|
|
4571
|
-
raw_data_size: z.number(),
|
|
4572
|
-
// FIXME I am not sure about this being here, or if it should be somewhere else
|
|
4573
|
-
blob_merkle_root: z.string(),
|
|
4574
|
-
chunkset_commitments: z.array(ChunksetCommitmentSchema)
|
|
4575
|
-
}).refine(
|
|
4576
|
-
(data) => {
|
|
4577
|
-
return expectedTotalChunksets(data.raw_data_size) === data.chunkset_commitments.length;
|
|
4578
|
-
},
|
|
4579
|
-
{
|
|
4580
|
-
message: "Total chunkset count mismatches with raw data size",
|
|
4581
|
-
// FIXME put more details in here
|
|
4582
|
-
path: ["chunkset_commitments"]
|
|
4583
|
-
}
|
|
4584
|
-
);
|
|
4585
|
-
async function generateMerkleRoot(leafHashes) {
|
|
4586
|
-
if (!leafHashes.length) {
|
|
4587
|
-
throw new Error(
|
|
4588
|
-
"An empty array cannot be used to construct a Merkle tree."
|
|
4713
|
+
async #putBlobMultipart(account, blobName, blobData, partSize = 5 * 1024 * 1024, onProgress) {
|
|
4714
|
+
const startResponse = await fetch(
|
|
4715
|
+
buildRequestUrl("/v1/multipart-uploads", this.baseUrl),
|
|
4716
|
+
{
|
|
4717
|
+
method: "POST",
|
|
4718
|
+
headers: {
|
|
4719
|
+
"Content-Type": "application/json",
|
|
4720
|
+
...this.apiKey ? { Authorization: `Bearer ${this.apiKey}` } : {}
|
|
4721
|
+
},
|
|
4722
|
+
body: JSON.stringify({
|
|
4723
|
+
rawAccount: account.toString(),
|
|
4724
|
+
rawBlobName: blobName,
|
|
4725
|
+
rawPartSize: partSize
|
|
4726
|
+
})
|
|
4727
|
+
}
|
|
4589
4728
|
);
|
|
4590
|
-
|
|
4591
|
-
|
|
4592
|
-
|
|
4593
|
-
|
|
4594
|
-
|
|
4595
|
-
|
|
4596
|
-
|
|
4597
|
-
|
|
4598
|
-
const nextLeaves = [];
|
|
4599
|
-
for (let i = 0; i < currentLeaves.length; i += 2) {
|
|
4600
|
-
nextLeaves.push(
|
|
4601
|
-
await concatHashes([
|
|
4602
|
-
currentLeaves[i].toUint8Array(),
|
|
4603
|
-
currentLeaves[i + 1].toUint8Array()
|
|
4604
|
-
])
|
|
4729
|
+
if (!startResponse.ok) {
|
|
4730
|
+
let errorBodyText = "Could not read error body";
|
|
4731
|
+
try {
|
|
4732
|
+
errorBodyText = await startResponse.text();
|
|
4733
|
+
} catch (_e) {
|
|
4734
|
+
}
|
|
4735
|
+
throw new Error(
|
|
4736
|
+
`Failed to start multipart upload! status: ${startResponse.status}, body: ${errorBodyText}`
|
|
4605
4737
|
);
|
|
4606
4738
|
}
|
|
4607
|
-
|
|
4608
|
-
|
|
4609
|
-
return currentLeaves[0];
|
|
4610
|
-
}
|
|
4611
|
-
async function generateChunksetCommitments(shouldPad, chunksetIdx, chunksetData, expectedChunksetSize, provider, onChunk) {
|
|
4612
|
-
const { erasure_n } = provider.config;
|
|
4613
|
-
const chunksetPayload = shouldPad ? zeroPadBytes(chunksetData, expectedChunksetSize) : validatePrePaddedChunkset(
|
|
4614
|
-
chunksetData,
|
|
4615
|
-
expectedChunksetSize,
|
|
4616
|
-
chunksetIdx
|
|
4617
|
-
);
|
|
4618
|
-
const { chunks } = provider.encode(chunksetPayload);
|
|
4619
|
-
if (chunks.length !== erasure_n) {
|
|
4620
|
-
throw new Error(
|
|
4621
|
-
`Erasure provider produced ${chunks.length} chunks, expected ${erasure_n}.`
|
|
4739
|
+
const { uploadId } = StartMultipartUploadResponseSchema.parse(
|
|
4740
|
+
await startResponse.json()
|
|
4622
4741
|
);
|
|
4623
|
-
|
|
4624
|
-
|
|
4625
|
-
|
|
4626
|
-
|
|
4627
|
-
|
|
4628
|
-
await
|
|
4742
|
+
const totalParts = Math.ceil(blobData.length / partSize);
|
|
4743
|
+
for (let partIdx = 0; partIdx < totalParts; partIdx++) {
|
|
4744
|
+
const start = partIdx * partSize;
|
|
4745
|
+
const end = Math.min(start + partSize, blobData.length);
|
|
4746
|
+
const partData = blobData.slice(start, end);
|
|
4747
|
+
await this.#uploadPart(uploadId, partIdx, partData);
|
|
4748
|
+
onProgress?.({
|
|
4749
|
+
partIdx,
|
|
4750
|
+
totalParts,
|
|
4751
|
+
partBytes: partData.length,
|
|
4752
|
+
uploadedBytes: end,
|
|
4753
|
+
totalBytes: blobData.length
|
|
4754
|
+
});
|
|
4629
4755
|
}
|
|
4630
|
-
|
|
4631
|
-
|
|
4632
|
-
|
|
4633
|
-
|
|
4634
|
-
|
|
4635
|
-
|
|
4636
|
-
|
|
4637
|
-
|
|
4638
|
-
|
|
4639
|
-
|
|
4640
|
-
|
|
4641
|
-
|
|
4642
|
-
}
|
|
4643
|
-
async function generateCommitments(provider, fullData, onChunk, options) {
|
|
4644
|
-
const expectedChunksetSize = DEFAULT_CHUNKSET_SIZE_BYTES;
|
|
4645
|
-
const shouldPad = options?.pad ?? true;
|
|
4646
|
-
const chunksetCommitments = [];
|
|
4647
|
-
const chunksetCommitmentHashes = [];
|
|
4648
|
-
let rawDataSize = 0;
|
|
4649
|
-
const chunksetGen = readInChunks(fullData, expectedChunksetSize);
|
|
4650
|
-
for await (const [chunksetIdx, chunksetData] of chunksetGen) {
|
|
4651
|
-
rawDataSize += chunksetData.length;
|
|
4652
|
-
const { h, entry } = await generateChunksetCommitments(
|
|
4653
|
-
shouldPad,
|
|
4654
|
-
chunksetIdx,
|
|
4655
|
-
chunksetData,
|
|
4656
|
-
expectedChunksetSize,
|
|
4657
|
-
provider,
|
|
4658
|
-
onChunk
|
|
4659
|
-
);
|
|
4660
|
-
chunksetCommitments.push(entry);
|
|
4661
|
-
chunksetCommitmentHashes.push(h);
|
|
4662
|
-
}
|
|
4663
|
-
if (rawDataSize === 0) {
|
|
4664
|
-
const zeroChunkset = new Uint8Array(expectedChunksetSize);
|
|
4665
|
-
const { h, entry } = await generateChunksetCommitments(
|
|
4666
|
-
shouldPad,
|
|
4667
|
-
0,
|
|
4668
|
-
zeroChunkset,
|
|
4669
|
-
expectedChunksetSize,
|
|
4670
|
-
provider,
|
|
4671
|
-
onChunk
|
|
4672
|
-
);
|
|
4673
|
-
chunksetCommitments.push(entry);
|
|
4674
|
-
chunksetCommitmentHashes.push(h);
|
|
4675
|
-
}
|
|
4676
|
-
return {
|
|
4677
|
-
schema_version: COMMITMENT_SCHEMA_VERSION,
|
|
4678
|
-
raw_data_size: rawDataSize,
|
|
4679
|
-
blob_merkle_root: (await generateMerkleRoot(chunksetCommitmentHashes)).toString(),
|
|
4680
|
-
chunkset_commitments: chunksetCommitments
|
|
4681
|
-
};
|
|
4682
|
-
}
|
|
4683
|
-
function validatePrePaddedChunkset(chunkset, expectedSize, chunksetIdx) {
|
|
4684
|
-
if (chunkset.byteLength !== expectedSize) {
|
|
4685
|
-
throw new Error(
|
|
4686
|
-
`Chunkset ${chunksetIdx} has size ${chunkset.byteLength} bytes but expected ${expectedSize} bytes. Enable padding or supply pre-padded data before calling generateCommitments.`
|
|
4756
|
+
const completeResponse = await fetch(
|
|
4757
|
+
buildRequestUrl(
|
|
4758
|
+
`/v1/multipart-uploads/${uploadId}/complete`,
|
|
4759
|
+
this.baseUrl
|
|
4760
|
+
),
|
|
4761
|
+
{
|
|
4762
|
+
method: "POST",
|
|
4763
|
+
headers: {
|
|
4764
|
+
"Content-Type": "application/json",
|
|
4765
|
+
...this.apiKey ? { Authorization: `Bearer ${this.apiKey}` } : {}
|
|
4766
|
+
}
|
|
4767
|
+
}
|
|
4687
4768
|
);
|
|
4769
|
+
if (!completeResponse.ok) {
|
|
4770
|
+
let errorBodyText = "Could not read error body";
|
|
4771
|
+
try {
|
|
4772
|
+
errorBodyText = await completeResponse.text();
|
|
4773
|
+
} catch (_e) {
|
|
4774
|
+
}
|
|
4775
|
+
throw new Error(
|
|
4776
|
+
`Failed to complete multipart upload! status: ${completeResponse.status}, body: ${errorBodyText}`
|
|
4777
|
+
);
|
|
4778
|
+
}
|
|
4688
4779
|
}
|
|
4689
|
-
return chunkset;
|
|
4690
|
-
}
|
|
4691
|
-
|
|
4692
|
-
// ../../packages/sdk/dist/chunk-JWVMZ2Y7.mjs
|
|
4693
|
-
import {
|
|
4694
|
-
AccountAddress as AccountAddress3,
|
|
4695
|
-
Aptos as Aptos2,
|
|
4696
|
-
AptosConfig as AptosConfig2,
|
|
4697
|
-
Hex as Hex4,
|
|
4698
|
-
MoveVector,
|
|
4699
|
-
U32
|
|
4700
|
-
} from "@aptos-labs/ts-sdk";
|
|
4701
|
-
var ShelbyBlobClient = class _ShelbyBlobClient {
|
|
4702
|
-
aptos;
|
|
4703
|
-
deployer;
|
|
4704
|
-
indexer;
|
|
4705
4780
|
/**
|
|
4706
|
-
*
|
|
4707
|
-
*
|
|
4781
|
+
* Uploads blob data to the Shelby RPC node for storage by storage providers.
|
|
4782
|
+
* This method should be called after blob commitments have been registered on the blockchain.
|
|
4783
|
+
* Uses multipart upload for efficient handling of large files.
|
|
4708
4784
|
*
|
|
4709
|
-
* @param
|
|
4710
|
-
* @param
|
|
4785
|
+
* @param params.account - The account that owns the blob.
|
|
4786
|
+
* @param params.blobName - The name/path of the blob (e.g. "folder/file.txt").
|
|
4787
|
+
* @param params.blobData - The raw blob data as a Uint8Array.
|
|
4711
4788
|
*
|
|
4712
4789
|
* @example
|
|
4713
4790
|
* ```typescript
|
|
4714
|
-
* const
|
|
4715
|
-
*
|
|
4716
|
-
*
|
|
4717
|
-
*
|
|
4718
|
-
*
|
|
4719
|
-
*
|
|
4720
|
-
* },
|
|
4791
|
+
* const blobData = new TextEncoder().encode("Hello, world!");
|
|
4792
|
+
*
|
|
4793
|
+
* await client.putBlob({
|
|
4794
|
+
* account: AccountAddress.from("0x1"),
|
|
4795
|
+
* blobName: "greetings/hello.txt",
|
|
4796
|
+
* blobData
|
|
4721
4797
|
* });
|
|
4722
4798
|
* ```
|
|
4723
4799
|
*/
|
|
4724
|
-
|
|
4725
|
-
|
|
4726
|
-
|
|
4727
|
-
|
|
4728
|
-
|
|
4729
|
-
|
|
4730
|
-
|
|
4731
|
-
|
|
4732
|
-
|
|
4733
|
-
});
|
|
4734
|
-
this.aptos = new Aptos2(aptosConfig);
|
|
4735
|
-
this.deployer = config.deployer ?? AccountAddress3.fromString(SHELBY_DEPLOYER);
|
|
4736
|
-
this.indexer = getShelbyIndexerClient(config);
|
|
4800
|
+
async putBlob(params) {
|
|
4801
|
+
BlobNameSchema.parse(params.blobName);
|
|
4802
|
+
await this.#putBlobMultipart(
|
|
4803
|
+
params.account,
|
|
4804
|
+
params.blobName,
|
|
4805
|
+
params.blobData,
|
|
4806
|
+
void 0,
|
|
4807
|
+
params.onProgress
|
|
4808
|
+
);
|
|
4737
4809
|
}
|
|
4810
|
+
// FIXME make this possible to stream in put ^^^
|
|
4738
4811
|
/**
|
|
4739
|
-
*
|
|
4740
|
-
*
|
|
4812
|
+
* Downloads a blob from the Shelby RPC node.
|
|
4813
|
+
* Returns a streaming response with validation to ensure data integrity.
|
|
4741
4814
|
*
|
|
4742
|
-
* @param params.account - The account
|
|
4743
|
-
* @param params.
|
|
4744
|
-
* @
|
|
4815
|
+
* @param params.account - The account that owns the blob.
|
|
4816
|
+
* @param params.blobName - The name/path of the blob (e.g. "folder/file.txt").
|
|
4817
|
+
* @param params.range - Optional byte range for partial downloads.
|
|
4818
|
+
* @param params.range.start - Starting byte position (inclusive).
|
|
4819
|
+
* @param params.range.end - Ending byte position (inclusive, optional).
|
|
4820
|
+
* @param params.micropayment - Optional micropayment to attach to the request.
|
|
4821
|
+
*
|
|
4822
|
+
* @returns A ShelbyBlob object containing the account, name, readable stream, and content length.
|
|
4823
|
+
*
|
|
4824
|
+
* @throws Error if the download fails or content length doesn't match.
|
|
4825
|
+
* @throws StaleChannelStateError if the micropayment is stale (server has newer state).
|
|
4745
4826
|
*
|
|
4746
4827
|
* @example
|
|
4747
4828
|
* ```typescript
|
|
4748
|
-
*
|
|
4749
|
-
*
|
|
4750
|
-
*
|
|
4829
|
+
* // Download entire blob
|
|
4830
|
+
* const blob = await client.getBlob({
|
|
4831
|
+
* account: AccountAddress.from("0x1"),
|
|
4832
|
+
* blobName: "documents/report.pdf"
|
|
4833
|
+
* });
|
|
4834
|
+
*
|
|
4835
|
+
* // Download partial content (bytes 100-199)
|
|
4836
|
+
* const partial = await client.getBlob({
|
|
4837
|
+
* account: AccountAddress.from("0x1"),
|
|
4838
|
+
* blobName: "large-file.bin",
|
|
4839
|
+
* range: { start: 100, end: 199 }
|
|
4840
|
+
* });
|
|
4841
|
+
*
|
|
4842
|
+
* // Download with micropayment
|
|
4843
|
+
* const blob = await client.getBlob({
|
|
4844
|
+
* account: AccountAddress.from("0x1"),
|
|
4845
|
+
* blobName: "documents/report.pdf",
|
|
4846
|
+
* micropayment: senderBuiltMicropayment
|
|
4751
4847
|
* });
|
|
4752
4848
|
* ```
|
|
4753
4849
|
*/
|
|
4754
|
-
async
|
|
4755
|
-
|
|
4756
|
-
|
|
4757
|
-
|
|
4758
|
-
|
|
4759
|
-
|
|
4760
|
-
|
|
4761
|
-
|
|
4762
|
-
|
|
4763
|
-
|
|
4764
|
-
|
|
4850
|
+
async getBlob(params) {
|
|
4851
|
+
BlobNameSchema.parse(params.blobName);
|
|
4852
|
+
const url = buildRequestUrl(
|
|
4853
|
+
`/v1/blobs/${params.account.toString()}/${encodeURIComponentKeepSlashes(
|
|
4854
|
+
params.blobName
|
|
4855
|
+
)}`,
|
|
4856
|
+
this.baseUrl
|
|
4857
|
+
);
|
|
4858
|
+
const headers = new Headers();
|
|
4859
|
+
if (params.range !== void 0) {
|
|
4860
|
+
const { start, end } = params.range;
|
|
4861
|
+
if (end === void 0) {
|
|
4862
|
+
headers.set("Range", `bytes=${start}-`);
|
|
4863
|
+
} else {
|
|
4864
|
+
if (end < start) {
|
|
4865
|
+
throw new Error("Range end cannot be less than start.");
|
|
4765
4866
|
}
|
|
4766
|
-
|
|
4767
|
-
if (!rawMetadata?.[0]?.vec?.[0]) {
|
|
4768
|
-
return void 0;
|
|
4867
|
+
headers.set("Range", `bytes=${start}-${end}`);
|
|
4769
4868
|
}
|
|
4770
|
-
|
|
4771
|
-
|
|
4772
|
-
|
|
4773
|
-
|
|
4774
|
-
|
|
4775
|
-
|
|
4776
|
-
|
|
4777
|
-
|
|
4778
|
-
|
|
4869
|
+
}
|
|
4870
|
+
if (this.apiKey) {
|
|
4871
|
+
headers.set("Authorization", `Bearer ${this.apiKey}`);
|
|
4872
|
+
}
|
|
4873
|
+
if (params.micropayment) {
|
|
4874
|
+
const bytes = params.micropayment.bcsToBytes();
|
|
4875
|
+
const binaryString = Array.from(
|
|
4876
|
+
bytes,
|
|
4877
|
+
(byte) => String.fromCharCode(byte)
|
|
4878
|
+
).join("");
|
|
4879
|
+
headers.set(MICROPAYMENT_HEADER, btoa(binaryString));
|
|
4880
|
+
}
|
|
4881
|
+
const response = await fetch(url, { headers });
|
|
4882
|
+
if (response.status === 409) {
|
|
4883
|
+
let json;
|
|
4884
|
+
try {
|
|
4885
|
+
json = await response.json();
|
|
4886
|
+
} catch {
|
|
4779
4887
|
throw new Error(
|
|
4780
|
-
|
|
4888
|
+
`Failed to download blob: ${response.status} ${response.statusText}`
|
|
4781
4889
|
);
|
|
4782
4890
|
}
|
|
4783
|
-
|
|
4784
|
-
|
|
4785
|
-
|
|
4786
|
-
|
|
4787
|
-
|
|
4788
|
-
name: params.name,
|
|
4789
|
-
blobNameSuffix: getBlobNameSuffix(params.name),
|
|
4790
|
-
size: Number(metadata.blob_size),
|
|
4791
|
-
encoding,
|
|
4792
|
-
expirationMicros: Number(metadata.expiration_micros),
|
|
4793
|
-
creationMicros: Number(metadata.creation_micros),
|
|
4794
|
-
sliceAddress: AccountAddress3.fromString(metadata.slice.inner),
|
|
4795
|
-
isWritten: metadata.is_written
|
|
4796
|
-
};
|
|
4797
|
-
} catch (error) {
|
|
4798
|
-
if (error instanceof Error && // Depending on the network, the error message may show up differently.
|
|
4799
|
-
(error.message?.includes("sub_status: Some(404)") || error.message?.includes("EBLOB_NOT_FOUND"))) {
|
|
4800
|
-
return void 0;
|
|
4891
|
+
const parseResult = StaleMicropaymentErrorResponseSchema.safeParse(json);
|
|
4892
|
+
if (!parseResult.success) {
|
|
4893
|
+
throw new Error(
|
|
4894
|
+
`Failed to download blob: ${response.status} ${response.statusText}`
|
|
4895
|
+
);
|
|
4801
4896
|
}
|
|
4802
|
-
|
|
4897
|
+
const errorBody = parseResult.data;
|
|
4898
|
+
if (errorBody.storedMicropayment) {
|
|
4899
|
+
throw StaleChannelStateError.fromBase64(
|
|
4900
|
+
errorBody.storedMicropayment,
|
|
4901
|
+
errorBody.error
|
|
4902
|
+
);
|
|
4903
|
+
}
|
|
4904
|
+
throw new Error(
|
|
4905
|
+
errorBody.error ?? `Failed to download blob: ${response.status} ${response.statusText}`
|
|
4906
|
+
);
|
|
4803
4907
|
}
|
|
4804
|
-
|
|
4805
|
-
|
|
4806
|
-
|
|
4807
|
-
|
|
4808
|
-
|
|
4809
|
-
|
|
4810
|
-
|
|
4811
|
-
|
|
4812
|
-
|
|
4813
|
-
|
|
4814
|
-
|
|
4815
|
-
|
|
4816
|
-
|
|
4817
|
-
|
|
4818
|
-
|
|
4819
|
-
|
|
4820
|
-
|
|
4821
|
-
|
|
4822
|
-
|
|
4823
|
-
|
|
4824
|
-
|
|
4825
|
-
|
|
4826
|
-
|
|
4827
|
-
|
|
4828
|
-
|
|
4829
|
-
|
|
4830
|
-
|
|
4908
|
+
if (!response.ok) {
|
|
4909
|
+
throw new Error(
|
|
4910
|
+
`Failed to download blob: ${response.status} ${response.statusText}`
|
|
4911
|
+
);
|
|
4912
|
+
}
|
|
4913
|
+
if (!response.body) {
|
|
4914
|
+
throw new Error("Response body is null");
|
|
4915
|
+
}
|
|
4916
|
+
const contentLengthHeader = response.headers.get("content-length");
|
|
4917
|
+
if (contentLengthHeader === null) {
|
|
4918
|
+
throw new Error(
|
|
4919
|
+
"Response did not have content-length header, which is required"
|
|
4920
|
+
);
|
|
4921
|
+
}
|
|
4922
|
+
const expectedContentLength = Number.parseInt(contentLengthHeader, 10);
|
|
4923
|
+
if (Number.isNaN(expectedContentLength)) {
|
|
4924
|
+
throw new Error(
|
|
4925
|
+
`Invalid content-length header received: ${contentLengthHeader}`
|
|
4926
|
+
);
|
|
4927
|
+
}
|
|
4928
|
+
const validatingStream = new ReadableStream({
|
|
4929
|
+
start(controller) {
|
|
4930
|
+
const maybeReader = response.body?.getReader();
|
|
4931
|
+
if (!maybeReader) {
|
|
4932
|
+
controller.error(new Error("Response body reader is unavailable"));
|
|
4933
|
+
return;
|
|
4934
|
+
}
|
|
4935
|
+
const reader = maybeReader;
|
|
4936
|
+
let bytesReceived = 0;
|
|
4937
|
+
function pump() {
|
|
4938
|
+
return reader.read().then(({ done, value }) => {
|
|
4939
|
+
if (done) {
|
|
4940
|
+
if (bytesReceived !== expectedContentLength) {
|
|
4941
|
+
controller.error(
|
|
4942
|
+
new Error(
|
|
4943
|
+
`Downloaded data size (${bytesReceived} bytes) does not match content-length header (${expectedContentLength} bytes). This might indicate a partial or corrupted download.`
|
|
4944
|
+
)
|
|
4945
|
+
);
|
|
4946
|
+
return;
|
|
4947
|
+
}
|
|
4948
|
+
controller.close();
|
|
4949
|
+
return;
|
|
4950
|
+
}
|
|
4951
|
+
bytesReceived += value.byteLength;
|
|
4952
|
+
controller.enqueue(value);
|
|
4953
|
+
return pump();
|
|
4954
|
+
}).catch((error) => {
|
|
4955
|
+
controller.error(error);
|
|
4956
|
+
});
|
|
4957
|
+
}
|
|
4958
|
+
return pump();
|
|
4959
|
+
}
|
|
4831
4960
|
});
|
|
4832
|
-
|
|
4833
|
-
|
|
4834
|
-
|
|
4835
|
-
|
|
4836
|
-
|
|
4837
|
-
* @param params.pagination (optional) - The pagination options.
|
|
4838
|
-
* @param params.orderBy (optional) - The order by clause to sort the blobs by.
|
|
4839
|
-
* @returns The blob metadata for all the blobs that match the where clause.
|
|
4840
|
-
*
|
|
4841
|
-
* @example
|
|
4842
|
-
* ```typescript
|
|
4843
|
-
* // BlobMetadata[]
|
|
4844
|
-
* const blobs = await client.getBlobs({
|
|
4845
|
-
* where: { owner: { _eq: AccountAddress.fromString("0x1").toString() } },
|
|
4846
|
-
* });
|
|
4847
|
-
* ```
|
|
4848
|
-
*/
|
|
4849
|
-
async getBlobs(params = {}) {
|
|
4850
|
-
const { limit, offset } = params.pagination ?? {};
|
|
4851
|
-
const { orderBy, where } = params;
|
|
4852
|
-
const currentMicros = Date.now() * 1e3;
|
|
4853
|
-
const defaultActiveFilter = {
|
|
4854
|
-
expires_at: { _gte: currentMicros },
|
|
4855
|
-
is_deleted: { _eq: "0" }
|
|
4961
|
+
return {
|
|
4962
|
+
account: AccountAddress3.from(params.account),
|
|
4963
|
+
name: params.blobName,
|
|
4964
|
+
readable: validatingStream,
|
|
4965
|
+
contentLength: expectedContentLength
|
|
4856
4966
|
};
|
|
4857
|
-
const finalWhere = where !== void 0 ? { ...defaultActiveFilter, ...where } : defaultActiveFilter;
|
|
4858
|
-
const { blobs } = await this.indexer.getBlobs({
|
|
4859
|
-
where: finalWhere,
|
|
4860
|
-
limit,
|
|
4861
|
-
offset,
|
|
4862
|
-
orderBy
|
|
4863
|
-
});
|
|
4864
|
-
return blobs.map(
|
|
4865
|
-
(blob) => ({
|
|
4866
|
-
owner: AccountAddress3.from(blob.owner),
|
|
4867
|
-
name: blob.blob_name,
|
|
4868
|
-
blobNameSuffix: getBlobNameSuffix(blob.blob_name),
|
|
4869
|
-
blobMerkleRoot: Hex4.fromHexInput(blob.blob_commitment).toUint8Array(),
|
|
4870
|
-
size: Number(blob.size),
|
|
4871
|
-
// TODO: Add encoding when supported in NCI
|
|
4872
|
-
encoding: {
|
|
4873
|
-
variant: "clay",
|
|
4874
|
-
...ERASURE_CODE_PARAMS.ClayCode_16Total_10Data_13Helper,
|
|
4875
|
-
...ERASURE_CODE_AND_CHUNK_MAPPING.ClayCode_16Total_10Data_13Helper
|
|
4876
|
-
},
|
|
4877
|
-
expirationMicros: Number(blob.expires_at),
|
|
4878
|
-
creationMicros: Number(blob.created_at),
|
|
4879
|
-
sliceAddress: AccountAddress3.from(blob.slice_address),
|
|
4880
|
-
isWritten: Boolean(Number(blob.is_written))
|
|
4881
|
-
})
|
|
4882
|
-
);
|
|
4883
4967
|
}
|
|
4884
|
-
|
|
4885
|
-
|
|
4886
|
-
|
|
4887
|
-
|
|
4888
|
-
|
|
4889
|
-
|
|
4890
|
-
|
|
4891
|
-
|
|
4892
|
-
|
|
4893
|
-
|
|
4894
|
-
|
|
4895
|
-
|
|
4896
|
-
|
|
4897
|
-
|
|
4898
|
-
|
|
4899
|
-
|
|
4900
|
-
|
|
4901
|
-
|
|
4902
|
-
|
|
4903
|
-
|
|
4904
|
-
|
|
4905
|
-
|
|
4906
|
-
|
|
4907
|
-
|
|
4908
|
-
|
|
4909
|
-
|
|
4910
|
-
|
|
4911
|
-
|
|
4968
|
+
};
|
|
4969
|
+
|
|
4970
|
+
// ../../packages/sdk/dist/chunk-OTBLZL2S.mjs
|
|
4971
|
+
import { AccountAddress as AccountAddress4 } from "@aptos-labs/ts-sdk";
|
|
4972
|
+
var createBlobKey = (params) => {
|
|
4973
|
+
return `@${AccountAddress4.from(params.account).toStringLongWithoutPrefix()}/${params.blobName}`;
|
|
4974
|
+
};
|
|
4975
|
+
|
|
4976
|
+
// ../../packages/sdk/dist/chunk-RLRI2533.mjs
|
|
4977
|
+
import { Hex as Hex4 } from "@aptos-labs/ts-sdk";
|
|
4978
|
+
import { z as z3 } from "zod";
|
|
4979
|
+
var COMMITMENT_SCHEMA_VERSION = "1.3";
|
|
4980
|
+
var ChunksetCommitmentSchema = z3.object({
|
|
4981
|
+
// Chunkset root (vector commitment of child chunks)
|
|
4982
|
+
chunkset_root: z3.string(),
|
|
4983
|
+
// the size is known statically from the current configuration
|
|
4984
|
+
chunk_commitments: z3.array(z3.string())
|
|
4985
|
+
}).refine(
|
|
4986
|
+
(data) => {
|
|
4987
|
+
return data.chunk_commitments.length === DEFAULT_ERASURE_K + DEFAULT_ERASURE_M;
|
|
4988
|
+
},
|
|
4989
|
+
{
|
|
4990
|
+
message: `Chunkset must have exactly ${DEFAULT_ERASURE_K + DEFAULT_ERASURE_M} chunks (ERASURE_K + ERASURE_M = ${DEFAULT_ERASURE_K} + ${DEFAULT_ERASURE_M})`,
|
|
4991
|
+
path: ["chunk_commitments"]
|
|
4992
|
+
}
|
|
4993
|
+
);
|
|
4994
|
+
function expectedTotalChunksets(rawSize, chunksetSize = DEFAULT_CHUNKSET_SIZE_BYTES) {
|
|
4995
|
+
if (chunksetSize <= 0) {
|
|
4996
|
+
throw new Error("chunksetSize must be positive");
|
|
4997
|
+
}
|
|
4998
|
+
if (rawSize === 0) return 1;
|
|
4999
|
+
return Math.ceil(rawSize / chunksetSize);
|
|
5000
|
+
}
|
|
5001
|
+
var BlobCommitmentsSchema = z3.object({
|
|
5002
|
+
schema_version: z3.string(),
|
|
5003
|
+
raw_data_size: z3.number(),
|
|
5004
|
+
// FIXME I am not sure about this being here, or if it should be somewhere else
|
|
5005
|
+
blob_merkle_root: z3.string(),
|
|
5006
|
+
chunkset_commitments: z3.array(ChunksetCommitmentSchema)
|
|
5007
|
+
}).refine(
|
|
5008
|
+
(data) => {
|
|
5009
|
+
return expectedTotalChunksets(data.raw_data_size) === data.chunkset_commitments.length;
|
|
5010
|
+
},
|
|
5011
|
+
{
|
|
5012
|
+
message: "Total chunkset count mismatches with raw data size",
|
|
5013
|
+
// FIXME put more details in here
|
|
5014
|
+
path: ["chunkset_commitments"]
|
|
5015
|
+
}
|
|
5016
|
+
);
|
|
5017
|
+
async function generateMerkleRoot(leafHashes) {
|
|
5018
|
+
if (!leafHashes.length) {
|
|
5019
|
+
throw new Error(
|
|
5020
|
+
"An empty array cannot be used to construct a Merkle tree."
|
|
4912
5021
|
);
|
|
4913
5022
|
}
|
|
4914
|
-
|
|
4915
|
-
|
|
4916
|
-
|
|
4917
|
-
|
|
4918
|
-
|
|
4919
|
-
|
|
4920
|
-
|
|
4921
|
-
|
|
4922
|
-
|
|
4923
|
-
|
|
4924
|
-
|
|
4925
|
-
|
|
4926
|
-
|
|
4927
|
-
|
|
4928
|
-
|
|
5023
|
+
const zeroArray = new Uint8Array(leafHashes[0].toUint8Array().length);
|
|
5024
|
+
const zeroBytes = Hex4.fromHexInput(zeroArray);
|
|
5025
|
+
let currentLeaves = leafHashes;
|
|
5026
|
+
while (currentLeaves.length > 1) {
|
|
5027
|
+
if (currentLeaves.length % 2 !== 0) {
|
|
5028
|
+
currentLeaves.push(zeroBytes);
|
|
5029
|
+
}
|
|
5030
|
+
const nextLeaves = [];
|
|
5031
|
+
for (let i = 0; i < currentLeaves.length; i += 2) {
|
|
5032
|
+
nextLeaves.push(
|
|
5033
|
+
await concatHashes([
|
|
5034
|
+
currentLeaves[i].toUint8Array(),
|
|
5035
|
+
currentLeaves[i + 1].toUint8Array()
|
|
5036
|
+
])
|
|
5037
|
+
);
|
|
5038
|
+
}
|
|
5039
|
+
currentLeaves = nextLeaves;
|
|
5040
|
+
}
|
|
5041
|
+
return currentLeaves[0];
|
|
5042
|
+
}
|
|
5043
|
+
async function generateChunksetCommitments(shouldPad, chunksetIdx, chunksetData, expectedChunksetSize, provider, onChunk) {
|
|
5044
|
+
const { erasure_n } = provider.config;
|
|
5045
|
+
const chunksetPayload = shouldPad ? zeroPadBytes(chunksetData, expectedChunksetSize) : validatePrePaddedChunkset(
|
|
5046
|
+
chunksetData,
|
|
5047
|
+
expectedChunksetSize,
|
|
5048
|
+
chunksetIdx
|
|
5049
|
+
);
|
|
5050
|
+
const { chunks } = provider.encode(chunksetPayload);
|
|
5051
|
+
if (chunks.length !== erasure_n) {
|
|
5052
|
+
throw new Error(
|
|
5053
|
+
`Erasure provider produced ${chunks.length} chunks, expected ${erasure_n}.`
|
|
5054
|
+
);
|
|
5055
|
+
}
|
|
5056
|
+
const chunkRoots = provider.getChunkMerkleRoots();
|
|
5057
|
+
let chunkIdx = 0;
|
|
5058
|
+
for (const chunkData of chunks) {
|
|
5059
|
+
if (onChunk !== void 0) {
|
|
5060
|
+
await onChunk(chunksetIdx, chunkIdx, chunkData);
|
|
5061
|
+
}
|
|
5062
|
+
chunkIdx += 1;
|
|
5063
|
+
}
|
|
5064
|
+
const a = await generateMerkleRoot(
|
|
5065
|
+
chunkRoots.map((a2) => Hex4.fromHexInput(a2))
|
|
5066
|
+
);
|
|
5067
|
+
const entry = {
|
|
5068
|
+
chunkset_root: a.toString(),
|
|
5069
|
+
chunk_commitments: chunkRoots.map(
|
|
5070
|
+
(chunk) => Hex4.fromHexInput(chunk).toString()
|
|
5071
|
+
)
|
|
5072
|
+
};
|
|
5073
|
+
return { h: a, entry };
|
|
5074
|
+
}
|
|
5075
|
+
async function generateCommitments(provider, fullData, onChunk, options) {
|
|
5076
|
+
const expectedChunksetSize = DEFAULT_CHUNKSET_SIZE_BYTES;
|
|
5077
|
+
const shouldPad = options?.pad ?? true;
|
|
5078
|
+
const chunksetCommitments = [];
|
|
5079
|
+
const chunksetCommitmentHashes = [];
|
|
5080
|
+
let rawDataSize = 0;
|
|
5081
|
+
const chunksetGen = readInChunks(fullData, expectedChunksetSize);
|
|
5082
|
+
for await (const [chunksetIdx, chunksetData] of chunksetGen) {
|
|
5083
|
+
rawDataSize += chunksetData.length;
|
|
5084
|
+
const { h, entry } = await generateChunksetCommitments(
|
|
5085
|
+
shouldPad,
|
|
5086
|
+
chunksetIdx,
|
|
5087
|
+
chunksetData,
|
|
5088
|
+
expectedChunksetSize,
|
|
5089
|
+
provider,
|
|
5090
|
+
onChunk
|
|
5091
|
+
);
|
|
5092
|
+
chunksetCommitments.push(entry);
|
|
5093
|
+
chunksetCommitmentHashes.push(h);
|
|
5094
|
+
}
|
|
5095
|
+
if (rawDataSize === 0) {
|
|
5096
|
+
const zeroChunkset = new Uint8Array(expectedChunksetSize);
|
|
5097
|
+
const { h, entry } = await generateChunksetCommitments(
|
|
5098
|
+
shouldPad,
|
|
5099
|
+
0,
|
|
5100
|
+
zeroChunkset,
|
|
5101
|
+
expectedChunksetSize,
|
|
5102
|
+
provider,
|
|
5103
|
+
onChunk
|
|
5104
|
+
);
|
|
5105
|
+
chunksetCommitments.push(entry);
|
|
5106
|
+
chunksetCommitmentHashes.push(h);
|
|
5107
|
+
}
|
|
5108
|
+
return {
|
|
5109
|
+
schema_version: COMMITMENT_SCHEMA_VERSION,
|
|
5110
|
+
raw_data_size: rawDataSize,
|
|
5111
|
+
blob_merkle_root: (await generateMerkleRoot(chunksetCommitmentHashes)).toString(),
|
|
5112
|
+
chunkset_commitments: chunksetCommitments
|
|
5113
|
+
};
|
|
5114
|
+
}
|
|
5115
|
+
function validatePrePaddedChunkset(chunkset, expectedSize, chunksetIdx) {
|
|
5116
|
+
if (chunkset.byteLength !== expectedSize) {
|
|
5117
|
+
throw new Error(
|
|
5118
|
+
`Chunkset ${chunksetIdx} has size ${chunkset.byteLength} bytes but expected ${expectedSize} bytes. Enable padding or supply pre-padded data before calling generateCommitments.`
|
|
5119
|
+
);
|
|
4929
5120
|
}
|
|
5121
|
+
return chunkset;
|
|
5122
|
+
}
|
|
5123
|
+
|
|
5124
|
+
// ../../packages/sdk/dist/chunk-JWVMZ2Y7.mjs
|
|
5125
|
+
import {
|
|
5126
|
+
AccountAddress as AccountAddress5,
|
|
5127
|
+
Aptos as Aptos2,
|
|
5128
|
+
AptosConfig as AptosConfig2,
|
|
5129
|
+
Hex as Hex5,
|
|
5130
|
+
MoveVector,
|
|
5131
|
+
U32
|
|
5132
|
+
} from "@aptos-labs/ts-sdk";
|
|
5133
|
+
var ShelbyBlobClient = class _ShelbyBlobClient {
|
|
5134
|
+
aptos;
|
|
5135
|
+
deployer;
|
|
5136
|
+
indexer;
|
|
4930
5137
|
/**
|
|
4931
|
-
*
|
|
5138
|
+
* The ShelbyBlobClient is used to interact with the Shelby contract on the Aptos blockchain. This
|
|
5139
|
+
* includes functions for registering blob commitments and retrieving blob metadata.
|
|
4932
5140
|
*
|
|
4933
|
-
* @param
|
|
4934
|
-
* @
|
|
5141
|
+
* @param config - The client configuration object.
|
|
5142
|
+
* @param config.network - The Shelby network to use.
|
|
4935
5143
|
*
|
|
4936
5144
|
* @example
|
|
4937
5145
|
* ```typescript
|
|
4938
|
-
* const
|
|
5146
|
+
* const blobClient = new ShelbyBlobClient({
|
|
5147
|
+
* aptos: {
|
|
5148
|
+
* network: Network.SHELBYNET,
|
|
5149
|
+
* clientConfig: {
|
|
5150
|
+
* API_KEY: "AG-***",
|
|
5151
|
+
* },
|
|
5152
|
+
* },
|
|
5153
|
+
* });
|
|
4939
5154
|
* ```
|
|
4940
5155
|
*/
|
|
4941
|
-
|
|
4942
|
-
const
|
|
4943
|
-
const
|
|
4944
|
-
|
|
5156
|
+
constructor(config) {
|
|
5157
|
+
const baseAptosConfig = getAptosConfig(config);
|
|
5158
|
+
const aptosConfig = new AptosConfig2({
|
|
5159
|
+
...baseAptosConfig,
|
|
5160
|
+
clientConfig: {
|
|
5161
|
+
...baseAptosConfig.clientConfig,
|
|
5162
|
+
// Only use top-level apiKey if no API_KEY is already provided in Aptos settings
|
|
5163
|
+
API_KEY: baseAptosConfig.clientConfig?.API_KEY ?? config.apiKey
|
|
5164
|
+
}
|
|
5165
|
+
});
|
|
5166
|
+
this.aptos = new Aptos2(aptosConfig);
|
|
5167
|
+
this.deployer = config.deployer ?? AccountAddress5.fromString(SHELBY_DEPLOYER);
|
|
5168
|
+
this.indexer = getShelbyIndexerClient(config);
|
|
4945
5169
|
}
|
|
4946
5170
|
/**
|
|
4947
|
-
*
|
|
4948
|
-
*
|
|
4949
|
-
* @param params.account - The account that is signing and paying for the transaction.
|
|
4950
|
-
* @param params.blobName - The name/path of the blob (e.g. "foo/bar.txt").
|
|
4951
|
-
* @param params.blobMerkleRoot - The merkle root of the blob commitments.
|
|
4952
|
-
* @param params.size - The size of the blob in bytes.
|
|
4953
|
-
* @param params.expirationMicros - The expiration time of the blob in microseconds.
|
|
4954
|
-
* @param params.options - Optional transaction building options.
|
|
4955
|
-
* @param params.options.chunksetSizeBytes - Custom chunkset size (defaults to DEFAULT_CHUNKSET_SIZE_BYTES).
|
|
4956
|
-
* @param params.options.build - Additional Aptos transaction building options.
|
|
5171
|
+
* Retrieves the blob metadata from the blockchain. If it does not exist,
|
|
5172
|
+
* returns `undefined`.
|
|
4957
5173
|
*
|
|
4958
|
-
* @
|
|
5174
|
+
* @param params.account - The account namespace the blob is stored in (e.g. "0x1")
|
|
5175
|
+
* @param params.name - The name of the blob (e.g. "foo/bar")
|
|
5176
|
+
* @returns The blob metadata.
|
|
4959
5177
|
*
|
|
4960
5178
|
* @example
|
|
4961
5179
|
* ```typescript
|
|
4962
|
-
* const
|
|
4963
|
-
*
|
|
4964
|
-
*
|
|
4965
|
-
* const { transaction } = await client.registerBlob({
|
|
4966
|
-
* account: signer,
|
|
4967
|
-
* blobName: "foo/bar.txt",
|
|
4968
|
-
* blobMerkleRoot: blobCommitments.blob_merkle_root,
|
|
4969
|
-
* size: data.length,
|
|
4970
|
-
* expirationMicros: Date.now() * 1000 + 3600_000_000, // 1 hour from now in microseconds
|
|
5180
|
+
* const metadata = await client.getBlobMetadata({
|
|
5181
|
+
* account: AccountAddress.fromString("0x1"),
|
|
5182
|
+
* name: "foo/bar.txt",
|
|
4971
5183
|
* });
|
|
4972
5184
|
* ```
|
|
4973
5185
|
*/
|
|
4974
|
-
async
|
|
4975
|
-
|
|
4976
|
-
|
|
5186
|
+
async getBlobMetadata(params) {
|
|
5187
|
+
try {
|
|
5188
|
+
const rawMetadata = await this.aptos.view({
|
|
5189
|
+
payload: {
|
|
5190
|
+
function: `${this.deployer.toString()}::blob_metadata::get_blob_metadata`,
|
|
5191
|
+
functionArguments: [
|
|
5192
|
+
createBlobKey({
|
|
5193
|
+
account: params.account,
|
|
5194
|
+
blobName: params.name
|
|
5195
|
+
})
|
|
5196
|
+
]
|
|
5197
|
+
}
|
|
5198
|
+
});
|
|
5199
|
+
if (!rawMetadata?.[0]?.vec?.[0]) {
|
|
5200
|
+
return void 0;
|
|
5201
|
+
}
|
|
5202
|
+
const metadata = rawMetadata[0].vec[0];
|
|
5203
|
+
let encoding;
|
|
5204
|
+
if (metadata.encoding.__variant__ === "ClayCode_16Total_10Data_13Helper") {
|
|
5205
|
+
encoding = {
|
|
5206
|
+
variant: "clay",
|
|
5207
|
+
...ERASURE_CODE_PARAMS[metadata.encoding.__variant__],
|
|
5208
|
+
...ERASURE_CODE_AND_CHUNK_MAPPING[metadata.encoding.__variant__]
|
|
5209
|
+
};
|
|
5210
|
+
} else {
|
|
5211
|
+
throw new Error(
|
|
5212
|
+
"Could not parse encoding from Shelby Smart Contract, this SDK is out of date."
|
|
5213
|
+
);
|
|
5214
|
+
}
|
|
5215
|
+
return {
|
|
5216
|
+
blobMerkleRoot: Hex5.fromHexInput(
|
|
5217
|
+
metadata.blob_commitment
|
|
5218
|
+
).toUint8Array(),
|
|
5219
|
+
owner: AccountAddress5.fromString(metadata.owner),
|
|
5220
|
+
name: params.name,
|
|
5221
|
+
blobNameSuffix: getBlobNameSuffix(params.name),
|
|
5222
|
+
size: Number(metadata.blob_size),
|
|
5223
|
+
encoding,
|
|
5224
|
+
expirationMicros: Number(metadata.expiration_micros),
|
|
5225
|
+
creationMicros: Number(metadata.creation_micros),
|
|
5226
|
+
sliceAddress: AccountAddress5.fromString(metadata.slice.inner),
|
|
5227
|
+
isWritten: metadata.is_written
|
|
5228
|
+
};
|
|
5229
|
+
} catch (error) {
|
|
5230
|
+
if (error instanceof Error && // Depending on the network, the error message may show up differently.
|
|
5231
|
+
(error.message?.includes("sub_status: Some(404)") || error.message?.includes("EBLOB_NOT_FOUND"))) {
|
|
5232
|
+
return void 0;
|
|
5233
|
+
}
|
|
5234
|
+
throw error;
|
|
5235
|
+
}
|
|
5236
|
+
}
|
|
5237
|
+
/**
|
|
5238
|
+
* Retrieves all the blobs and their metadata for an account from the
|
|
5239
|
+
* blockchain.
|
|
5240
|
+
*
|
|
5241
|
+
* @param params.account - The account namespace the blobs are stored in (e.g. "0x1")
|
|
5242
|
+
* @param params.pagination (optional) - The pagination options.
|
|
5243
|
+
* @param params.orderBy (optional) - The order by clause to sort the blobs by.
|
|
5244
|
+
* @returns The blob metadata for all the blobs for the account.
|
|
5245
|
+
*
|
|
5246
|
+
* @example
|
|
5247
|
+
* ```typescript
|
|
5248
|
+
* // BlobMetadata[]
|
|
5249
|
+
* const blobs = await client.getAccountBlobs({
|
|
5250
|
+
* account: AccountAddress.fromString("0x1"),
|
|
5251
|
+
* });
|
|
5252
|
+
* ```
|
|
5253
|
+
*/
|
|
5254
|
+
getAccountBlobs(params) {
|
|
5255
|
+
const { where, ...rest } = params;
|
|
5256
|
+
return this.getBlobs({
|
|
5257
|
+
where: {
|
|
5258
|
+
...where ?? {},
|
|
5259
|
+
owner: { _eq: AccountAddress5.from(params.account).toString() }
|
|
5260
|
+
},
|
|
5261
|
+
pagination: rest.pagination,
|
|
5262
|
+
orderBy: rest.orderBy
|
|
5263
|
+
});
|
|
5264
|
+
}
|
|
5265
|
+
/**
|
|
5266
|
+
* Retrieves blobs and their metadata from the blockchain.
|
|
5267
|
+
*
|
|
5268
|
+
* @param params.where (optional) - The where clause to filter the blobs by.
|
|
5269
|
+
* @param params.pagination (optional) - The pagination options.
|
|
5270
|
+
* @param params.orderBy (optional) - The order by clause to sort the blobs by.
|
|
5271
|
+
* @returns The blob metadata for all the blobs that match the where clause.
|
|
5272
|
+
*
|
|
5273
|
+
* @example
|
|
5274
|
+
* ```typescript
|
|
5275
|
+
* // BlobMetadata[]
|
|
5276
|
+
* const blobs = await client.getBlobs({
|
|
5277
|
+
* where: { owner: { _eq: AccountAddress.fromString("0x1").toString() } },
|
|
5278
|
+
* });
|
|
5279
|
+
* ```
|
|
5280
|
+
*/
|
|
5281
|
+
async getBlobs(params = {}) {
|
|
5282
|
+
const { limit, offset } = params.pagination ?? {};
|
|
5283
|
+
const { orderBy, where } = params;
|
|
5284
|
+
const currentMicros = Date.now() * 1e3;
|
|
5285
|
+
const defaultActiveFilter = {
|
|
5286
|
+
expires_at: { _gte: currentMicros },
|
|
5287
|
+
is_deleted: { _eq: "0" }
|
|
5288
|
+
};
|
|
5289
|
+
const finalWhere = where !== void 0 ? { ...defaultActiveFilter, ...where } : defaultActiveFilter;
|
|
5290
|
+
const { blobs } = await this.indexer.getBlobs({
|
|
5291
|
+
where: finalWhere,
|
|
5292
|
+
limit,
|
|
5293
|
+
offset,
|
|
5294
|
+
orderBy
|
|
5295
|
+
});
|
|
5296
|
+
return blobs.map(
|
|
5297
|
+
(blob) => ({
|
|
5298
|
+
owner: AccountAddress5.from(blob.owner),
|
|
5299
|
+
name: blob.blob_name,
|
|
5300
|
+
blobNameSuffix: getBlobNameSuffix(blob.blob_name),
|
|
5301
|
+
blobMerkleRoot: Hex5.fromHexInput(blob.blob_commitment).toUint8Array(),
|
|
5302
|
+
size: Number(blob.size),
|
|
5303
|
+
// TODO: Add encoding when supported in NCI
|
|
5304
|
+
encoding: {
|
|
5305
|
+
variant: "clay",
|
|
5306
|
+
...ERASURE_CODE_PARAMS.ClayCode_16Total_10Data_13Helper,
|
|
5307
|
+
...ERASURE_CODE_AND_CHUNK_MAPPING.ClayCode_16Total_10Data_13Helper
|
|
5308
|
+
},
|
|
5309
|
+
expirationMicros: Number(blob.expires_at),
|
|
5310
|
+
creationMicros: Number(blob.created_at),
|
|
5311
|
+
sliceAddress: AccountAddress5.from(blob.slice_address),
|
|
5312
|
+
isWritten: Boolean(Number(blob.is_written))
|
|
5313
|
+
})
|
|
5314
|
+
);
|
|
5315
|
+
}
|
|
5316
|
+
async getBlobActivities(params) {
|
|
5317
|
+
const { limit, offset } = params.pagination ?? {};
|
|
5318
|
+
const { orderBy, where } = params;
|
|
5319
|
+
const { blob_activities } = await this.indexer.getBlobActivities({
|
|
5320
|
+
where,
|
|
5321
|
+
limit,
|
|
5322
|
+
offset,
|
|
5323
|
+
orderBy
|
|
5324
|
+
});
|
|
5325
|
+
const activityTypeMapping = {
|
|
5326
|
+
[`${this.deployer.toStringLong()}::blob_metadata::BlobRegisteredEvent`]: "register_blob",
|
|
5327
|
+
[`${this.deployer.toStringLong()}::blob_metadata::BlobDeletedEvent`]: "delete_blob",
|
|
5328
|
+
[`${this.deployer.toStringLong()}::blob_metadata::BlobExpirationExtendedEvent`]: "extend_blob_expiration",
|
|
5329
|
+
[`${this.deployer.toStringLong()}::blob_metadata::BlobWrittenEvent`]: "write_blob"
|
|
5330
|
+
};
|
|
5331
|
+
return blob_activities.map(
|
|
5332
|
+
(activity) => ({
|
|
5333
|
+
blobName: activity.blob_name,
|
|
5334
|
+
accountAddress: AccountAddress5.from(
|
|
5335
|
+
activity.blob_name.substring(1, 65)
|
|
5336
|
+
),
|
|
5337
|
+
type: activityTypeMapping[activity.event_type] ?? "unknown",
|
|
5338
|
+
eventType: activity.event_type,
|
|
5339
|
+
eventIndex: activity.event_index,
|
|
5340
|
+
transactionHash: activity.transaction_hash,
|
|
5341
|
+
transactionVersion: activity.transaction_version,
|
|
5342
|
+
timestamp: activity.timestamp
|
|
5343
|
+
})
|
|
5344
|
+
);
|
|
5345
|
+
}
|
|
5346
|
+
/**
|
|
5347
|
+
* Retrieves the total number of blobs from the blockchain.
|
|
5348
|
+
*
|
|
5349
|
+
* @param params.where (optional) - The where clause to filter the blobs by.
|
|
5350
|
+
* @returns The total number of blobs.
|
|
5351
|
+
*
|
|
5352
|
+
* @example
|
|
5353
|
+
* ```typescript
|
|
5354
|
+
* const count = await client.getBlobsCount();
|
|
5355
|
+
* ```
|
|
5356
|
+
*/
|
|
5357
|
+
async getBlobsCount(params) {
|
|
5358
|
+
const { where } = params;
|
|
5359
|
+
const { blobs_aggregate } = await this.indexer.getBlobsCount({ where });
|
|
5360
|
+
return blobs_aggregate?.aggregate?.count ?? 0;
|
|
5361
|
+
}
|
|
5362
|
+
/**
|
|
5363
|
+
* Retrieves the total number of blob activities from the blockchain.
|
|
5364
|
+
*
|
|
5365
|
+
* @param params.where (optional) - The where clause to filter the blob activities by.
|
|
5366
|
+
* @returns The total number of blob activities.
|
|
5367
|
+
*
|
|
5368
|
+
* @example
|
|
5369
|
+
* ```typescript
|
|
5370
|
+
* const count = await client.getBlobActivitiesCount();
|
|
5371
|
+
* ```
|
|
5372
|
+
*/
|
|
5373
|
+
async getBlobActivitiesCount(params) {
|
|
5374
|
+
const { where } = params;
|
|
5375
|
+
const { blob_activities_aggregate } = await this.indexer.getBlobActivitiesCount({ where });
|
|
5376
|
+
return blob_activities_aggregate?.aggregate?.count ?? 0;
|
|
5377
|
+
}
|
|
5378
|
+
/**
|
|
5379
|
+
* Registers a blob on the blockchain by writing its merkle root and metadata.
|
|
5380
|
+
*
|
|
5381
|
+
* @param params.account - The account that is signing and paying for the transaction.
|
|
5382
|
+
* @param params.blobName - The name/path of the blob (e.g. "foo/bar.txt").
|
|
5383
|
+
* @param params.blobMerkleRoot - The merkle root of the blob commitments.
|
|
5384
|
+
* @param params.size - The size of the blob in bytes.
|
|
5385
|
+
* @param params.expirationMicros - The expiration time of the blob in microseconds.
|
|
5386
|
+
* @param params.options - Optional transaction building options.
|
|
5387
|
+
* @param params.options.chunksetSizeBytes - Custom chunkset size (defaults to DEFAULT_CHUNKSET_SIZE_BYTES).
|
|
5388
|
+
* @param params.options.build - Additional Aptos transaction building options.
|
|
5389
|
+
*
|
|
5390
|
+
* @returns An object containing the pending transaction.
|
|
5391
|
+
*
|
|
5392
|
+
* @example
|
|
5393
|
+
* ```typescript
|
|
5394
|
+
* const provider = await ClayErasureCodingProvider.create();
|
|
5395
|
+
* const blobCommitments = await generateCommitments(provider, data);
|
|
5396
|
+
*
|
|
5397
|
+
* const { transaction } = await client.registerBlob({
|
|
5398
|
+
* account: signer,
|
|
5399
|
+
* blobName: "foo/bar.txt",
|
|
5400
|
+
* blobMerkleRoot: blobCommitments.blob_merkle_root,
|
|
5401
|
+
* size: data.length,
|
|
5402
|
+
* expirationMicros: Date.now() * 1000 + 3600_000_000, // 1 hour from now in microseconds
|
|
5403
|
+
* });
|
|
5404
|
+
* ```
|
|
5405
|
+
*/
|
|
5406
|
+
async registerBlob(params) {
|
|
5407
|
+
const chunksetSize = params.options?.chunksetSizeBytes ?? DEFAULT_CHUNKSET_SIZE_BYTES;
|
|
5408
|
+
const transaction = await this.aptos.transaction.build.simple({
|
|
4977
5409
|
...params.options?.build,
|
|
4978
5410
|
data: _ShelbyBlobClient.createRegisterBlobPayload({
|
|
4979
5411
|
deployer: this.deployer,
|
|
@@ -5082,777 +5514,373 @@ var ShelbyBlobClient = class _ShelbyBlobClient {
|
|
|
5082
5514
|
* @param params.options - Additional options for transaction building and encoding.
|
|
5083
5515
|
*
|
|
5084
5516
|
* @returns The blob commitments and the pending transaction.
|
|
5085
|
-
*
|
|
5086
|
-
* @example
|
|
5087
|
-
* ```typescript
|
|
5088
|
-
* const { transaction } = await client.addChunksetAcknowledgements({
|
|
5089
|
-
* account: signer,
|
|
5090
|
-
* blobOwner: owner,
|
|
5091
|
-
* blobName: "foo/bar.txt",
|
|
5092
|
-
* creationMicros, // Taken from the blob metadata at registration time.
|
|
5093
|
-
* chunksetIdx,
|
|
5094
|
-
* storageProviderAcks: An array of StorageProviderAck types, each having the slot index and signature from the SP.
|
|
5095
|
-
* });
|
|
5096
|
-
* ```
|
|
5097
|
-
*/
|
|
5098
|
-
async addChunksetAcknowledgements(params) {
|
|
5099
|
-
const transaction = await this.aptos.transaction.build.simple({
|
|
5100
|
-
...params.options?.build,
|
|
5101
|
-
data: _ShelbyBlobClient.createChunksetAcknowledgementsPayload({
|
|
5102
|
-
blobOwner: params.blobOwner,
|
|
5103
|
-
blobName: params.blobName,
|
|
5104
|
-
creationMicros: params.creationMicros,
|
|
5105
|
-
chunksetIdx: params.chunksetIdx,
|
|
5106
|
-
storageProviderAcks: params.storageProviderAcks
|
|
5107
|
-
}),
|
|
5108
|
-
sender: params.account.accountAddress
|
|
5109
|
-
});
|
|
5110
|
-
return {
|
|
5111
|
-
transaction: await this.aptos.signAndSubmitTransaction({
|
|
5112
|
-
signer: params.account,
|
|
5113
|
-
transaction
|
|
5114
|
-
})
|
|
5115
|
-
};
|
|
5116
|
-
}
|
|
5117
|
-
/**
|
|
5118
|
-
* Registers multiple blobs on the blockchain by writing their merkle roots and metadata.
|
|
5119
|
-
*
|
|
5120
|
-
* @param params.account - The account that is signing and paying for the transaction.
|
|
5121
|
-
* @param params.expirationMicros - The expiration time of the blobs in microseconds.
|
|
5122
|
-
* @param params.blobs - The blobs to register.
|
|
5123
|
-
* @param params.blobs.blobName - The name/path of the blob (e.g. "foo/bar.txt").
|
|
5124
|
-
* @param params.blobs.blobSize - The size of the blob in bytes.
|
|
5125
|
-
* @param params.blobs.blobMerkleRoot - The merkle root of the blob commitments as a hex string.
|
|
5126
|
-
* @param params.options - Optional transaction building options.
|
|
5127
|
-
* @param params.options.chunksetSizeBytes - Custom chunkset size (defaults to DEFAULT_CHUNKSET_SIZE_BYTES).
|
|
5128
|
-
* @param params.options.build - Additional Aptos transaction building options.
|
|
5129
|
-
*
|
|
5130
|
-
* @returns An object containing the pending transaction.
|
|
5131
|
-
*
|
|
5132
|
-
* @example
|
|
5133
|
-
* ```typescript
|
|
5134
|
-
* const provider = await ClayErasureCodingProvider.create();
|
|
5135
|
-
* const blobCommitments = await generateCommitments(provider, data);
|
|
5136
|
-
*
|
|
5137
|
-
* const { transaction } = await client.batchRegisterBlobs({
|
|
5138
|
-
* account: signer,
|
|
5139
|
-
* expirationMicros: Date.now() * 1000 + 3600_000_000, // 1 hour from now in microseconds
|
|
5140
|
-
* blobs: [
|
|
5141
|
-
* {
|
|
5142
|
-
* blobName: "foo/bar.txt",
|
|
5143
|
-
* blobSize: data.length,
|
|
5144
|
-
* blobMerkleRoot: blobCommitments.blob_merkle_root,
|
|
5145
|
-
* },
|
|
5146
|
-
* ],
|
|
5147
|
-
* });
|
|
5148
|
-
* ```
|
|
5149
|
-
*/
|
|
5150
|
-
async batchRegisterBlobs(params) {
|
|
5151
|
-
const chunksetSize = params.options?.chunksetSizeBytes ?? DEFAULT_CHUNKSET_SIZE_BYTES;
|
|
5152
|
-
const transaction = await this.aptos.transaction.build.simple({
|
|
5153
|
-
...params.options?.build,
|
|
5154
|
-
sender: params.account.accountAddress,
|
|
5155
|
-
data: _ShelbyBlobClient.createBatchRegisterBlobsPayload({
|
|
5156
|
-
deployer: this.deployer,
|
|
5157
|
-
account: params.account.accountAddress,
|
|
5158
|
-
expirationMicros: params.expirationMicros,
|
|
5159
|
-
blobs: params.blobs.map((blob) => ({
|
|
5160
|
-
blobName: blob.blobName,
|
|
5161
|
-
blobSize: blob.blobSize,
|
|
5162
|
-
blobMerkleRoot: blob.blobMerkleRoot,
|
|
5163
|
-
numChunksets: expectedTotalChunksets(blob.blobSize, chunksetSize)
|
|
5164
|
-
}))
|
|
5165
|
-
})
|
|
5166
|
-
});
|
|
5167
|
-
return {
|
|
5168
|
-
transaction: await this.aptos.signAndSubmitTransaction({
|
|
5169
|
-
signer: params.account,
|
|
5170
|
-
transaction
|
|
5171
|
-
})
|
|
5172
|
-
};
|
|
5173
|
-
}
|
|
5174
|
-
/**
|
|
5175
|
-
* Creates a transaction payload to register a blob on the blockchain.
|
|
5176
|
-
* This is a static helper method for constructing the Move function call payload.
|
|
5177
|
-
*
|
|
5178
|
-
* @param params.deployer - Optional deployer account address. Defaults to SHELBY_DEPLOYER.
|
|
5179
|
-
* @param params.account - The account that will own the blob.
|
|
5180
|
-
* @param params.blobName - The name/path of the blob (e.g. "foo/bar.txt").
|
|
5181
|
-
* @param params.blobSize - The size of the blob in bytes.
|
|
5182
|
-
* @param params.blobMerkleRoot - The merkle root of the blob commitments as a hex string.
|
|
5183
|
-
* @param params.expirationMicros - The expiration time of the blob in microseconds.
|
|
5184
|
-
* @param params.numChunksets - The total number of chunksets in the blob.
|
|
5185
|
-
*
|
|
5186
|
-
* @returns An Aptos transaction payload data object for the register_blob Move function.
|
|
5187
|
-
*
|
|
5188
|
-
* @see https://github.com/shelby/shelby/blob/e08e84742cf2b80ad8bb7227deb3013398076d53/move/shelby_contract/sources/global_metadata.move#L357
|
|
5189
|
-
*/
|
|
5190
|
-
static createRegisterBlobPayload(params) {
|
|
5191
|
-
return {
|
|
5192
|
-
function: `${(params.deployer ?? SHELBY_DEPLOYER).toString()}::blob_metadata::register_blob`,
|
|
5193
|
-
functionArguments: [
|
|
5194
|
-
params.blobName,
|
|
5195
|
-
params.expirationMicros,
|
|
5196
|
-
MoveVector.U8(params.blobMerkleRoot),
|
|
5197
|
-
params.numChunksets,
|
|
5198
|
-
params.blobSize,
|
|
5199
|
-
// TODO
|
|
5200
|
-
0,
|
|
5201
|
-
// payment tier
|
|
5202
|
-
0
|
|
5203
|
-
// encoding
|
|
5204
|
-
]
|
|
5205
|
-
};
|
|
5206
|
-
}
|
|
5207
|
-
/**
|
|
5208
|
-
* Creates a transaction payload to register multiple blobs on the blockchain.
|
|
5209
|
-
* This is a static helper method for constructing the Move function call payload.
|
|
5210
|
-
*
|
|
5211
|
-
* @param params.deployer - Optional deployer account address. Defaults to SHELBY_DEPLOYER.
|
|
5212
|
-
* @param params.account - The account that will own the blobs.
|
|
5213
|
-
* @param params.expirationMicros - The expiration time of the blobs in microseconds.
|
|
5214
|
-
* @param params.blobs - The blobs to register.
|
|
5215
|
-
* @param params.blobs.blobName - The name/path of the blob (e.g. "foo/bar.txt").
|
|
5216
|
-
* @param params.blobs.blobSize - The size of the blob in bytes.
|
|
5217
|
-
* @param params.blobs.blobMerkleRoot - The merkle root of the blob commitments as a hex string.
|
|
5218
|
-
* @param params.blobs.numChunksets - The total number of chunksets in the blob.
|
|
5219
|
-
*
|
|
5220
|
-
* @returns An Aptos transaction payload data object for the register_multiple_blobs Move function.
|
|
5221
|
-
*
|
|
5222
|
-
* @see https://github.com/shelby/shelby/blob/e08e84742cf2b80ad8bb7227deb3013398076d53/move/shelby_contract/sources/global_metadata.move#L357
|
|
5223
|
-
*/
|
|
5224
|
-
static createBatchRegisterBlobsPayload(params) {
|
|
5225
|
-
const blobNames = [];
|
|
5226
|
-
const blobMerkleRoots = [];
|
|
5227
|
-
const blobNumChunksets = [];
|
|
5228
|
-
const blobSizes = [];
|
|
5229
|
-
params.blobs.forEach((blob) => {
|
|
5230
|
-
blobNames.push(blob.blobName);
|
|
5231
|
-
blobMerkleRoots.push(MoveVector.U8(blob.blobMerkleRoot));
|
|
5232
|
-
blobNumChunksets.push(blob.numChunksets);
|
|
5233
|
-
blobSizes.push(blob.blobSize);
|
|
5234
|
-
});
|
|
5235
|
-
return {
|
|
5236
|
-
function: `${(params.deployer ?? SHELBY_DEPLOYER).toString()}::blob_metadata::register_multiple_blobs`,
|
|
5237
|
-
functionArguments: [
|
|
5238
|
-
blobNames,
|
|
5239
|
-
params.expirationMicros,
|
|
5240
|
-
blobMerkleRoots,
|
|
5241
|
-
blobNumChunksets,
|
|
5242
|
-
blobSizes,
|
|
5243
|
-
// TODO
|
|
5244
|
-
0,
|
|
5245
|
-
0
|
|
5246
|
-
]
|
|
5247
|
-
};
|
|
5248
|
-
}
|
|
5249
|
-
/**
|
|
5250
|
-
* Creates a transaction payload to delete a blob on the blockchain.
|
|
5251
|
-
* This is a static helper method for constructing the Move function call payload.
|
|
5252
|
-
*
|
|
5253
|
-
* @param params.deployer - Optional deployer account address. Defaults to SHELBY_DEPLOYER.
|
|
5254
|
-
* @param params.blobName - The blob name (e.g. "bar.txt", without the account address prefix).
|
|
5255
|
-
*
|
|
5256
|
-
* @returns An Aptos transaction payload data object for the delete_blob Move function.
|
|
5257
|
-
*
|
|
5258
|
-
* @see https://github.com/shelby/shelby/blob/64e9d7b4f0005e586faeb1e4085c79159234b6b6/move/shelby_contract/sources/global_metadata.move#L616
|
|
5259
|
-
*/
|
|
5260
|
-
static createDeleteBlobPayload(params) {
|
|
5261
|
-
return {
|
|
5262
|
-
function: `${(params.deployer ?? SHELBY_DEPLOYER).toString()}::blob_metadata::delete_blob`,
|
|
5263
|
-
functionArguments: [params.blobName]
|
|
5264
|
-
};
|
|
5265
|
-
}
|
|
5266
|
-
/**
|
|
5267
|
-
* Creates a transaction payload to delete multiple blobs on the blockchain.
|
|
5268
|
-
* This is a static helper method for constructing the Move function call payload.
|
|
5269
|
-
*
|
|
5270
|
-
* **Note:** This function requires the `delete_multiple_blobs` entry function
|
|
5271
|
-
* which will be deployed to the smart contract on 2026-02-04. Using this
|
|
5272
|
-
* function before that date will result in a transaction failure.
|
|
5273
|
-
*
|
|
5274
|
-
* This operation is atomic: if any blob deletion fails (e.g., blob not found),
|
|
5275
|
-
* the entire transaction fails and no blobs are deleted.
|
|
5276
|
-
*
|
|
5277
|
-
* @param params.deployer - Optional deployer account address. Defaults to SHELBY_DEPLOYER.
|
|
5278
|
-
* @param params.blobNames - Array of blob name suffixes without the account address prefix
|
|
5279
|
-
* (e.g. ["foo/bar.txt", "baz.txt"], NOT ["0x1/foo/bar.txt"]). The account address
|
|
5280
|
-
* prefix is automatically derived from the transaction sender.
|
|
5281
|
-
*
|
|
5282
|
-
* @returns An Aptos transaction payload data object for the delete_multiple_blobs Move function.
|
|
5283
|
-
*
|
|
5284
|
-
* @see https://github.com/shelby/shelby/blob/main/move/shelby_contract/sources/blob_metadata.move
|
|
5285
|
-
*/
|
|
5286
|
-
static createDeleteMultipleBlobsPayload(params) {
|
|
5287
|
-
return {
|
|
5288
|
-
function: `${(params.deployer ?? SHELBY_DEPLOYER).toString()}::blob_metadata::delete_multiple_blobs`,
|
|
5289
|
-
functionArguments: [params.blobNames]
|
|
5290
|
-
};
|
|
5291
|
-
}
|
|
5292
|
-
static createChunksetAcknowledgementsPayload(params) {
|
|
5293
|
-
const ackBitMask = params.storageProviderAcks.reduce(
|
|
5294
|
-
(acc, ack) => acc | 1 << ack.slot,
|
|
5295
|
-
0
|
|
5296
|
-
);
|
|
5297
|
-
const signatures = new MoveVector(params.storageProviderAcks.map((ack) => MoveVector.U8(ack.signature)));
|
|
5298
|
-
return {
|
|
5299
|
-
function: `${SHELBY_DEPLOYER}::blob_metadata::add_chunkset_acknowledgements`,
|
|
5300
|
-
functionArguments: [
|
|
5301
|
-
createBlobKey({
|
|
5302
|
-
account: params.blobOwner,
|
|
5303
|
-
blobName: params.blobName
|
|
5304
|
-
}),
|
|
5305
|
-
params.chunksetIdx,
|
|
5306
|
-
params.creationMicros,
|
|
5307
|
-
new U32(Number(ackBitMask)),
|
|
5308
|
-
signatures
|
|
5309
|
-
]
|
|
5310
|
-
};
|
|
5311
|
-
}
|
|
5312
|
-
};
|
|
5313
|
-
|
|
5314
|
-
// ../../packages/sdk/dist/chunk-UCDAABAS.mjs
|
|
5315
|
-
import {
|
|
5316
|
-
createDecoder,
|
|
5317
|
-
createEncoder
|
|
5318
|
-
} from "@shelby-protocol/clay-codes";
|
|
5319
|
-
/**
 * Total number of chunks (systematic + parity) per chunkset.
 * For Clay codes this is exactly the configured erasure_n.
 */
function getTotalChunks(config) {
  const { erasure_n } = config;
  return erasure_n;
}
|
|
5322
|
-
var ClayErasureCodingProvider = class _ClayErasureCodingProvider {
  // Validated erasure-coding parameters (erasure_n/k/d, chunk size).
  config;
  // Encoder/decoder handles, populated by the async create() factory.
  encoderCache;
  decoderCache;
  // Records whether encode() or decode() ran most recently so that
  // getChunkMerkleRoots() reads the roots from the matching cache.
  lastFunction;
  constructor(config) {
    this.config = config;
    this.lastFunction = "none";
  }
  /**
   * Static factory method to create an initialized ClayErasureCodingProvider.
   * Validates the options and builds both the encoder and decoder up front.
   */
  static async create(options) {
    const config = buildClayConfig({
      erasure_n: options?.erasure_n ?? DEFAULT_ERASURE_N,
      erasure_k: options?.erasure_k ?? DEFAULT_ERASURE_K,
      erasure_d: options?.erasure_d ?? DEFAULT_ERASURE_D,
      chunkSizeBytes: options?.chunkSizeBytes ?? DEFAULT_CHUNK_SIZE_BYTES
    });
    const provider = new _ClayErasureCodingProvider(config);
    // Both codecs share the same base parameters.
    const codecParams = {
      n: getTotalChunks(config),
      k: config.erasure_k,
      d: config.erasure_d,
      chunkSizeBytes: config.chunkSizeBytes
    };
    const [encoder, decoder] = await Promise.all([
      createEncoder({ ...codecParams }),
      createDecoder({
        ...codecParams,
        // No chunks erased initially; will be reconfigured on decode
        erasedChunksMask: 0
      })
    ]);
    provider.encoderCache = encoder;
    provider.decoderCache = decoder;
    return provider;
  }
  /**
   * Erasure-codes pre-padded data. The input must be exactly
   * erasure_k * chunkSizeBytes long.
   */
  encode(data) {
    if (!this.encoderCache) {
      throw new Error("Encoder cache is missing");
    }
    const { erasure_k, chunkSizeBytes } = this.config;
    const systematicCapacity = erasure_k * chunkSizeBytes;
    if (data.byteLength > systematicCapacity) {
      throw new Error(
        `Data size ${data.byteLength} bytes exceeds systematic capacity ${systematicCapacity} bytes (erasure_k=${erasure_k} * chunkSizeBytes=${chunkSizeBytes}).`
      );
    }
    if (this.requiresPadding(data.length)) {
      throw new Error(
        `Data size ${data.length} bytes does not match systematic capacity ${systematicCapacity} bytes (erasure_k=${erasure_k} * chunkSizeBytes=${chunkSizeBytes}). Provide pre-padded data before encoding.`
      );
    }
    this.lastFunction = "encoded";
    return this.encoderCache.erasureCode(data);
  }
  /**
   * Reconstructs the original data from at least erasure_k available
   * chunks, each exactly chunkSizeBytes long.
   */
  decode(available, config) {
    if (!this.decoderCache) {
      throw new Error("Decoder cache is missing");
    }
    const { erasure_k, chunkSizeBytes } = this.config;
    if (available.length < erasure_k) {
      throw new Error(
        `Insufficient available chunks: need at least ${erasure_k}, received ${available.length}.`
      );
    }
    for (const [i, chunk] of available.entries()) {
      if (chunk.length !== chunkSizeBytes) {
        throw new Error(
          `Chunk ${i} size ${chunk.length} bytes does not match expected chunkSizeBytes ${chunkSizeBytes}.`
        );
      }
    }
    this.lastFunction = "decoded";
    return this.decoderCache.decode(available, config);
  }
  /**
   * Merkle roots of the chunks from the most recent encode/decode call.
   * Throws if neither has run yet.
   */
  getChunkMerkleRoots() {
    if (this.lastFunction === "decoded" && this.decoderCache) {
      return this.decoderCache.getChunkMerkleRoots();
    }
    if (this.lastFunction === "encoded" && this.encoderCache) {
      return this.encoderCache.getChunkMerkleRoots();
    }
    throw new Error(
      "You must call encode or decode before calling getChunkMerkleRoots"
    );
  }
  /**
   * Determines if data can be erasure coded as-is or requires padding.
   * Only a size exactly equal to erasure_k * chunkSizeBytes needs no padding.
   *
   * @param dataSize - Size of the data in bytes
   * @returns true if data needs padding, false if it can be coded as-is
   */
  requiresPadding(dataSize) {
    const { erasure_k, chunkSizeBytes } = this.config;
    return dataSize !== erasure_k * chunkSizeBytes;
  }
};
|
|
5424
|
-
/**
 * Validates and normalizes Clay erasure-coding parameters.
 * Requires 0 < erasure_k < erasure_d < erasure_n and a positive chunk size.
 *
 * @throws {Error} on the first violated constraint, in declaration order.
 */
function buildClayConfig(input) {
  const { erasure_n, erasure_k, erasure_d, chunkSizeBytes } = input;
  if (erasure_n <= 0) {
    throw new Error("erasure_n (total number of chunks) must be > 0");
  }
  if (erasure_k <= 0) {
    throw new Error("erasure_k (number of data chunks) must be > 0");
  }
  if (erasure_k >= erasure_n) {
    throw new Error(
      `erasure_k (${erasure_k}) must be < erasure_n (${erasure_n})`
    );
  }
  if (erasure_d <= erasure_k) {
    throw new Error(
      `erasure_d (${erasure_d}) must be > erasure_k (${erasure_k})`
    );
  }
  if (erasure_d >= erasure_n) {
    throw new Error(
      `erasure_d (${erasure_d}) must be < erasure_n (${erasure_n})`
    );
  }
  if (chunkSizeBytes <= 0) {
    throw new Error("chunkSizeBytes must be > 0");
  }
  return { erasure_n, erasure_k, erasure_d, chunkSizeBytes };
}
|
|
5450
|
-
|
|
5451
|
-
// ../../packages/sdk/dist/chunk-4MG4XGY4.mjs
|
|
5452
|
-
import {
|
|
5453
|
-
AccountAuthenticator,
|
|
5454
|
-
Deserializer,
|
|
5455
|
-
Hex as Hex5,
|
|
5456
|
-
MultiAgentTransaction,
|
|
5457
|
-
Serializer
|
|
5458
|
-
} from "@aptos-labs/ts-sdk";
|
|
5459
|
-
var StaleChannelStateError = class _StaleChannelStateError extends Error {
  /**
   * The last valid micropayment stored by the server.
   * Clients can use this to reset their local channel state.
   */
  storedMicropayment;
  constructor(storedMicropayment, message) {
    const fallback = "Client has stale channel state. Use the returned micropayment to reset local state.";
    super(message ?? fallback);
    this.name = "StaleChannelStateError";
    this.storedMicropayment = storedMicropayment;
  }
  /**
   * Returns the stored micropayment as a base64-encoded string.
   */
  toBase64() {
    let binaryString = "";
    for (const byte of this.storedMicropayment.bcsToBytes()) {
      binaryString += String.fromCharCode(byte);
    }
    return btoa(binaryString);
  }
  /**
   * Creates a StaleChannelStateError from a base64-encoded micropayment string.
   */
  static fromBase64(base64, message) {
    const decoded = atob(base64);
    const bytes = Uint8Array.from(decoded, (char) => char.charCodeAt(0));
    return new _StaleChannelStateError(
      SenderBuiltMicropayment.deserialize(bytes),
      message
    );
  }
};
|
|
5493
|
-
var SenderBuiltMicropayment = class _SenderBuiltMicropayment {
  /**
   * The actual micropayment transaction. It is built with the receiver
   * address as fee payer and also requires a signature from the receiver
   * to submit.
   */
  micropayment;
  /**
   * The sender's signature.
   */
  senderSignature;
  constructor(micropayment, senderSignature) {
    this.micropayment = micropayment;
    this.senderSignature = senderSignature;
  }
  serialize(serializer) {
    // Order matters: the transaction is written first, then the
    // sender authenticator — deserialize() reads them back in this order.
    this.micropayment.serialize(serializer);
    this.senderSignature.serialize(serializer);
  }
  bcsToBytes() {
    const out = new Serializer();
    this.serialize(out);
    return out.toUint8Array();
  }
  bcsToHex() {
    return Hex5.fromHexInput(this.bcsToBytes());
  }
  toStringWithoutPrefix() {
    return this.bcsToHex().toStringWithoutPrefix();
  }
  toString() {
    return this.bcsToHex().toString();
  }
  /**
   * Deserializes a SenderBuiltMicropayment from BCS bytes.
   * @param bytes - The bytes to deserialize from (Uint8Array or hex string).
   * @returns A new SenderBuiltMicropayment instance.
   */
  static deserialize(bytes) {
    const raw = typeof bytes === "string" ? Hex5.fromHexInput(bytes).toUint8Array() : bytes;
    const deserializer = new Deserializer(raw);
    return new _SenderBuiltMicropayment(
      MultiAgentTransaction.deserialize(deserializer),
      AccountAuthenticator.deserialize(deserializer)
    );
  }
};
|
|
5537
|
-
|
|
5538
|
-
// ../../packages/sdk/dist/chunk-W5NRGZEP.mjs
|
|
5539
|
-
import { AccountAddress as AccountAddress4 } from "@aptos-labs/ts-sdk";
|
|
5540
|
-
import { z as z2 } from "zod";
|
|
5541
|
-
// Zod schema for a blob-name path parameter: non-empty, at most 1024
// characters, and must not end with a trailing slash.
var BlobNameSchema = z2.string().min(1, "Blob name path parameter cannot be empty.").max(1024, "Blob name cannot exceed 1024 characters.").refine((name2) => !name2.endsWith("/"), {
  message: "Blob name cannot end with a slash"
});
|
|
5544
|
-
|
|
5545
|
-
// ../../packages/sdk/dist/chunk-I6NG5GNL.mjs
|
|
5546
|
-
/**
 * Promise-based setTimeout: resolves after approximately `ms` milliseconds.
 */
function sleep(ms) {
  return new Promise((resolve3) => {
    setTimeout(resolve3, ms);
  });
}
|
|
5549
|
-
|
|
5550
|
-
// ../../packages/sdk/dist/chunk-KLGT3RN5.mjs
|
|
5551
|
-
import { AccountAddress as AccountAddress5 } from "@aptos-labs/ts-sdk";
|
|
5552
|
-
var MICROPAYMENT_HEADER = "X-Shelby-Micropayment";
|
|
5553
|
-
/**
 * Percent-encodes a string like encodeURIComponent, then restores "/"
 * so path separators survive the encoding.
 */
function encodeURIComponentKeepSlashes(str) {
  const encoded = encodeURIComponent(str);
  return encoded.replace(/%2F/g, "/");
}
|
|
5556
|
-
var ShelbyRPCClient = class {
|
|
5557
|
-
baseUrl;
|
|
5558
|
-
apiKey;
|
|
5559
|
-
rpcConfig;
|
|
5560
|
-
indexer;
|
|
5561
|
-
/**
|
|
5562
|
-
* Creates a new ShelbyRPCClient for interacting with Shelby RPC nodes.
|
|
5563
|
-
* This client handles blob storage operations including upload and download.
|
|
5564
|
-
*
|
|
5565
|
-
* @param config - The client configuration object.
|
|
5566
|
-
* @param config.network - The Shelby network to use.
|
|
5567
|
-
*
|
|
5568
|
-
* @example
|
|
5569
|
-
* ```typescript
|
|
5570
|
-
* const client = new ShelbyRPCClient({
|
|
5571
|
-
* network: Network.SHELBYNET,
|
|
5572
|
-
* apiKey: "AG-***",
|
|
5573
|
-
* });
|
|
5574
|
-
* ```
|
|
5575
|
-
*/
|
|
5576
|
-
constructor(config) {
|
|
5577
|
-
this.baseUrl = config.rpc?.baseUrl ?? NetworkToShelbyRPCBaseUrl.shelbynet;
|
|
5578
|
-
this.apiKey = config.apiKey ?? config.rpc?.apiKey;
|
|
5579
|
-
this.rpcConfig = config.rpc ?? {};
|
|
5580
|
-
this.indexer = getShelbyIndexerClient(config);
|
|
5581
|
-
}
|
|
5582
|
-
async #uploadPart(uploadId, partIdx, partData) {
|
|
5583
|
-
const nRetries = 5;
|
|
5584
|
-
for (let i = 0; i < nRetries; ++i) {
|
|
5585
|
-
const partResponse = await fetch(
|
|
5586
|
-
buildRequestUrl(
|
|
5587
|
-
`/v1/multipart-uploads/${uploadId}/parts/${partIdx}`,
|
|
5588
|
-
this.baseUrl
|
|
5589
|
-
),
|
|
5590
|
-
{
|
|
5591
|
-
method: "PUT",
|
|
5592
|
-
headers: {
|
|
5593
|
-
"Content-Type": "application/octet-stream",
|
|
5594
|
-
...this.apiKey ? { Authorization: `Bearer ${this.apiKey}` } : {}
|
|
5595
|
-
},
|
|
5596
|
-
body: partData
|
|
5597
|
-
}
|
|
5598
|
-
);
|
|
5599
|
-
if (partResponse.ok) return;
|
|
5600
|
-
if (i < nRetries - 1) {
|
|
5601
|
-
const delay = 2 ** i * 100;
|
|
5602
|
-
await sleep(delay);
|
|
5603
|
-
}
|
|
5604
|
-
}
|
|
5605
|
-
throw new Error(`Failed to upload part ${partIdx}.`);
|
|
5606
|
-
}
|
|
5607
|
-
async #putBlobMultipart(account, blobName, blobData, partSize = 5 * 1024 * 1024, onProgress) {
|
|
5608
|
-
const startResponse = await fetch(
|
|
5609
|
-
buildRequestUrl("/v1/multipart-uploads", this.baseUrl),
|
|
5610
|
-
{
|
|
5611
|
-
method: "POST",
|
|
5612
|
-
headers: {
|
|
5613
|
-
"Content-Type": "application/json",
|
|
5614
|
-
...this.apiKey ? { Authorization: `Bearer ${this.apiKey}` } : {}
|
|
5615
|
-
},
|
|
5616
|
-
body: JSON.stringify({
|
|
5617
|
-
rawAccount: account.toString(),
|
|
5618
|
-
rawBlobName: blobName,
|
|
5619
|
-
rawPartSize: partSize
|
|
5620
|
-
})
|
|
5621
|
-
}
|
|
5622
|
-
);
|
|
5623
|
-
if (!startResponse.ok) {
|
|
5624
|
-
let errorBodyText = "Could not read error body";
|
|
5625
|
-
try {
|
|
5626
|
-
errorBodyText = await startResponse.text();
|
|
5627
|
-
} catch (_e) {
|
|
5628
|
-
}
|
|
5629
|
-
throw new Error(
|
|
5630
|
-
`Failed to start multipart upload! status: ${startResponse.status}, body: ${errorBodyText}`
|
|
5631
|
-
);
|
|
5632
|
-
}
|
|
5633
|
-
const { uploadId } = await startResponse.json();
|
|
5634
|
-
const totalParts = Math.ceil(blobData.length / partSize);
|
|
5635
|
-
for (let partIdx = 0; partIdx < totalParts; partIdx++) {
|
|
5636
|
-
const start = partIdx * partSize;
|
|
5637
|
-
const end = Math.min(start + partSize, blobData.length);
|
|
5638
|
-
const partData = blobData.slice(start, end);
|
|
5639
|
-
await this.#uploadPart(uploadId, partIdx, partData);
|
|
5640
|
-
onProgress?.({
|
|
5641
|
-
partIdx,
|
|
5642
|
-
totalParts,
|
|
5643
|
-
partBytes: partData.length,
|
|
5644
|
-
uploadedBytes: end,
|
|
5645
|
-
totalBytes: blobData.length
|
|
5646
|
-
});
|
|
5647
|
-
}
|
|
5648
|
-
const completeResponse = await fetch(
|
|
5649
|
-
buildRequestUrl(
|
|
5650
|
-
`/v1/multipart-uploads/${uploadId}/complete`,
|
|
5651
|
-
this.baseUrl
|
|
5652
|
-
),
|
|
5653
|
-
{
|
|
5654
|
-
method: "POST",
|
|
5655
|
-
headers: {
|
|
5656
|
-
"Content-Type": "application/json",
|
|
5657
|
-
...this.apiKey ? { Authorization: `Bearer ${this.apiKey}` } : {}
|
|
5658
|
-
}
|
|
5659
|
-
}
|
|
5660
|
-
);
|
|
5661
|
-
if (!completeResponse.ok) {
|
|
5662
|
-
let errorBodyText = "Could not read error body";
|
|
5663
|
-
try {
|
|
5664
|
-
errorBodyText = await completeResponse.text();
|
|
5665
|
-
} catch (_e) {
|
|
5666
|
-
}
|
|
5667
|
-
throw new Error(
|
|
5668
|
-
`Failed to complete multipart upload! status: ${completeResponse.status}, body: ${errorBodyText}`
|
|
5669
|
-
);
|
|
5670
|
-
}
|
|
5517
|
+
*
|
|
5518
|
+
* @example
|
|
5519
|
+
* ```typescript
|
|
5520
|
+
* const { transaction } = await client.addChunksetAcknowledgements({
|
|
5521
|
+
* account: signer,
|
|
5522
|
+
* blobOwner: owner,
|
|
5523
|
+
* blobName: "foo/bar.txt",
|
|
5524
|
+
* creationMicros, // Taken from the blob metadata at registration time.
|
|
5525
|
+
* chunksetIdx,
|
|
5526
|
+
* storageProviderAcks: An array of StorageProviderAck types, each having the slot index and signature from the SP.
|
|
5527
|
+
* });
|
|
5528
|
+
* ```
|
|
5529
|
+
*/
|
|
5530
|
+
async addChunksetAcknowledgements(params) {
|
|
5531
|
+
const transaction = await this.aptos.transaction.build.simple({
|
|
5532
|
+
...params.options?.build,
|
|
5533
|
+
data: _ShelbyBlobClient.createChunksetAcknowledgementsPayload({
|
|
5534
|
+
blobOwner: params.blobOwner,
|
|
5535
|
+
blobName: params.blobName,
|
|
5536
|
+
creationMicros: params.creationMicros,
|
|
5537
|
+
chunksetIdx: params.chunksetIdx,
|
|
5538
|
+
storageProviderAcks: params.storageProviderAcks
|
|
5539
|
+
}),
|
|
5540
|
+
sender: params.account.accountAddress
|
|
5541
|
+
});
|
|
5542
|
+
return {
|
|
5543
|
+
transaction: await this.aptos.signAndSubmitTransaction({
|
|
5544
|
+
signer: params.account,
|
|
5545
|
+
transaction
|
|
5546
|
+
})
|
|
5547
|
+
};
|
|
5671
5548
|
}
|
|
5672
5549
|
/**
|
|
5673
|
-
*
|
|
5674
|
-
* This method should be called after blob commitments have been registered on the blockchain.
|
|
5675
|
-
* Uses multipart upload for efficient handling of large files.
|
|
5550
|
+
* Registers multiple blobs on the blockchain by writing their merkle roots and metadata.
|
|
5676
5551
|
*
|
|
5677
|
-
* @param params.account - The account that
|
|
5678
|
-
* @param params.
|
|
5679
|
-
* @param params.
|
|
5552
|
+
* @param params.account - The account that is signing and paying for the transaction.
|
|
5553
|
+
* @param params.expirationMicros - The expiration time of the blobs in microseconds.
|
|
5554
|
+
* @param params.blobs - The blobs to register.
|
|
5555
|
+
* @param params.blobs.blobName - The name/path of the blob (e.g. "foo/bar.txt").
|
|
5556
|
+
* @param params.blobs.blobSize - The size of the blob in bytes.
|
|
5557
|
+
* @param params.blobs.blobMerkleRoot - The merkle root of the blob commitments as a hex string.
|
|
5558
|
+
* @param params.options - Optional transaction building options.
|
|
5559
|
+
* @param params.options.chunksetSizeBytes - Custom chunkset size (defaults to DEFAULT_CHUNKSET_SIZE_BYTES).
|
|
5560
|
+
* @param params.options.build - Additional Aptos transaction building options.
|
|
5561
|
+
*
|
|
5562
|
+
* @returns An object containing the pending transaction.
|
|
5680
5563
|
*
|
|
5681
5564
|
* @example
|
|
5682
5565
|
* ```typescript
|
|
5683
|
-
* const
|
|
5566
|
+
* const provider = await ClayErasureCodingProvider.create();
|
|
5567
|
+
* const blobCommitments = await generateCommitments(provider, data);
|
|
5684
5568
|
*
|
|
5685
|
-
* await client.
|
|
5686
|
-
* account:
|
|
5687
|
-
*
|
|
5688
|
-
*
|
|
5569
|
+
* const { transaction } = await client.batchRegisterBlobs({
|
|
5570
|
+
* account: signer,
|
|
5571
|
+
* expirationMicros: Date.now() * 1000 + 3600_000_000, // 1 hour from now in microseconds
|
|
5572
|
+
* blobs: [
|
|
5573
|
+
* {
|
|
5574
|
+
* blobName: "foo/bar.txt",
|
|
5575
|
+
* blobSize: data.length,
|
|
5576
|
+
* blobMerkleRoot: blobCommitments.blob_merkle_root,
|
|
5577
|
+
* },
|
|
5578
|
+
* ],
|
|
5689
5579
|
* });
|
|
5690
5580
|
* ```
|
|
5691
5581
|
*/
|
|
5692
|
-
async
|
|
5693
|
-
|
|
5694
|
-
await this
|
|
5695
|
-
params.
|
|
5696
|
-
params.
|
|
5697
|
-
|
|
5698
|
-
|
|
5699
|
-
|
|
5700
|
-
|
|
5582
|
+
async batchRegisterBlobs(params) {
|
|
5583
|
+
const chunksetSize = params.options?.chunksetSizeBytes ?? DEFAULT_CHUNKSET_SIZE_BYTES;
|
|
5584
|
+
const transaction = await this.aptos.transaction.build.simple({
|
|
5585
|
+
...params.options?.build,
|
|
5586
|
+
sender: params.account.accountAddress,
|
|
5587
|
+
data: _ShelbyBlobClient.createBatchRegisterBlobsPayload({
|
|
5588
|
+
deployer: this.deployer,
|
|
5589
|
+
account: params.account.accountAddress,
|
|
5590
|
+
expirationMicros: params.expirationMicros,
|
|
5591
|
+
blobs: params.blobs.map((blob) => ({
|
|
5592
|
+
blobName: blob.blobName,
|
|
5593
|
+
blobSize: blob.blobSize,
|
|
5594
|
+
blobMerkleRoot: blob.blobMerkleRoot,
|
|
5595
|
+
numChunksets: expectedTotalChunksets(blob.blobSize, chunksetSize)
|
|
5596
|
+
}))
|
|
5597
|
+
})
|
|
5598
|
+
});
|
|
5599
|
+
return {
|
|
5600
|
+
transaction: await this.aptos.signAndSubmitTransaction({
|
|
5601
|
+
signer: params.account,
|
|
5602
|
+
transaction
|
|
5603
|
+
})
|
|
5604
|
+
};
|
|
5701
5605
|
}
|
|
5702
|
-
// FIXME make this possible to stream in put ^^^
|
|
5703
5606
|
/**
|
|
5704
|
-
*
|
|
5705
|
-
*
|
|
5607
|
+
* Creates a transaction payload to register a blob on the blockchain.
|
|
5608
|
+
* This is a static helper method for constructing the Move function call payload.
|
|
5706
5609
|
*
|
|
5707
|
-
* @param params.
|
|
5708
|
-
* @param params.
|
|
5709
|
-
* @param params.
|
|
5710
|
-
* @param params.
|
|
5711
|
-
* @param params.
|
|
5712
|
-
* @param params.
|
|
5610
|
+
* @param params.deployer - Optional deployer account address. Defaults to SHELBY_DEPLOYER.
|
|
5611
|
+
* @param params.account - The account that will own the blob.
|
|
5612
|
+
* @param params.blobName - The name/path of the blob (e.g. "foo/bar.txt").
|
|
5613
|
+
* @param params.blobSize - The size of the blob in bytes.
|
|
5614
|
+
* @param params.blobMerkleRoot - The merkle root of the blob commitments as a hex string.
|
|
5615
|
+
* @param params.expirationMicros - The expiration time of the blob in microseconds.
|
|
5616
|
+
* @param params.numChunksets - The total number of chunksets in the blob.
|
|
5713
5617
|
*
|
|
5714
|
-
* @returns
|
|
5618
|
+
* @returns An Aptos transaction payload data object for the register_blob Move function.
|
|
5715
5619
|
*
|
|
5716
|
-
* @
|
|
5717
|
-
|
|
5620
|
+
* @see https://github.com/shelby/shelby/blob/e08e84742cf2b80ad8bb7227deb3013398076d53/move/shelby_contract/sources/global_metadata.move#L357
|
|
5621
|
+
*/
|
|
5622
|
+
static createRegisterBlobPayload(params) {
|
|
5623
|
+
return {
|
|
5624
|
+
function: `${(params.deployer ?? SHELBY_DEPLOYER).toString()}::blob_metadata::register_blob`,
|
|
5625
|
+
functionArguments: [
|
|
5626
|
+
params.blobName,
|
|
5627
|
+
params.expirationMicros,
|
|
5628
|
+
MoveVector.U8(params.blobMerkleRoot),
|
|
5629
|
+
params.numChunksets,
|
|
5630
|
+
params.blobSize,
|
|
5631
|
+
// TODO
|
|
5632
|
+
0,
|
|
5633
|
+
// payment tier
|
|
5634
|
+
0
|
|
5635
|
+
// encoding
|
|
5636
|
+
]
|
|
5637
|
+
};
|
|
5638
|
+
}
|
|
5639
|
+
/**
|
|
5640
|
+
* Creates a transaction payload to register multiple blobs on the blockchain.
|
|
5641
|
+
* This is a static helper method for constructing the Move function call payload.
|
|
5718
5642
|
*
|
|
5719
|
-
* @
|
|
5720
|
-
*
|
|
5721
|
-
*
|
|
5722
|
-
*
|
|
5723
|
-
*
|
|
5724
|
-
*
|
|
5725
|
-
*
|
|
5643
|
+
* @param params.deployer - Optional deployer account address. Defaults to SHELBY_DEPLOYER.
|
|
5644
|
+
* @param params.account - The account that will own the blobs.
|
|
5645
|
+
* @param params.expirationMicros - The expiration time of the blobs in microseconds.
|
|
5646
|
+
* @param params.blobs - The blobs to register.
|
|
5647
|
+
* @param params.blobs.blobName - The name/path of the blob (e.g. "foo/bar.txt").
|
|
5648
|
+
* @param params.blobs.blobSize - The size of the blob in bytes.
|
|
5649
|
+
* @param params.blobs.blobMerkleRoot - The merkle root of the blob commitments as a hex string.
|
|
5650
|
+
* @param params.blobs.numChunksets - The total number of chunksets in the blob.
|
|
5726
5651
|
*
|
|
5727
|
-
*
|
|
5728
|
-
* const partial = await client.getBlob({
|
|
5729
|
-
* account: AccountAddress.from("0x1"),
|
|
5730
|
-
* blobName: "large-file.bin",
|
|
5731
|
-
* range: { start: 100, end: 199 }
|
|
5732
|
-
* });
|
|
5652
|
+
* @returns An Aptos transaction payload data object for the register_multiple_blobs Move function.
|
|
5733
5653
|
*
|
|
5734
|
-
*
|
|
5735
|
-
|
|
5736
|
-
|
|
5737
|
-
|
|
5738
|
-
|
|
5739
|
-
|
|
5740
|
-
|
|
5654
|
+
* @see https://github.com/shelby/shelby/blob/e08e84742cf2b80ad8bb7227deb3013398076d53/move/shelby_contract/sources/global_metadata.move#L357
|
|
5655
|
+
*/
|
|
5656
|
+
static createBatchRegisterBlobsPayload(params) {
|
|
5657
|
+
const blobNames = [];
|
|
5658
|
+
const blobMerkleRoots = [];
|
|
5659
|
+
const blobNumChunksets = [];
|
|
5660
|
+
const blobSizes = [];
|
|
5661
|
+
params.blobs.forEach((blob) => {
|
|
5662
|
+
blobNames.push(blob.blobName);
|
|
5663
|
+
blobMerkleRoots.push(MoveVector.U8(blob.blobMerkleRoot));
|
|
5664
|
+
blobNumChunksets.push(blob.numChunksets);
|
|
5665
|
+
blobSizes.push(blob.blobSize);
|
|
5666
|
+
});
|
|
5667
|
+
return {
|
|
5668
|
+
function: `${(params.deployer ?? SHELBY_DEPLOYER).toString()}::blob_metadata::register_multiple_blobs`,
|
|
5669
|
+
functionArguments: [
|
|
5670
|
+
blobNames,
|
|
5671
|
+
params.expirationMicros,
|
|
5672
|
+
blobMerkleRoots,
|
|
5673
|
+
blobNumChunksets,
|
|
5674
|
+
blobSizes,
|
|
5675
|
+
// TODO
|
|
5676
|
+
0,
|
|
5677
|
+
0
|
|
5678
|
+
]
|
|
5679
|
+
};
|
|
5680
|
+
}
|
|
5681
|
+
/**
|
|
5682
|
+
* Creates a transaction payload to delete a blob on the blockchain.
|
|
5683
|
+
* This is a static helper method for constructing the Move function call payload.
|
|
5684
|
+
*
|
|
5685
|
+
* @param params.deployer - Optional deployer account address. Defaults to SHELBY_DEPLOYER.
|
|
5686
|
+
* @param params.blobName - The blob name (e.g. "bar.txt", without the account address prefix).
|
|
5687
|
+
*
|
|
5688
|
+
* @returns An Aptos transaction payload data object for the delete_blob Move function.
|
|
5689
|
+
*
|
|
5690
|
+
* @see https://github.com/shelby/shelby/blob/64e9d7b4f0005e586faeb1e4085c79159234b6b6/move/shelby_contract/sources/global_metadata.move#L616
|
|
5691
|
+
*/
|
|
5692
|
+
static createDeleteBlobPayload(params) {
|
|
5693
|
+
return {
|
|
5694
|
+
function: `${(params.deployer ?? SHELBY_DEPLOYER).toString()}::blob_metadata::delete_blob`,
|
|
5695
|
+
functionArguments: [params.blobName]
|
|
5696
|
+
};
|
|
5697
|
+
}
|
|
5698
|
+
/**
|
|
5699
|
+
* Creates a transaction payload to delete multiple blobs on the blockchain.
|
|
5700
|
+
* This is a static helper method for constructing the Move function call payload.
|
|
5701
|
+
*
|
|
5702
|
+
* **Note:** This function requires the `delete_multiple_blobs` entry function
|
|
5703
|
+
* which will be deployed to the smart contract on 2026-02-04. Using this
|
|
5704
|
+
* function before that date will result in a transaction failure.
|
|
5705
|
+
*
|
|
5706
|
+
* This operation is atomic: if any blob deletion fails (e.g., blob not found),
|
|
5707
|
+
* the entire transaction fails and no blobs are deleted.
|
|
5708
|
+
*
|
|
5709
|
+
* @param params.deployer - Optional deployer account address. Defaults to SHELBY_DEPLOYER.
|
|
5710
|
+
* @param params.blobNames - Array of blob name suffixes without the account address prefix
|
|
5711
|
+
* (e.g. ["foo/bar.txt", "baz.txt"], NOT ["0x1/foo/bar.txt"]). The account address
|
|
5712
|
+
* prefix is automatically derived from the transaction sender.
|
|
5713
|
+
*
|
|
5714
|
+
* @returns An Aptos transaction payload data object for the delete_multiple_blobs Move function.
|
|
5715
|
+
*
|
|
5716
|
+
* @see https://github.com/shelby/shelby/blob/main/move/shelby_contract/sources/blob_metadata.move
|
|
5717
|
+
*/
|
|
5718
|
+
static createDeleteMultipleBlobsPayload(params) {
|
|
5719
|
+
return {
|
|
5720
|
+
function: `${(params.deployer ?? SHELBY_DEPLOYER).toString()}::blob_metadata::delete_multiple_blobs`,
|
|
5721
|
+
functionArguments: [params.blobNames]
|
|
5722
|
+
};
|
|
5723
|
+
}
|
|
5724
|
+
static createChunksetAcknowledgementsPayload(params) {
|
|
5725
|
+
const ackBitMask = params.storageProviderAcks.reduce(
|
|
5726
|
+
(acc, ack) => acc | 1 << ack.slot,
|
|
5727
|
+
0
|
|
5728
|
+
);
|
|
5729
|
+
const signatures = new MoveVector(params.storageProviderAcks.map((ack) => MoveVector.U8(ack.signature)));
|
|
5730
|
+
return {
|
|
5731
|
+
function: `${SHELBY_DEPLOYER}::blob_metadata::add_chunkset_acknowledgements`,
|
|
5732
|
+
functionArguments: [
|
|
5733
|
+
createBlobKey({
|
|
5734
|
+
account: params.blobOwner,
|
|
5735
|
+
blobName: params.blobName
|
|
5736
|
+
}),
|
|
5737
|
+
params.chunksetIdx,
|
|
5738
|
+
params.creationMicros,
|
|
5739
|
+
new U32(Number(ackBitMask)),
|
|
5740
|
+
signatures
|
|
5741
|
+
]
|
|
5742
|
+
};
|
|
5743
|
+
}
|
|
5744
|
+
};
|
|
5745
|
+
|
|
5746
|
+
// ../../packages/sdk/dist/chunk-UCDAABAS.mjs
|
|
5747
|
+
import {
|
|
5748
|
+
createDecoder,
|
|
5749
|
+
createEncoder
|
|
5750
|
+
} from "@shelby-protocol/clay-codes";
|
|
5751
|
+
/**
 * Total number of chunks (systematic + parity) per chunkset.
 * For Clay codes this is exactly the configured erasure_n.
 */
function getTotalChunks(config) {
  const { erasure_n } = config;
  return erasure_n;
}
|
|
5754
|
+
var ClayErasureCodingProvider = class _ClayErasureCodingProvider {
|
|
5755
|
+
config;
|
|
5756
|
+
encoderCache;
|
|
5757
|
+
decoderCache;
|
|
5758
|
+
lastFunction;
|
|
5759
|
+
constructor(config) {
|
|
5760
|
+
this.config = config;
|
|
5761
|
+
this.lastFunction = "none";
|
|
5762
|
+
}
|
|
5763
|
+
/**
|
|
5764
|
+
* Static factory method to create an initialized ClayErasureCodingProvider
|
|
5741
5765
|
*/
|
|
5742
|
-
async
|
|
5743
|
-
|
|
5744
|
-
|
|
5745
|
-
|
|
5746
|
-
|
|
5747
|
-
|
|
5748
|
-
|
|
5749
|
-
);
|
|
5750
|
-
|
|
5751
|
-
|
|
5752
|
-
|
|
5753
|
-
|
|
5754
|
-
|
|
5755
|
-
|
|
5756
|
-
|
|
5757
|
-
|
|
5758
|
-
|
|
5759
|
-
|
|
5760
|
-
|
|
5761
|
-
|
|
5762
|
-
|
|
5763
|
-
|
|
5764
|
-
|
|
5765
|
-
|
|
5766
|
-
|
|
5767
|
-
|
|
5768
|
-
|
|
5769
|
-
|
|
5770
|
-
|
|
5771
|
-
|
|
5766
|
+
static async create(options) {
|
|
5767
|
+
const config = buildClayConfig({
|
|
5768
|
+
erasure_n: options?.erasure_n ?? DEFAULT_ERASURE_N,
|
|
5769
|
+
erasure_k: options?.erasure_k ?? DEFAULT_ERASURE_K,
|
|
5770
|
+
erasure_d: options?.erasure_d ?? DEFAULT_ERASURE_D,
|
|
5771
|
+
chunkSizeBytes: options?.chunkSizeBytes ?? DEFAULT_CHUNK_SIZE_BYTES
|
|
5772
|
+
});
|
|
5773
|
+
const provider = new _ClayErasureCodingProvider(config);
|
|
5774
|
+
[provider.encoderCache, provider.decoderCache] = await Promise.all([
|
|
5775
|
+
createEncoder({
|
|
5776
|
+
n: getTotalChunks(config),
|
|
5777
|
+
k: config.erasure_k,
|
|
5778
|
+
d: config.erasure_d,
|
|
5779
|
+
chunkSizeBytes: config.chunkSizeBytes
|
|
5780
|
+
}),
|
|
5781
|
+
createDecoder({
|
|
5782
|
+
n: getTotalChunks(config),
|
|
5783
|
+
k: config.erasure_k,
|
|
5784
|
+
d: config.erasure_d,
|
|
5785
|
+
chunkSizeBytes: config.chunkSizeBytes,
|
|
5786
|
+
erasedChunksMask: 0
|
|
5787
|
+
// No chunks erased initially; will be reconfigured on decode
|
|
5788
|
+
})
|
|
5789
|
+
]);
|
|
5790
|
+
return provider;
|
|
5791
|
+
}
|
|
5792
|
+
encode(data) {
|
|
5793
|
+
const { erasure_k, chunkSizeBytes } = this.config;
|
|
5794
|
+
if (!this.encoderCache) {
|
|
5795
|
+
throw new Error("Encoder cache is missing");
|
|
5772
5796
|
}
|
|
5773
|
-
const
|
|
5774
|
-
if (
|
|
5775
|
-
let errorBody;
|
|
5776
|
-
try {
|
|
5777
|
-
errorBody = await response.json();
|
|
5778
|
-
} catch {
|
|
5779
|
-
throw new Error(
|
|
5780
|
-
`Failed to download blob: ${response.status} ${response.statusText}`
|
|
5781
|
-
);
|
|
5782
|
-
}
|
|
5783
|
-
if (errorBody.storedMicropayment) {
|
|
5784
|
-
throw StaleChannelStateError.fromBase64(
|
|
5785
|
-
errorBody.storedMicropayment,
|
|
5786
|
-
errorBody.error
|
|
5787
|
-
);
|
|
5788
|
-
}
|
|
5797
|
+
const systematicCapacity = erasure_k * chunkSizeBytes;
|
|
5798
|
+
if (data.byteLength > systematicCapacity) {
|
|
5789
5799
|
throw new Error(
|
|
5790
|
-
|
|
5800
|
+
`Data size ${data.byteLength} bytes exceeds systematic capacity ${systematicCapacity} bytes (erasure_k=${erasure_k} * chunkSizeBytes=${chunkSizeBytes}).`
|
|
5791
5801
|
);
|
|
5792
5802
|
}
|
|
5793
|
-
if (
|
|
5803
|
+
if (this.requiresPadding(data.length)) {
|
|
5794
5804
|
throw new Error(
|
|
5795
|
-
`
|
|
5805
|
+
`Data size ${data.length} bytes does not match systematic capacity ${systematicCapacity} bytes (erasure_k=${erasure_k} * chunkSizeBytes=${chunkSizeBytes}). Provide pre-padded data before encoding.`
|
|
5796
5806
|
);
|
|
5797
5807
|
}
|
|
5798
|
-
|
|
5799
|
-
|
|
5800
|
-
|
|
5801
|
-
|
|
5802
|
-
if (
|
|
5803
|
-
throw new Error(
|
|
5804
|
-
"Response did not have content-length header, which is required"
|
|
5805
|
-
);
|
|
5808
|
+
this.lastFunction = "encoded";
|
|
5809
|
+
return this.encoderCache.erasureCode(data);
|
|
5810
|
+
}
|
|
5811
|
+
decode(available, config) {
|
|
5812
|
+
if (!this.decoderCache) {
|
|
5813
|
+
throw new Error("Decoder cache is missing");
|
|
5806
5814
|
}
|
|
5807
|
-
const
|
|
5808
|
-
if (
|
|
5815
|
+
const { erasure_k, chunkSizeBytes } = this.config;
|
|
5816
|
+
if (available.length < erasure_k) {
|
|
5809
5817
|
throw new Error(
|
|
5810
|
-
`
|
|
5818
|
+
`Insufficient available chunks: need at least ${erasure_k}, received ${available.length}.`
|
|
5811
5819
|
);
|
|
5812
5820
|
}
|
|
5813
|
-
|
|
5814
|
-
|
|
5815
|
-
|
|
5816
|
-
|
|
5817
|
-
|
|
5818
|
-
|
|
5819
|
-
}
|
|
5820
|
-
const reader = maybeReader;
|
|
5821
|
-
let bytesReceived = 0;
|
|
5822
|
-
function pump() {
|
|
5823
|
-
return reader.read().then(({ done, value }) => {
|
|
5824
|
-
if (done) {
|
|
5825
|
-
if (bytesReceived !== expectedContentLength) {
|
|
5826
|
-
controller.error(
|
|
5827
|
-
new Error(
|
|
5828
|
-
`Downloaded data size (${bytesReceived} bytes) does not match content-length header (${expectedContentLength} bytes). This might indicate a partial or corrupted download.`
|
|
5829
|
-
)
|
|
5830
|
-
);
|
|
5831
|
-
return;
|
|
5832
|
-
}
|
|
5833
|
-
controller.close();
|
|
5834
|
-
return;
|
|
5835
|
-
}
|
|
5836
|
-
bytesReceived += value.byteLength;
|
|
5837
|
-
controller.enqueue(value);
|
|
5838
|
-
return pump();
|
|
5839
|
-
}).catch((error) => {
|
|
5840
|
-
controller.error(error);
|
|
5841
|
-
});
|
|
5842
|
-
}
|
|
5843
|
-
return pump();
|
|
5821
|
+
for (let i = 0; i < available.length; i++) {
|
|
5822
|
+
const chunk = available[i];
|
|
5823
|
+
if (chunk.length !== chunkSizeBytes) {
|
|
5824
|
+
throw new Error(
|
|
5825
|
+
`Chunk ${i} size ${chunk.length} bytes does not match expected chunkSizeBytes ${chunkSizeBytes}.`
|
|
5826
|
+
);
|
|
5844
5827
|
}
|
|
5845
|
-
}
|
|
5846
|
-
|
|
5847
|
-
|
|
5848
|
-
|
|
5849
|
-
|
|
5850
|
-
|
|
5851
|
-
|
|
5828
|
+
}
|
|
5829
|
+
this.lastFunction = "decoded";
|
|
5830
|
+
return this.decoderCache.decode(available, config);
|
|
5831
|
+
}
|
|
5832
|
+
getChunkMerkleRoots() {
|
|
5833
|
+
if (this.decoderCache && this.lastFunction === "decoded")
|
|
5834
|
+
return this.decoderCache.getChunkMerkleRoots();
|
|
5835
|
+
if (this.encoderCache && this.lastFunction === "encoded")
|
|
5836
|
+
return this.encoderCache.getChunkMerkleRoots();
|
|
5837
|
+
throw new Error(
|
|
5838
|
+
"You must call encode or decode before calling getChunkMerkleRoots"
|
|
5839
|
+
);
|
|
5840
|
+
}
|
|
5841
|
+
/**
|
|
5842
|
+
* Determines if data can be erasure coded as-is or requires padding.
|
|
5843
|
+
*
|
|
5844
|
+
* Data can be erasure coded without padding if its size exactly matches
|
|
5845
|
+
* the total systematic data capacity (k * chunkSizeBytes).
|
|
5846
|
+
*
|
|
5847
|
+
* @param dataSize - Size of the data in bytes
|
|
5848
|
+
* @returns true if data needs padding, false if it can be coded as-is
|
|
5849
|
+
*/
|
|
5850
|
+
requiresPadding(dataSize) {
|
|
5851
|
+
const { erasure_k, chunkSizeBytes } = this.config;
|
|
5852
|
+
const systematicCapacity = erasure_k * chunkSizeBytes;
|
|
5853
|
+
return dataSize !== systematicCapacity;
|
|
5852
5854
|
}
|
|
5853
5855
|
};
|
|
5856
|
+
function buildClayConfig(input) {
|
|
5857
|
+
const { erasure_n, erasure_k, erasure_d, chunkSizeBytes } = input;
|
|
5858
|
+
if (erasure_n <= 0)
|
|
5859
|
+
throw new Error("erasure_n (total number of chunks) must be > 0");
|
|
5860
|
+
if (erasure_k <= 0)
|
|
5861
|
+
throw new Error("erasure_k (number of data chunks) must be > 0");
|
|
5862
|
+
if (erasure_k >= erasure_n)
|
|
5863
|
+
throw new Error(
|
|
5864
|
+
`erasure_k (${erasure_k}) must be < erasure_n (${erasure_n})`
|
|
5865
|
+
);
|
|
5866
|
+
if (erasure_d <= erasure_k)
|
|
5867
|
+
throw new Error(
|
|
5868
|
+
`erasure_d (${erasure_d}) must be > erasure_k (${erasure_k})`
|
|
5869
|
+
);
|
|
5870
|
+
if (erasure_d >= erasure_n)
|
|
5871
|
+
throw new Error(
|
|
5872
|
+
`erasure_d (${erasure_d}) must be < erasure_n (${erasure_n})`
|
|
5873
|
+
);
|
|
5874
|
+
if (chunkSizeBytes <= 0) throw new Error("chunkSizeBytes must be > 0");
|
|
5875
|
+
return {
|
|
5876
|
+
erasure_n,
|
|
5877
|
+
erasure_k,
|
|
5878
|
+
erasure_d,
|
|
5879
|
+
chunkSizeBytes
|
|
5880
|
+
};
|
|
5881
|
+
}
|
|
5854
5882
|
|
|
5855
|
-
// ../../packages/sdk/dist/chunk-
|
|
5883
|
+
// ../../packages/sdk/dist/chunk-H6IYPL3O.mjs
|
|
5856
5884
|
import {
|
|
5857
5885
|
AccountAddress as AccountAddress6,
|
|
5858
5886
|
Aptos as Aptos3,
|
|
@@ -5997,7 +6025,7 @@ function validateConcurrency(concurrency) {
|
|
|
5997
6025
|
}
|
|
5998
6026
|
}
|
|
5999
6027
|
|
|
6000
|
-
// ../../packages/sdk/dist/chunk-
|
|
6028
|
+
// ../../packages/sdk/dist/chunk-H6IYPL3O.mjs
|
|
6001
6029
|
var ShelbyClient = class {
|
|
6002
6030
|
/**
|
|
6003
6031
|
* The coordination client is used to interact with the Aptos blockchain which handles the commitments
|
|
@@ -6306,7 +6334,7 @@ var ShelbyClient = class {
|
|
|
6306
6334
|
}
|
|
6307
6335
|
};
|
|
6308
6336
|
|
|
6309
|
-
// ../../packages/sdk/dist/chunk-
|
|
6337
|
+
// ../../packages/sdk/dist/chunk-WO2DVEKI.mjs
|
|
6310
6338
|
var ShelbyNodeClient = class extends ShelbyClient {
|
|
6311
6339
|
};
|
|
6312
6340
|
|
|
@@ -6903,7 +6931,7 @@ function getAptosAccountExplorerUrl(network, accountAddress) {
|
|
|
6903
6931
|
import chalk2 from "chalk";
|
|
6904
6932
|
import { filesize } from "filesize";
|
|
6905
6933
|
import { render } from "ink";
|
|
6906
|
-
import { z as
|
|
6934
|
+
import { z as z12 } from "zod";
|
|
6907
6935
|
|
|
6908
6936
|
// src/components/AccountWizard.tsx
|
|
6909
6937
|
import { Box, Text } from "ink";
|
|
@@ -6922,37 +6950,37 @@ import {
|
|
|
6922
6950
|
} from "@aptos-labs/ts-sdk";
|
|
6923
6951
|
import fs2 from "fs-extra";
|
|
6924
6952
|
import YAML2 from "yaml";
|
|
6925
|
-
import { z as
|
|
6953
|
+
import { z as z8 } from "zod";
|
|
6926
6954
|
|
|
6927
6955
|
// src/schemas/AptosNetworkSchema.ts
|
|
6928
|
-
import { z as
|
|
6956
|
+
import { z as z5 } from "zod";
|
|
6929
6957
|
|
|
6930
6958
|
// src/schemas/EndpointSchema.ts
|
|
6931
|
-
import { z as
|
|
6932
|
-
var EndpointSchema =
|
|
6959
|
+
import { z as z4 } from "zod";
|
|
6960
|
+
var EndpointSchema = z4.string().url("Must be a valid URL");
|
|
6933
6961
|
|
|
6934
6962
|
// src/schemas/AptosNetworkSchema.ts
|
|
6935
|
-
var AptosNetworkNameSchema =
|
|
6936
|
-
var AptosNetworkSchema =
|
|
6963
|
+
var AptosNetworkNameSchema = z5.enum(shelbyNetworks);
|
|
6964
|
+
var AptosNetworkSchema = z5.object({
|
|
6937
6965
|
name: AptosNetworkNameSchema,
|
|
6938
6966
|
fullnode: EndpointSchema.optional(),
|
|
6939
6967
|
faucet: EndpointSchema.optional(),
|
|
6940
6968
|
indexer: EndpointSchema.optional(),
|
|
6941
6969
|
pepper: EndpointSchema.optional(),
|
|
6942
6970
|
prover: EndpointSchema.optional(),
|
|
6943
|
-
api_key:
|
|
6971
|
+
api_key: z5.string().optional()
|
|
6944
6972
|
});
|
|
6945
6973
|
|
|
6946
6974
|
// src/schemas/ShelbyNetworkSchema.ts
|
|
6947
|
-
import { z as
|
|
6948
|
-
var ShelbyNetworkSchema =
|
|
6975
|
+
import { z as z6 } from "zod";
|
|
6976
|
+
var ShelbyNetworkSchema = z6.object({
|
|
6949
6977
|
rpc_endpoint: EndpointSchema.optional(),
|
|
6950
6978
|
indexer_endpoint: EndpointSchema.optional(),
|
|
6951
|
-
rpc_api_key:
|
|
6952
|
-
indexer_api_key:
|
|
6953
|
-
deployer_address:
|
|
6979
|
+
rpc_api_key: z6.string().optional(),
|
|
6980
|
+
indexer_api_key: z6.string().optional(),
|
|
6981
|
+
deployer_address: z6.string().optional(),
|
|
6954
6982
|
// The account address of the RPC server that receives micropayments
|
|
6955
|
-
rpc_receiver_address:
|
|
6983
|
+
rpc_receiver_address: z6.string().optional()
|
|
6956
6984
|
});
|
|
6957
6985
|
|
|
6958
6986
|
// src/utils/global-config.ts
|
|
@@ -6960,9 +6988,9 @@ import os from "os";
|
|
|
6960
6988
|
import path from "path";
|
|
6961
6989
|
import fs from "fs-extra";
|
|
6962
6990
|
import YAML from "yaml";
|
|
6963
|
-
import { z as
|
|
6964
|
-
var GlobalConfigSchema =
|
|
6965
|
-
config_location_behavior:
|
|
6991
|
+
import { z as z7 } from "zod";
|
|
6992
|
+
var GlobalConfigSchema = z7.object({
|
|
6993
|
+
config_location_behavior: z7.enum(["global", "walk"])
|
|
6966
6994
|
});
|
|
6967
6995
|
var DEFAULT_GLOBAL_CONFIG = {
|
|
6968
6996
|
config_location_behavior: "global"
|
|
@@ -6998,27 +7026,27 @@ function saveGlobalConfig(config) {
|
|
|
6998
7026
|
}
|
|
6999
7027
|
|
|
7000
7028
|
// src/utils/config.ts
|
|
7001
|
-
var ContextSchema =
|
|
7002
|
-
api_key:
|
|
7029
|
+
var ContextSchema = z8.object({
|
|
7030
|
+
api_key: z8.string().optional(),
|
|
7003
7031
|
aptos_network: AptosNetworkSchema,
|
|
7004
7032
|
shelby_network: ShelbyNetworkSchema.optional()
|
|
7005
7033
|
});
|
|
7006
|
-
var AccountNameSchema =
|
|
7034
|
+
var AccountNameSchema = z8.string().min(1, "Account name cannot be empty").regex(
|
|
7007
7035
|
/^[a-zA-Z0-9_-]+$/,
|
|
7008
7036
|
"Account name must contain only alphanumeric characters, underscores, and hyphens"
|
|
7009
7037
|
);
|
|
7010
|
-
var ConfigSchema =
|
|
7011
|
-
contexts:
|
|
7012
|
-
accounts:
|
|
7038
|
+
var ConfigSchema = z8.object({
|
|
7039
|
+
contexts: z8.record(ContextSchema),
|
|
7040
|
+
accounts: z8.record(
|
|
7013
7041
|
AccountNameSchema,
|
|
7014
|
-
|
|
7015
|
-
address:
|
|
7016
|
-
private_key:
|
|
7042
|
+
z8.object({
|
|
7043
|
+
address: z8.string().optional(),
|
|
7044
|
+
private_key: z8.string({ message: "private_key must be provided" })
|
|
7017
7045
|
})
|
|
7018
7046
|
),
|
|
7019
|
-
default_context:
|
|
7047
|
+
default_context: z8.string(),
|
|
7020
7048
|
// Can be empty string if no account is set up
|
|
7021
|
-
default_account:
|
|
7049
|
+
default_account: z8.string()
|
|
7022
7050
|
});
|
|
7023
7051
|
function getPathForNewConfig() {
|
|
7024
7052
|
const globalConfig = loadGlobalConfig();
|
|
@@ -7218,7 +7246,7 @@ import {
|
|
|
7218
7246
|
Secp256k1PrivateKey,
|
|
7219
7247
|
SigningSchemeInput
|
|
7220
7248
|
} from "@aptos-labs/ts-sdk";
|
|
7221
|
-
import { z as
|
|
7249
|
+
import { z as z9 } from "zod";
|
|
7222
7250
|
function generateEd25519Account() {
|
|
7223
7251
|
const account = Account.generate({
|
|
7224
7252
|
scheme: SigningSchemeInput.Ed25519,
|
|
@@ -7238,7 +7266,7 @@ function isValidAddress(address) {
|
|
|
7238
7266
|
return AccountAddress8.isValid({ input: address }).valid;
|
|
7239
7267
|
}
|
|
7240
7268
|
var ED25519_NAME = "ed25519";
|
|
7241
|
-
var SignatureSchemeSchema =
|
|
7269
|
+
var SignatureSchemeSchema = z9.enum([ED25519_NAME]);
|
|
7242
7270
|
|
|
7243
7271
|
// src/components/AccountWizard.tsx
|
|
7244
7272
|
import { jsx, jsxs } from "react/jsx-runtime";
|
|
@@ -7466,7 +7494,7 @@ var SHELBY_CONFIG_FIELDS = [
|
|
|
7466
7494
|
import { Box as Box3, Text as Text3 } from "ink";
|
|
7467
7495
|
import TextInput2 from "ink-text-input";
|
|
7468
7496
|
import { useState as useState2 } from "react";
|
|
7469
|
-
import
|
|
7497
|
+
import z10 from "zod";
|
|
7470
7498
|
import { jsx as jsx4, jsxs as jsxs3 } from "react/jsx-runtime";
|
|
7471
7499
|
function FormTextInput({
|
|
7472
7500
|
type,
|
|
@@ -7496,7 +7524,7 @@ function FormTextInput({
|
|
|
7496
7524
|
return;
|
|
7497
7525
|
}
|
|
7498
7526
|
if (value2) {
|
|
7499
|
-
if (type === "url" && !
|
|
7527
|
+
if (type === "url" && !z10.string().url().safeParse(value2).success) {
|
|
7500
7528
|
setError("The field must be a valid URL");
|
|
7501
7529
|
return;
|
|
7502
7530
|
}
|
|
@@ -7544,7 +7572,8 @@ var ContextReviewWizard = ({
|
|
|
7544
7572
|
indexer_endpoint: NetworkToShelbyBlobIndexerBaseUrl[aptosNetwork.name],
|
|
7545
7573
|
indexer_api_key: void 0,
|
|
7546
7574
|
deployer_address: void 0,
|
|
7547
|
-
rpc_api_key: void 0
|
|
7575
|
+
rpc_api_key: void 0,
|
|
7576
|
+
rpc_receiver_address: void 0
|
|
7548
7577
|
}[key];
|
|
7549
7578
|
return fallbackValue ? { value: fallbackValue, source: "default" } : void 0;
|
|
7550
7579
|
};
|
|
@@ -8193,13 +8222,13 @@ import boxen from "boxen";
|
|
|
8193
8222
|
import chalk from "chalk";
|
|
8194
8223
|
import { getLatestVersion } from "fast-npm-meta";
|
|
8195
8224
|
import * as semver from "semver";
|
|
8196
|
-
import { z as
|
|
8225
|
+
import { z as z11 } from "zod";
|
|
8197
8226
|
var CHECK_INTERVAL = 1e3 * 60 * 60 * 24;
|
|
8198
8227
|
var NOTIFY_INTERVAL = CHECK_INTERVAL;
|
|
8199
|
-
var VersionCacheSchema =
|
|
8200
|
-
lastChecked:
|
|
8201
|
-
latestVersion:
|
|
8202
|
-
lastNotified:
|
|
8228
|
+
var VersionCacheSchema = z11.object({
|
|
8229
|
+
lastChecked: z11.number(),
|
|
8230
|
+
latestVersion: z11.string(),
|
|
8231
|
+
lastNotified: z11.number().optional()
|
|
8203
8232
|
});
|
|
8204
8233
|
var versionCache = new Cache("version-check.json", VersionCacheSchema);
|
|
8205
8234
|
async function fetchLatestVersion() {
|
|
@@ -9029,10 +9058,10 @@ var handleError = (error) => {
|
|
|
9029
9058
|
|
|
9030
9059
|
// src/commands/account.tsx
|
|
9031
9060
|
import { jsx as jsx10 } from "react/jsx-runtime";
|
|
9032
|
-
var CreateAccountOptionsSchema =
|
|
9033
|
-
name:
|
|
9034
|
-
privateKey:
|
|
9035
|
-
address:
|
|
9061
|
+
var CreateAccountOptionsSchema = z12.object({
|
|
9062
|
+
name: z12.string().optional(),
|
|
9063
|
+
privateKey: z12.string().optional(),
|
|
9064
|
+
address: z12.string().optional(),
|
|
9036
9065
|
scheme: SignatureSchemeSchema.optional()
|
|
9037
9066
|
});
|
|
9038
9067
|
var allSchemes = SignatureSchemeSchema.options.join(", ");
|
|
@@ -9059,12 +9088,12 @@ async function promptForAccountName(config) {
|
|
|
9059
9088
|
return answer;
|
|
9060
9089
|
}
|
|
9061
9090
|
}
|
|
9062
|
-
var ListAccountOptionsSchema =
|
|
9063
|
-
var UseAccountOptionsSchema =
|
|
9064
|
-
accountName:
|
|
9091
|
+
var ListAccountOptionsSchema = z12.object({});
|
|
9092
|
+
var UseAccountOptionsSchema = z12.object({
|
|
9093
|
+
accountName: z12.string()
|
|
9065
9094
|
});
|
|
9066
|
-
var DeleteAccountOptionsSchema =
|
|
9067
|
-
accountName:
|
|
9095
|
+
var DeleteAccountOptionsSchema = z12.object({
|
|
9096
|
+
accountName: z12.string()
|
|
9068
9097
|
});
|
|
9069
9098
|
function accountCommand(program) {
|
|
9070
9099
|
const account = program.command("account").description("Manage signing accounts (addresses & keys)");
|
|
@@ -9423,9 +9452,9 @@ function accountCommand(program) {
|
|
|
9423
9452
|
import * as fs4 from "fs";
|
|
9424
9453
|
import * as fsP from "fs/promises";
|
|
9425
9454
|
import { Readable } from "stream";
|
|
9426
|
-
import { z as
|
|
9427
|
-
var CommitmentOptionsSchema =
|
|
9428
|
-
input:
|
|
9455
|
+
import { z as z13 } from "zod";
|
|
9456
|
+
var CommitmentOptionsSchema = z13.object({
|
|
9457
|
+
input: z13.string().nonempty("`--input` is required").refine(
|
|
9429
9458
|
async (path11) => {
|
|
9430
9459
|
const stat4 = await fsP.stat(path11);
|
|
9431
9460
|
return stat4.isFile();
|
|
@@ -9434,7 +9463,7 @@ var CommitmentOptionsSchema = z12.object({
|
|
|
9434
9463
|
message: "`--input` must be a file"
|
|
9435
9464
|
}
|
|
9436
9465
|
),
|
|
9437
|
-
output:
|
|
9466
|
+
output: z13.string().nonempty("`--output` is required")
|
|
9438
9467
|
});
|
|
9439
9468
|
function commitmentCommand(program) {
|
|
9440
9469
|
program.command("commitment <input> <output>").description(
|
|
@@ -9535,30 +9564,30 @@ function configCommand(program) {
|
|
|
9535
9564
|
|
|
9536
9565
|
// src/commands/context.tsx
|
|
9537
9566
|
import { render as render2 } from "ink";
|
|
9538
|
-
import { z as
|
|
9567
|
+
import { z as z15 } from "zod";
|
|
9539
9568
|
|
|
9540
9569
|
// src/utils/commands.ts
|
|
9541
|
-
import
|
|
9542
|
-
var AptosCommandOptionsSchema =
|
|
9543
|
-
aptosNetwork:
|
|
9570
|
+
import z14 from "zod";
|
|
9571
|
+
var AptosCommandOptionsSchema = z14.object({
|
|
9572
|
+
aptosNetwork: z14.string().optional(),
|
|
9544
9573
|
// predefined network name
|
|
9545
|
-
aptosFullnode:
|
|
9546
|
-
aptosFaucet:
|
|
9547
|
-
aptosIndexer:
|
|
9548
|
-
aptosPepper:
|
|
9549
|
-
aptosProver:
|
|
9550
|
-
aptosApiKey:
|
|
9574
|
+
aptosFullnode: z14.string().url().optional(),
|
|
9575
|
+
aptosFaucet: z14.string().url().optional(),
|
|
9576
|
+
aptosIndexer: z14.string().url().optional(),
|
|
9577
|
+
aptosPepper: z14.string().url().optional(),
|
|
9578
|
+
aptosProver: z14.string().url().optional(),
|
|
9579
|
+
aptosApiKey: z14.string().optional()
|
|
9551
9580
|
});
|
|
9552
9581
|
var addAptosCommandOptions = (context) => context.option(
|
|
9553
9582
|
"--aptos-network <network>",
|
|
9554
9583
|
`Aptos network (${shelbyNetworks.join(", ")})`
|
|
9555
9584
|
).option("--aptos-fullnode <url>", "Aptos fullnode URL").option("--aptos-faucet <url>", "Aptos faucet URL").option("--aptos-indexer <url>", "Aptos indexer URL").option("--aptos-pepper <url>", "Aptos pepper URL").option("--aptos-prover <url>", "Aptos prover URL").option("--aptos-api-key <key>", "Aptos API key");
|
|
9556
|
-
var ShelbyCommandOptionsSchema =
|
|
9585
|
+
var ShelbyCommandOptionsSchema = z14.object({
|
|
9557
9586
|
shelbyRpcEndpoint: EndpointSchema.optional(),
|
|
9558
9587
|
shelbyIndexerEndpoint: EndpointSchema.optional(),
|
|
9559
|
-
shelbyRpcApiKey:
|
|
9560
|
-
shelbyIndexerApiKey:
|
|
9561
|
-
shelbyRpcReceiverAddress:
|
|
9588
|
+
shelbyRpcApiKey: z14.string().optional(),
|
|
9589
|
+
shelbyIndexerApiKey: z14.string().optional(),
|
|
9590
|
+
shelbyRpcReceiverAddress: z14.string().optional()
|
|
9562
9591
|
});
|
|
9563
9592
|
var addShelbyCommandOptions = (context) => context.option("--shelby-rpc-endpoint <url>", "Shelby RPC endpoint").option("--shelby-indexer-endpoint <url>", "Shelby indexer endpoint").option("--shelby-rpc-api-key <key>", "Shelby RPC API key").option("--shelby-indexer-api-key <key>", "Shelby indexer API key").option(
|
|
9564
9593
|
"--shelby-rpc-receiver-address <address>",
|
|
@@ -9599,8 +9628,8 @@ function getShelbyNetworkFromOptions(options) {
|
|
|
9599
9628
|
rpc_receiver_address: options.shelbyRpcReceiverAddress
|
|
9600
9629
|
};
|
|
9601
9630
|
}
|
|
9602
|
-
var CreateContextOptionsSchema =
|
|
9603
|
-
var UpdateContextOptionsSchema =
|
|
9631
|
+
var CreateContextOptionsSchema = z15.object({ name: z15.string().optional() }).merge(AptosCommandOptionsSchema).merge(ShelbyCommandOptionsSchema);
|
|
9632
|
+
var UpdateContextOptionsSchema = z15.object({}).merge(ShelbyCommandOptionsSchema).merge(AptosCommandOptionsSchema);
|
|
9604
9633
|
function contextCommand(program) {
|
|
9605
9634
|
const context = program.command("context").description("Manage network contexts (Shelby RPC & Aptos endpoints)");
|
|
9606
9635
|
addShelbyCommandOptions(addAptosCommandOptions(context.command("create"))).description("Create a new context").option("--name <context-name>", "Name of the context").action((options) => {
|
|
@@ -9865,7 +9894,7 @@ import { Aptos as Aptos6, AptosConfig as AptosConfig5 } from "@aptos-labs/ts-sdk
|
|
|
9865
9894
|
import { Box as Box7, render as render3, Text as Text7 } from "ink";
|
|
9866
9895
|
import SelectInput4 from "ink-select-input";
|
|
9867
9896
|
import ora from "ora";
|
|
9868
|
-
import { z as
|
|
9897
|
+
import { z as z16 } from "zod";
|
|
9869
9898
|
|
|
9870
9899
|
// src/utils/commander-helpers.ts
|
|
9871
9900
|
function createExitOverrideHandler(commandName, requiredArgs, requiredOptions, exampleUsage, warningMessage) {
|
|
@@ -9916,17 +9945,17 @@ function createExitOverrideHandler(commandName, requiredArgs, requiredOptions, e
|
|
|
9916
9945
|
|
|
9917
9946
|
// src/commands/delete.tsx
|
|
9918
9947
|
import { jsx as jsx12, jsxs as jsxs7 } from "react/jsx-runtime";
|
|
9919
|
-
var DeleteOptionsSchema =
|
|
9920
|
-
destination:
|
|
9948
|
+
var DeleteOptionsSchema = z16.object({
|
|
9949
|
+
destination: z16.string({
|
|
9921
9950
|
required_error: "\n\u274C Missing Required Argument\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n\u26A0\uFE0F Missing destination blob name (argument)\n\n\u{1F4A1} Usage:\n shelby delete <destination-blob-name> [options]\n\n\u{1F4DD} Example:\n shelby delete files/my-blob.txt\n shelby delete folder/ -r (delete all blobs in folder)\n\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500"
|
|
9922
9951
|
}).min(1, "Destination blob name is required"),
|
|
9923
|
-
recursive:
|
|
9924
|
-
assumeYes:
|
|
9952
|
+
recursive: z16.boolean().optional().default(false),
|
|
9953
|
+
assumeYes: z16.boolean().optional().default(false)
|
|
9925
9954
|
}).superRefine((data, ctx) => {
|
|
9926
9955
|
if (data.recursive) {
|
|
9927
9956
|
if (!data.destination.endsWith("/")) {
|
|
9928
9957
|
ctx.addIssue({
|
|
9929
|
-
code:
|
|
9958
|
+
code: z16.ZodIssueCode.custom,
|
|
9930
9959
|
message: `
|
|
9931
9960
|
\u274C Delete Failed
|
|
9932
9961
|
\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500
|
|
@@ -9944,7 +9973,7 @@ var DeleteOptionsSchema = z15.object({
|
|
|
9944
9973
|
const blobNameResult = BlobNameSchema.safeParse(data.destination);
|
|
9945
9974
|
if (!blobNameResult.success) {
|
|
9946
9975
|
ctx.addIssue({
|
|
9947
|
-
code:
|
|
9976
|
+
code: z16.ZodIssueCode.custom,
|
|
9948
9977
|
message: `
|
|
9949
9978
|
\u274C Delete Failed
|
|
9950
9979
|
\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500
|
|
@@ -9981,12 +10010,10 @@ async function createDeleteList(options, nodeClient, account) {
|
|
|
9981
10010
|
);
|
|
9982
10011
|
}
|
|
9983
10012
|
} else {
|
|
9984
|
-
const
|
|
9985
|
-
account
|
|
10013
|
+
const md = await nodeClient.coordination.getBlobMetadata({
|
|
10014
|
+
account,
|
|
10015
|
+
name: options.destination
|
|
9986
10016
|
});
|
|
9987
|
-
const md = activeBlobs.find(
|
|
9988
|
-
(blob) => stripAccountPrefix(blob.name) === options.destination
|
|
9989
|
-
);
|
|
9990
10017
|
if (!md) {
|
|
9991
10018
|
throw new Error(
|
|
9992
10019
|
`No active blob named '${options.destination}' was found for account ${account.toString()}. It may have expired or been deleted.`
|
|
@@ -10027,7 +10054,7 @@ function deleteCommand(program) {
|
|
|
10027
10054
|
destination
|
|
10028
10055
|
});
|
|
10029
10056
|
} catch (error) {
|
|
10030
|
-
if (error instanceof
|
|
10057
|
+
if (error instanceof z16.ZodError) {
|
|
10031
10058
|
const firstIssue = error.issues[0];
|
|
10032
10059
|
if (firstIssue) {
|
|
10033
10060
|
console.log(firstIssue.message);
|
|
@@ -10198,7 +10225,7 @@ import * as path6 from "path";
|
|
|
10198
10225
|
import { Readable as Readable2, Transform } from "stream";
|
|
10199
10226
|
import { pipeline } from "stream/promises";
|
|
10200
10227
|
import ora2 from "ora";
|
|
10201
|
-
import { z as
|
|
10228
|
+
import { z as z17 } from "zod";
|
|
10202
10229
|
|
|
10203
10230
|
// src/utils/micropayment-manager.ts
|
|
10204
10231
|
import crypto2 from "crypto";
|
|
@@ -10560,16 +10587,16 @@ async function releaseDownloadLock() {
|
|
|
10560
10587
|
} catch {
|
|
10561
10588
|
}
|
|
10562
10589
|
}
|
|
10563
|
-
var DownloadOptionsSchema =
|
|
10564
|
-
source:
|
|
10590
|
+
var DownloadOptionsSchema = z17.object({
|
|
10591
|
+
source: z17.string({
|
|
10565
10592
|
required_error: "\n\u274C Missing Required Argument\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n\u26A0\uFE0F Missing source blob name or prefix (first argument)\n\n\u{1F4A1} Usage:\n shelby download <source-blob-name> <destination-path> [options]\n\n\u{1F4DD} Examples:\n shelby download my-blob.txt ./myfile.txt\n shelby download my-folder/ ./my-folder/ -r\n"
|
|
10566
10593
|
}).min(1, "Source blob name or directory prefix is required").describe("Blob name or directory prefix to download"),
|
|
10567
|
-
destination:
|
|
10594
|
+
destination: z17.string({
|
|
10568
10595
|
required_error: "\n\u274C Missing Required Argument\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n\u26A0\uFE0F Missing destination path (second argument)\n\n\u{1F4A1} Usage:\n shelby download <source-blob-name> <destination-path> [options]\n\n\u{1F4DD} Example:\n shelby download my-blob.txt ./myfile.txt\n"
|
|
10569
10596
|
}).min(1, "`destination` must be a valid filepath").describe("Local path where to save the downloaded content"),
|
|
10570
|
-
recursive:
|
|
10571
|
-
force:
|
|
10572
|
-
allowConcurrent:
|
|
10597
|
+
recursive: z17.boolean().default(false).describe("Download assuming canonical directory layout and recurse"),
|
|
10598
|
+
force: z17.boolean().default(false).describe("Overwrite the destination if it already exists"),
|
|
10599
|
+
allowConcurrent: z17.boolean().default(false).describe(
|
|
10573
10600
|
"Allow concurrent downloads (bypasses lock file, micropayments may not work correctly)"
|
|
10574
10601
|
)
|
|
10575
10602
|
}).refine(
|
|
@@ -10715,12 +10742,10 @@ async function createFileList(options, nodeClient, account) {
|
|
|
10715
10742
|
);
|
|
10716
10743
|
}
|
|
10717
10744
|
} else {
|
|
10718
|
-
const
|
|
10719
|
-
account
|
|
10745
|
+
const md = await nodeClient.coordination.getBlobMetadata({
|
|
10746
|
+
account,
|
|
10747
|
+
name: options.source
|
|
10720
10748
|
});
|
|
10721
|
-
const md = activeBlobs.find(
|
|
10722
|
-
(blob) => stripAccountPrefix(blob.name) === options.source
|
|
10723
|
-
);
|
|
10724
10749
|
if (!md) {
|
|
10725
10750
|
throw new Error(
|
|
10726
10751
|
`No active blob named '${options.source}' was found for account ${account.toString()}. It may have expired or been deleted.
|
|
@@ -10796,7 +10821,7 @@ function downloadCommand(program) {
|
|
|
10796
10821
|
destination
|
|
10797
10822
|
});
|
|
10798
10823
|
} catch (error) {
|
|
10799
|
-
if (error instanceof
|
|
10824
|
+
if (error instanceof z17.ZodError) {
|
|
10800
10825
|
const firstIssue = error.issues[0];
|
|
10801
10826
|
if (firstIssue) {
|
|
10802
10827
|
console.log(firstIssue.message);
|
|
@@ -11038,10 +11063,10 @@ function downloadCommand(program) {
|
|
|
11038
11063
|
import { Network as Network9 } from "@aptos-labs/ts-sdk";
|
|
11039
11064
|
import { Option as Option2 } from "@commander-js/extra-typings";
|
|
11040
11065
|
import { execaSync } from "execa";
|
|
11041
|
-
import { z as
|
|
11042
|
-
var FaucetOptionsSchema =
|
|
11043
|
-
network:
|
|
11044
|
-
open:
|
|
11066
|
+
import { z as z18 } from "zod";
|
|
11067
|
+
var FaucetOptionsSchema = z18.object({
|
|
11068
|
+
network: z18.enum([Network9.SHELBYNET]).optional(),
|
|
11069
|
+
open: z18.boolean().optional().default(true)
|
|
11045
11070
|
});
|
|
11046
11071
|
function faucetCommand(program) {
|
|
11047
11072
|
program.command("faucet").description("Open the Shelby faucet web page to request tokens").addOption(
|
|
@@ -11265,10 +11290,10 @@ import ignore from "ignore";
|
|
|
11265
11290
|
import { Box as Box8, render as render5, Text as Text8 } from "ink";
|
|
11266
11291
|
import SelectInput5 from "ink-select-input";
|
|
11267
11292
|
import ora3 from "ora";
|
|
11268
|
-
import { z as
|
|
11293
|
+
import { z as z19 } from "zod";
|
|
11269
11294
|
import { jsx as jsx14, jsxs as jsxs8 } from "react/jsx-runtime";
|
|
11270
11295
|
var normBlobName2 = (i, f, b) => normBlobName(path8, i, f, b);
|
|
11271
|
-
var flexibleDateSchema =
|
|
11296
|
+
var flexibleDateSchema = z19.string().transform((val, ctx) => {
|
|
11272
11297
|
const now = /* @__PURE__ */ new Date();
|
|
11273
11298
|
let parsedDate = null;
|
|
11274
11299
|
if (/^\d+$/.test(val)) {
|
|
@@ -11291,7 +11316,7 @@ var flexibleDateSchema = z18.string().transform((val, ctx) => {
|
|
|
11291
11316
|
}
|
|
11292
11317
|
if (!parsedDate || Number.isNaN(parsedDate.getTime())) {
|
|
11293
11318
|
ctx.addIssue({
|
|
11294
|
-
code:
|
|
11319
|
+
code: z19.ZodIssueCode.custom,
|
|
11295
11320
|
message: `
|
|
11296
11321
|
\u274C Upload Failed
|
|
11297
11322
|
\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500
|
|
@@ -11305,11 +11330,11 @@ var flexibleDateSchema = z18.string().transform((val, ctx) => {
|
|
|
11305
11330
|
|
|
11306
11331
|
\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500`
|
|
11307
11332
|
});
|
|
11308
|
-
return
|
|
11333
|
+
return z19.NEVER;
|
|
11309
11334
|
}
|
|
11310
11335
|
if (parsedDate.getTime() <= now.getTime()) {
|
|
11311
11336
|
ctx.addIssue({
|
|
11312
|
-
code:
|
|
11337
|
+
code: z19.ZodIssueCode.custom,
|
|
11313
11338
|
message: `
|
|
11314
11339
|
\u274C Upload Failed
|
|
11315
11340
|
\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500
|
|
@@ -11321,26 +11346,26 @@ var flexibleDateSchema = z18.string().transform((val, ctx) => {
|
|
|
11321
11346
|
|
|
11322
11347
|
\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500`
|
|
11323
11348
|
});
|
|
11324
|
-
return
|
|
11349
|
+
return z19.NEVER;
|
|
11325
11350
|
}
|
|
11326
11351
|
return parsedDate;
|
|
11327
11352
|
});
|
|
11328
|
-
var UploadOptionsSchema =
|
|
11329
|
-
source:
|
|
11353
|
+
var UploadOptionsSchema = z19.object({
|
|
11354
|
+
source: z19.string({
|
|
11330
11355
|
required_error: '\n\u274C Missing Required Argument\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n\u26A0\uFE0F Missing source file or directory path (first argument)\n\n\u{1F4A1} Usage:\n shelby upload <source-file-or-directory> <destination-blob-name> [options]\n\n\u{1F4DD} Examples:\n shelby upload ./myfile.txt my-blob.txt -e tomorrow\n shelby upload ./my-folder/ my-folder/ -r -e "next week"\n\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500'
|
|
11331
11356
|
}).min(1, "Source file or directory path is required"),
|
|
11332
|
-
destination:
|
|
11357
|
+
destination: z19.string({
|
|
11333
11358
|
required_error: "\n\u274C Missing Required Argument\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n\u26A0\uFE0F Missing destination blob name (second argument)\n\n\u{1F4A1} Usage:\n shelby upload <source-file-or-directory> <destination-blob-name> [options]\n\n\u{1F4DD} Example:\n shelby upload ./myfile.txt files/my-blob.txt -e tomorrow\n\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500"
|
|
11334
11359
|
}).min(1, "Destination blob name is required"),
|
|
11335
11360
|
expiration: flexibleDateSchema,
|
|
11336
|
-
recursive:
|
|
11337
|
-
assumeYes:
|
|
11338
|
-
outputCommitments:
|
|
11361
|
+
recursive: z19.boolean().optional().default(false),
|
|
11362
|
+
assumeYes: z19.boolean().optional().default(false),
|
|
11363
|
+
outputCommitments: z19.string().optional()
|
|
11339
11364
|
}).superRefine(async (data, ctx) => {
|
|
11340
11365
|
const stats = await fs9.stat(data.source);
|
|
11341
11366
|
if (!stats.isFile() && !stats.isDirectory()) {
|
|
11342
11367
|
ctx.addIssue({
|
|
11343
|
-
code:
|
|
11368
|
+
code: z19.ZodIssueCode.custom,
|
|
11344
11369
|
message: "\n\u274C Upload Failed\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n\u26A0\uFE0F Source path must be a file or directory\n\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500",
|
|
11345
11370
|
path: ["source"]
|
|
11346
11371
|
});
|
|
@@ -11349,7 +11374,7 @@ var UploadOptionsSchema = z18.object({
|
|
|
11349
11374
|
if (stats.isDirectory()) {
|
|
11350
11375
|
if (!data.destination.endsWith("/")) {
|
|
11351
11376
|
ctx.addIssue({
|
|
11352
|
-
code:
|
|
11377
|
+
code: z19.ZodIssueCode.custom,
|
|
11353
11378
|
message: `
|
|
11354
11379
|
\u274C Upload Failed
|
|
11355
11380
|
\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500
|
|
@@ -11367,7 +11392,7 @@ var UploadOptionsSchema = z18.object({
|
|
|
11367
11392
|
const blobNameResult = BlobNameSchema.safeParse(data.destination);
|
|
11368
11393
|
if (!blobNameResult.success) {
|
|
11369
11394
|
ctx.addIssue({
|
|
11370
|
-
code:
|
|
11395
|
+
code: z19.ZodIssueCode.custom,
|
|
11371
11396
|
message: `
|
|
11372
11397
|
\u274C Upload Failed
|
|
11373
11398
|
\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500
|
|
@@ -11539,7 +11564,7 @@ function uploadCommand(program) {
|
|
|
11539
11564
|
destination
|
|
11540
11565
|
});
|
|
11541
11566
|
} catch (error) {
|
|
11542
|
-
if (error instanceof
|
|
11567
|
+
if (error instanceof z19.ZodError) {
|
|
11543
11568
|
const firstIssue = error.issues[0];
|
|
11544
11569
|
if (firstIssue) {
|
|
11545
11570
|
console.log(firstIssue.message);
|