@typeberry/lib 0.0.5-cdbb94a → 0.0.5-dbd8e5a
- package/index.d.ts +42 -139
- package/index.js +89 -144
- package/package.json +2 -9
- package/index.cjs +0 -17538
package/index.d.ts (CHANGED)
@@ -4474,9 +4474,6 @@ declare namespace bandersnatch_d_exports {
 }
 /* tslint:disable */
 /* eslint-disable */
-/**
- * Generate ring commitment given concatenation of ring keys.
- */
 declare function ring_commitment(keys: Uint8Array): Uint8Array;
 /**
  * Derive Private and Public Key from Seed
@@ -4490,21 +4487,21 @@ declare function derive_public_key(seed: Uint8Array): Uint8Array;
  * or
  * https://graypaper.fluffylabs.dev/#/68eaa1f/0e54010e5401?v=0.6.4
  */
-declare function verify_seal(
+declare function verify_seal(keys: Uint8Array, signer_key_index: number, seal_data: Uint8Array, payload: Uint8Array, aux_data: Uint8Array): Uint8Array;
 /**
  * Verify multiple tickets at once as defined in:
  * https://graypaper.fluffylabs.dev/#/68eaa1f/0f3e000f3e00?v=0.6.4
  *
  * NOTE: the aux_data of VRF function is empty!
  */
-declare function batch_verify_tickets(
+declare function batch_verify_tickets(keys: Uint8Array, tickets_data: Uint8Array, vrf_input_data_len: number): Uint8Array;
 type InitInput$2 = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;
 interface InitOutput$2 {
   readonly memory: WebAssembly.Memory;
   readonly ring_commitment: (a: number, b: number) => [number, number];
   readonly derive_public_key: (a: number, b: number) => [number, number];
-  readonly verify_seal: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => [number, number];
-  readonly batch_verify_tickets: (a: number, b: number, c: number, d: number, e: number
+  readonly verify_seal: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number) => [number, number];
+  readonly batch_verify_tickets: (a: number, b: number, c: number, d: number, e: number) => [number, number];
   readonly __wbindgen_export_0: WebAssembly.Table;
   readonly __wbindgen_malloc: (a: number, b: number) => number;
   readonly __wbindgen_free: (a: number, b: number, c: number) => void;
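Note: the multi-line `verify_seal` and `batch_verify_tickets` declarations were collapsed to single lines, and the underlying wasm binding for `verify_seal` gained one parameter. A hypothetical call against the new declaration, purely to illustrate the flattened signature; buffer sizes and key layout are assumptions, not something this diff states:

    // Illustrative only: sizes and layout below are assumed, not specified by the diff.
    const ringKeys = new Uint8Array(32 * 6);      // concatenated public keys (assumed 32 bytes each)
    const sealOutput: Uint8Array = verify_seal(
      ringKeys,
      2,                       // signer_key_index: position of the signer within ringKeys
      new Uint8Array(96),      // seal_data: the seal signature bytes (assumed length)
      new Uint8Array([1, 2]),  // payload that was signed
      new Uint8Array(0),       // aux_data for the VRF
    );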
@@ -8533,9 +8530,7 @@ declare class WriteableNodesDb extends NodesDb {
   }
 }
 
-
-declare const leafComparator = (x: LeafNode, y: LeafNode) => x.getKey().compare(y.getKey());
-declare const zero = Bytes.zero(HASH_SIZE).asOpaque();
+declare const leafComparator = (x: LeafNode, y: LeafNode) => y.getKey().compare(x.getKey());
 
 declare class InMemoryTrie {
   /** Create an empty in-memory trie. */
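Note: swapping the receiver and argument of `compare` in `leafComparator` reverses the ordering it induces under `Array.prototype.sort`. A standalone illustration, with Node's `Buffer.compare` standing in for the library's key comparison (an assumption):

    const keys = [Buffer.from([0x02]), Buffer.from([0x01]), Buffer.from([0x03])];
    // Old comparator shape: ascending by key.
    [...keys].sort((x, y) => x.compare(y)); // 0x01, 0x02, 0x03
    // New comparator shape: descending by key.
    [...keys].sort((x, y) => y.compare(x)); // 0x03, 0x02, 0x01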
@@ -8544,87 +8539,10 @@ declare class InMemoryTrie {
   }
 
   /** Given a collection of leaves, compute the state root. */
-  static computeStateRoot(hasher: TrieHasher, leaves:
-
-
-
-      return zero;
-    }
-
-    const nodes = [
-      {
-        leaf: firstSorted,
-        sharedBitsWithPrev: 0,
-      },
-    ];
-    let last = nodes[0];
-    // first we go through all of the sorted leaves and figure out how much in common
-    // they have with the previous node.
-    // If the shared-prefix drops, it means we are going up in depth (i.e. we are in different branch).
-    for (const leaf of sorted) {
-      const sharedBitsCount = findSharedPrefix(leaf.getKey(), last.leaf.getKey());
-      last = {
-        leaf,
-        sharedBitsWithPrev: sharedBitsCount,
-      };
-      nodes.push(last);
-    }
-    // Now we will go backwards and hash them together (or create branch nodes).
-    nodes.reverse();
-    const stack: TrieNodeHash[] = [];
-    let currentDepth = 0;
-    const lastNode = nodes.length === 1 ? undefined : nodes[nodes.length - 1];
-    for (const node of nodes) {
-      const isLastNode = node === lastNode;
-      const key = node.leaf.getKey();
-      const prevDepth = currentDepth;
-      currentDepth = node.sharedBitsWithPrev;
-
-      // first push all missing right-hand zero nodes.
-      // Handle the case if all nodes are on the left side and we need one more top-level
-      // extra.
-      const startDepth = isLastNode ? prevDepth : prevDepth + 1;
-      for (let i = startDepth; i <= currentDepth; i++) {
-        if (getBit(key, i) === false) {
-          stack.push(zero);
-        }
-      }
-
-      // now let's push the hash of the current leaf
-      const hash = hasher.hashConcat(node.leaf.node.raw);
-      stack.push(hash);
-      // we are going further down, so no need to merge anything
-      if (prevDepth < currentDepth) {
-        continue;
-      }
-      // jumping back to some lower depth, we need to merge what we have on the stack.
-      // we need to handle a case where we have no nodes on the top-most left side.
-      // in such case we just add extra zero on the left.
-      const endDepth = isLastNode ? currentDepth - 1 : currentDepth;
-      for (let i = prevDepth; i > endDepth; i--) {
-        if (getBit(key, i) === true) {
-          stack.push(zero);
-        }
-        const current = stack.pop() ?? zero;
-        const next = stack.pop() ?? zero;
-        const branchNode = BranchNode.fromSubNodes(current, next);
-        const hash = hasher.hashConcat(branchNode.node.raw);
-        stack.push(hash);
-      }
-    }
-
-    return stack.pop() ?? zero;
-  }
-
-  /**
-   * Construct a `LeafNode` from given `key` and `value`.
-   *
-   * NOTE: for large value it WILL NOT be embedded in the leaf node,
-   * and should rather be stored separately.
-   */
-  static constructLeaf(hasher: TrieHasher, key: InputKey, value: BytesBlob, maybeValueHash?: ValueHash) {
-    const valueHash = () => maybeValueHash ?? hasher.hashConcat(value.raw).asOpaque();
-    return LeafNode.fromValue(key, value, valueHash);
+  static computeStateRoot(hasher: TrieHasher, leaves: readonly LeafNode[]) {
+    // TODO [ToDr] [opti] Simple loop to just compute the root hash instead of
+    // constructing the entire trie.
+    return InMemoryTrie.fromLeaves(hasher, leaves).getRootHash();
  }
 
  /**
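Note: the removed body folded pre-sorted leaves through an explicit hash stack; the replacement simply builds the whole trie and asks it for its root. A sketch of how a caller would use the new form; `blake2bTrieHasher` appears elsewhere in this diff, the rest of the setup is assumed:

    // Hypothetical caller of the simplified static method; someKey/someValue are placeholders.
    const leaves: readonly LeafNode[] = [
      InMemoryTrie.constructLeaf(blake2bTrieHasher, someKey, someValue),
    ];
    const root = InMemoryTrie.computeStateRoot(blake2bTrieHasher, leaves);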
@@ -8642,6 +8560,11 @@ declare class InMemoryTrie {
     return new InMemoryTrie(nodes, root);
   }
 
+  static constructLeaf(hasher: TrieHasher, key: InputKey, value: BytesBlob, maybeValueHash?: ValueHash) {
+    const valueHash = () => maybeValueHash ?? hasher.hashConcat(value.raw).asOpaque();
+    return LeafNode.fromValue(key, value, valueHash);
+  }
+
   private constructor(
     // Exposed for trie-visualiser
     public readonly nodes: WriteableNodesDb,
@@ -8768,7 +8691,7 @@ declare function findNodeToReplace(root: TrieNode, nodes: NodesDb, key: Truncate
 
   const nextNode = nodes.get(nextHash);
   if (nextNode === null) {
-    if (nextHash.isEqualTo(zero)) {
+    if (nextHash.isEqualTo(Bytes.zero(HASH_SIZE))) {
       return traversedPath;
     }
 
@@ -8904,35 +8827,6 @@ declare function trieStringify(root: TrieNode | null, nodes: NodesDb): string {
   return `\nLeaf('${leaf.getKey().toString()}',${value})`;
 }
 
-declare function findSharedPrefix(a: TruncatedStateKey, b: TruncatedStateKey) {
-  for (let i = 0; i < TRUNCATED_HASH_SIZE; i++) {
-    const diff = a.raw[i] ^ b.raw[i];
-    if (diff === 0) {
-      continue;
-    }
-    // check how many bits match
-    for (const [mask, matchingBits] of bitLookup) {
-      if ((mask & diff) !== 0) {
-        return i * 8 + matchingBits;
-      }
-    }
-    return i;
-  }
-  return TRUNCATED_HASH_SIZE * 8;
-}
-
-declare const bitLookup = [
-  [0b10000000, 0],
-  [0b01000000, 1],
-  [0b00100000, 2],
-  [0b00010000, 3],
-  [0b00001000, 4],
-  [0b00000100, 5],
-  [0b00000010, 6],
-  [0b00000001, 7],
-  [0b00000000, 8],
-];
-
 type index$e_BranchNode = BranchNode;
 declare const index$e_BranchNode: typeof BranchNode;
 type index$e_InMemoryTrie = InMemoryTrie;
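Note: the removed `findSharedPrefix` counted the leading bits two keys share by XOR-ing bytes and scanning the `bitLookup` mask table. The same logic can be restated standalone with `Math.clz32` in place of the table (a substitution for illustration, not the library's code):

    function sharedPrefixBits(a: Uint8Array, b: Uint8Array): number {
      for (let i = 0; i < a.length; i++) {
        const diff = a[i] ^ b[i];
        if (diff !== 0) {
          // clz32 counts leading zero bits of a 32-bit word; shift the byte to the top.
          return i * 8 + Math.clz32(diff << 24);
        }
      }
      return a.length * 8; // keys identical over the compared length
    }

    sharedPrefixBits(new Uint8Array([0xff, 0x0f]), new Uint8Array([0xff, 0x1f])); // 11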
@@ -8957,18 +8851,15 @@ type index$e_TruncatedStateKey = TruncatedStateKey;
 type index$e_ValueHash = ValueHash;
 type index$e_WriteableNodesDb = WriteableNodesDb;
 declare const index$e_WriteableNodesDb: typeof WriteableNodesDb;
-declare const index$e_bitLookup: typeof bitLookup;
 declare const index$e_createSubtreeForBothLeaves: typeof createSubtreeForBothLeaves;
 declare const index$e_findNodeToReplace: typeof findNodeToReplace;
-declare const index$e_findSharedPrefix: typeof findSharedPrefix;
 declare const index$e_getBit: typeof getBit;
 declare const index$e_leafComparator: typeof leafComparator;
 declare const index$e_parseInputKey: typeof parseInputKey;
 declare const index$e_trieInsert: typeof trieInsert;
 declare const index$e_trieStringify: typeof trieStringify;
-declare const index$e_zero: typeof zero;
 declare namespace index$e {
-  export { index$e_BranchNode as BranchNode, index$e_InMemoryTrie as InMemoryTrie, index$e_LeafNode as LeafNode, index$e_NodeType as NodeType, index$e_NodesDb as NodesDb, index$e_TRIE_NODE_BYTES as TRIE_NODE_BYTES, index$e_TRUNCATED_KEY_BITS as TRUNCATED_KEY_BITS, index$e_TraversedPath as TraversedPath, index$e_TrieNode as TrieNode, index$e_WriteableNodesDb as WriteableNodesDb, index$
+  export { index$e_BranchNode as BranchNode, index$e_InMemoryTrie as InMemoryTrie, index$e_LeafNode as LeafNode, index$e_NodeType as NodeType, index$e_NodesDb as NodesDb, index$e_TRIE_NODE_BYTES as TRIE_NODE_BYTES, index$e_TRUNCATED_KEY_BITS as TRUNCATED_KEY_BITS, index$e_TraversedPath as TraversedPath, index$e_TrieNode as TrieNode, index$e_WriteableNodesDb as WriteableNodesDb, index$e_createSubtreeForBothLeaves as createSubtreeForBothLeaves, index$e_findNodeToReplace as findNodeToReplace, index$e_getBit as getBit, index$e_leafComparator as leafComparator, index$e_parseInputKey as parseInputKey, index$e_trieInsert as trieInsert, index$e_trieStringify as trieStringify };
   export type { index$e_InputKey as InputKey, StateKey$1 as StateKey, index$e_TRUNCATED_KEY_BYTES as TRUNCATED_KEY_BYTES, index$e_TrieHasher as TrieHasher, index$e_TrieNodeHash as TrieNodeHash, index$e_TruncatedStateKey as TruncatedStateKey, index$e_ValueHash as ValueHash };
 }
 
@@ -11803,6 +11694,8 @@ declare class StateEntries {
     return new StateEntries(TruncatedHashDictionary.fromEntries(entries));
   }
 
+  private trieCache: InMemoryTrie | null = null;
+
   private constructor(private readonly entries: TruncatedHashDictionary<StateKey, BytesBlob>) {}
 
   /** When comparing, we can safely ignore `trieCache` and just use entries. */
@@ -11814,6 +11707,18 @@ declare class StateEntries {
     return this.entries[Symbol.iterator]();
   }
 
+  /** Construct the trie from given set of state entries. */
+  public getTrie(): InMemoryTrie {
+    if (this.trieCache === null) {
+      const trie = InMemoryTrie.empty(blake2bTrieHasher);
+      for (const [key, value] of this.entries) {
+        trie.set(key.asOpaque(), value);
+      }
+      this.trieCache = trie;
+    }
+    return this.trieCache;
+  }
+
   /** Retrieve value of some serialized key (if present). */
   get(key: StateKey): BytesBlob | null {
     return this.entries.get(key) ?? null;
@@ -11821,6 +11726,8 @@ declare class StateEntries {
 
   /** Modify underlying entries dictionary with given update. */
   applyUpdate(stateEntriesUpdate: Iterable<StateEntryUpdate>) {
+    // NOTE since we are altering the structure, we need to reset the cache.
+    this.trieCache = null;
     for (const [action, key, value] of stateEntriesUpdate) {
       if (action === StateEntryUpdateAction.Insert) {
         this.entries.set(key, value);
@@ -11834,12 +11741,10 @@ declare class StateEntries {
 
   /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
   getRootHash(): StateRootHash {
-
-
-
-
-
-    return InMemoryTrie.computeStateRoot(blake2bTrieHasher, leaves).asOpaque();
+    // TODO [ToDr] it should be possible to do this more efficiently
+    // by converting the state entries into leaf nodes and constructing
+    // the trie from the trie nodes.
+    return this.getTrie().getRootHash().asOpaque();
   }
 }
 
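Note: taken together, the `StateEntries` hunks above implement an invalidate-on-write memo: `getTrie` builds the trie lazily and caches it, `applyUpdate` drops the cache on any mutation, and `getRootHash` reads through it. A minimal self-contained model of the same pattern (the types here are illustrative, not the package's API):

    class Memoized<K, V> {
      private cache: Map<K, V> | null = null;

      constructor(private readonly entries: Array<[K, V]>) {}

      // Built on first use, reused until the next mutation.
      getIndex(): Map<K, V> {
        if (this.cache === null) {
          this.cache = new Map(this.entries);
        }
        return this.cache;
      }

      // Any structural change must invalidate the derived structure.
      insert(key: K, value: V): void {
        this.entries.push([key, value]);
        this.cache = null;
      }
    }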
@@ -12226,13 +12131,13 @@ declare class LeafDb implements SerializedStateBackend {
       );
     }
 
-    const leaves =
+    const leaves: LeafNode[] = [];
     for (const nodeData of blob.chunks(TRIE_NODE_BYTES)) {
       const node = new TrieNode(nodeData.raw);
       if (node.getNodeType() === NodeType.Branch) {
         return Result.error(LeafDbError.InvalidLeafData, `Branch node detected: ${nodeData}`);
       }
-      leaves.
+      leaves.push(node.asLeafNode());
     }
 
     return Result.ok(new LeafDb(leaves, db));
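Note: the completed loop relies on `blob.chunks(TRIE_NODE_BYTES)` yielding fixed-size node records. A standalone sketch of such chunking over a `Uint8Array`; the node size below is a placeholder, not the package's constant:

    const NODE_BYTES = 64; // placeholder; the real value is TRIE_NODE_BYTES

    function* chunks(blob: Uint8Array, size: number): Generator<Uint8Array> {
      for (let offset = 0; offset + size <= blob.length; offset += size) {
        yield blob.subarray(offset, offset + size);
      }
    }

    // Each yielded slice would back one `new TrieNode(...)` in the loop above.
    for (const nodeData of chunks(new Uint8Array(3 * NODE_BYTES), NODE_BYTES)) {
      // parse nodeData...
    }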
@@ -12242,11 +12147,11 @@ declare class LeafDb implements SerializedStateBackend {
   private readonly lookup: TruncatedHashDictionary<StateKey, Lookup>;
 
   private constructor(
-    public readonly leaves:
+    public readonly leaves: readonly LeafNode[],
     public readonly db: ValuesDb,
   ) {
     this.lookup = TruncatedHashDictionary.fromEntries(
-      leaves.
+      leaves.map((leaf) => {
         const key: StateKey = leaf.getKey().asOpaque();
         const value: Lookup = leaf.hasEmbeddedValue()
           ? {
@@ -14443,8 +14348,6 @@ declare enum AccessType {
   WRITE = 1,
 }
 
-// const logger = Logger.new(import.meta.filename, "pvm:mem");
-
 declare class Memory {
   static fromInitialMemory(initialMemoryState: InitialMemoryState) {
     return new Memory(
@@ -14481,7 +14384,7 @@ declare class Memory {
       return Result.ok(OK);
     }
 
-
+    logger.insane(`MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`);
     const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);
 
     if (pagesResult.isError) {
@@ -14570,7 +14473,7 @@ declare class Memory {
       bytesLeft -= bytesToRead;
     }
 
-
+    logger.insane(`MEM[${startAddress}] => ${BytesBlob.blobFrom(result)}`);
     return Result.ok(OK);
   }
 
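Note: the two `logger.insane` additions trace every memory write and read. Together with the removal of the commented-out declaration in the `AccessType` hunk, they suggest a module-level logger along these lines is now active; the declaration below is reconstructed from that comment and falls outside the hunks shown, so treat it as an assumption:

    // Reconstructed from the previously commented-out line; not visible in this diff.
    const logger = Logger.new(import.meta.filename, "pvm:mem");

    // "insane" reads as the most verbose level, so these per-access traces
    // are presumably filtered out unless explicitly enabled.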