@typeberry/lib 0.0.5-ab52154 → 0.0.5-cdbb94a

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/index.cjs +17538 -0
  2. package/index.d.ts +139 -42
  3. package/index.js +144 -89
  4. package/package.json +9 -2
package/index.d.ts CHANGED
@@ -4474,6 +4474,9 @@ declare namespace bandersnatch_d_exports {
  }
  /* tslint:disable */
  /* eslint-disable */
+ /**
+ * Generate ring commitment given concatenation of ring keys.
+ */
  declare function ring_commitment(keys: Uint8Array): Uint8Array;
  /**
  * Derive Private and Public Key from Seed
@@ -4487,21 +4490,21 @@ declare function derive_public_key(seed: Uint8Array): Uint8Array;
  * or
  * https://graypaper.fluffylabs.dev/#/68eaa1f/0e54010e5401?v=0.6.4
  */
- declare function verify_seal(keys: Uint8Array, signer_key_index: number, seal_data: Uint8Array, payload: Uint8Array, aux_data: Uint8Array): Uint8Array;
+ declare function verify_seal(signer_key: Uint8Array, seal_data: Uint8Array, payload: Uint8Array, aux_data: Uint8Array): Uint8Array;
  /**
  * Verify multiple tickets at once as defined in:
  * https://graypaper.fluffylabs.dev/#/68eaa1f/0f3e000f3e00?v=0.6.4
  *
  * NOTE: the aux_data of VRF function is empty!
  */
- declare function batch_verify_tickets(keys: Uint8Array, tickets_data: Uint8Array, vrf_input_data_len: number): Uint8Array;
+ declare function batch_verify_tickets(ring_size: number, commitment: Uint8Array, tickets_data: Uint8Array, vrf_input_data_len: number): Uint8Array;
  type InitInput$2 = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;
  interface InitOutput$2 {
  readonly memory: WebAssembly.Memory;
  readonly ring_commitment: (a: number, b: number) => [number, number];
  readonly derive_public_key: (a: number, b: number) => [number, number];
- readonly verify_seal: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number) => [number, number];
- readonly batch_verify_tickets: (a: number, b: number, c: number, d: number, e: number) => [number, number];
+ readonly verify_seal: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => [number, number];
+ readonly batch_verify_tickets: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number];
  readonly __wbindgen_export_0: WebAssembly.Table;
  readonly __wbindgen_malloc: (a: number, b: number) => number;
  readonly __wbindgen_free: (a: number, b: number, c: number) => void;
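
For orientation, a minimal sketch of the new call shapes implied by the declarations above; the `bandersnatch` binding object and every buffer length below are assumptions for illustration only, not part of this diff.

// Sketch only: assumes a binding object exposing exactly the signatures declared above.
declare const bandersnatch: {
  ring_commitment(keys: Uint8Array): Uint8Array;
  verify_seal(signer_key: Uint8Array, seal_data: Uint8Array, payload: Uint8Array, aux_data: Uint8Array): Uint8Array;
  batch_verify_tickets(ring_size: number, commitment: Uint8Array, tickets_data: Uint8Array, vrf_input_data_len: number): Uint8Array;
};

// verify_seal now takes the signer's public key directly instead of the
// concatenated key set plus a signer_key_index.
const sealOutput = bandersnatch.verify_seal(
  new Uint8Array(32), // placeholder signer key
  new Uint8Array(96), // placeholder seal data
  new Uint8Array(0),  // payload
  new Uint8Array(0),  // aux data
);

// batch_verify_tickets now takes the ring size and a precomputed commitment
// (e.g. from ring_commitment) instead of the raw concatenation of ring keys.
const commitment = bandersnatch.ring_commitment(new Uint8Array(32 * 6)); // placeholder ring of 6 keys
const ticketResults = bandersnatch.batch_verify_tickets(6, commitment, new Uint8Array(0), 32);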
@@ -8530,7 +8533,9 @@ declare class WriteableNodesDb extends NodesDb {
  }
  }

- declare const leafComparator = (x: LeafNode, y: LeafNode) => y.getKey().compare(x.getKey());
+ /** Compare two trie `LeafNode`s only by their key. */
+ declare const leafComparator = (x: LeafNode, y: LeafNode) => x.getKey().compare(y.getKey());
+ declare const zero = Bytes.zero(HASH_SIZE).asOpaque();

  declare class InMemoryTrie {
  /** Create an empty in-memory trie. */
@@ -8539,10 +8544,87 @@ declare class InMemoryTrie {
  }

  /** Given a collection of leaves, compute the state root. */
- static computeStateRoot(hasher: TrieHasher, leaves: readonly LeafNode[]) {
- // TODO [ToDr] [opti] Simple loop to just compute the root hash instead of
- // constructing the entire trie.
- return InMemoryTrie.fromLeaves(hasher, leaves).getRootHash();
+ static computeStateRoot(hasher: TrieHasher, leaves: SortedSet<LeafNode>): TrieNodeHash {
+ const sorted = leaves.slice();
+ const firstSorted = sorted.shift();
+ if (firstSorted === undefined) {
+ return zero;
+ }
+
+ const nodes = [
+ {
+ leaf: firstSorted,
+ sharedBitsWithPrev: 0,
+ },
+ ];
+ let last = nodes[0];
+ // first we go through all of the sorted leaves and figure out how much in common
+ // they have with the previous node.
+ // If the shared-prefix drops, it means we are going up in depth (i.e. we are in different branch).
+ for (const leaf of sorted) {
+ const sharedBitsCount = findSharedPrefix(leaf.getKey(), last.leaf.getKey());
+ last = {
+ leaf,
+ sharedBitsWithPrev: sharedBitsCount,
+ };
+ nodes.push(last);
+ }
+ // Now we will go backwards and hash them together (or create branch nodes).
+ nodes.reverse();
+ const stack: TrieNodeHash[] = [];
+ let currentDepth = 0;
+ const lastNode = nodes.length === 1 ? undefined : nodes[nodes.length - 1];
+ for (const node of nodes) {
+ const isLastNode = node === lastNode;
+ const key = node.leaf.getKey();
+ const prevDepth = currentDepth;
+ currentDepth = node.sharedBitsWithPrev;
+
+ // first push all missing right-hand zero nodes.
+ // Handle the case if all nodes are on the left side and we need one more top-level
+ // extra.
+ const startDepth = isLastNode ? prevDepth : prevDepth + 1;
+ for (let i = startDepth; i <= currentDepth; i++) {
+ if (getBit(key, i) === false) {
+ stack.push(zero);
+ }
+ }
+
+ // now let's push the hash of the current leaf
+ const hash = hasher.hashConcat(node.leaf.node.raw);
+ stack.push(hash);
+ // we are going further down, so no need to merge anything
+ if (prevDepth < currentDepth) {
+ continue;
+ }
+ // jumping back to some lower depth, we need to merge what we have on the stack.
+ // we need to handle a case where we have no nodes on the top-most left side.
+ // in such case we just add extra zero on the left.
+ const endDepth = isLastNode ? currentDepth - 1 : currentDepth;
+ for (let i = prevDepth; i > endDepth; i--) {
+ if (getBit(key, i) === true) {
+ stack.push(zero);
+ }
+ const current = stack.pop() ?? zero;
+ const next = stack.pop() ?? zero;
+ const branchNode = BranchNode.fromSubNodes(current, next);
+ const hash = hasher.hashConcat(branchNode.node.raw);
+ stack.push(hash);
+ }
+ }
+
+ return stack.pop() ?? zero;
+ }
+
+ /**
+ * Construct a `LeafNode` from given `key` and `value`.
+ *
+ * NOTE: for large value it WILL NOT be embedded in the leaf node,
+ * and should rather be stored separately.
+ */
+ static constructLeaf(hasher: TrieHasher, key: InputKey, value: BytesBlob, maybeValueHash?: ValueHash) {
+ const valueHash = () => maybeValueHash ?? hasher.hashConcat(value.raw).asOpaque();
+ return LeafNode.fromValue(key, value, valueHash);
  }

  /**
@@ -8560,11 +8642,6 @@ declare class InMemoryTrie {
  return new InMemoryTrie(nodes, root);
  }

- static constructLeaf(hasher: TrieHasher, key: InputKey, value: BytesBlob, maybeValueHash?: ValueHash) {
- const valueHash = () => maybeValueHash ?? hasher.hashConcat(value.raw).asOpaque();
- return LeafNode.fromValue(key, value, valueHash);
- }
-
  private constructor(
  // Exposed for trie-visualiser
  public readonly nodes: WriteableNodesDb,
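
The replacement computeStateRoot folds key-sorted leaves straight into the root hash instead of materialising the whole trie. A sketch of how it is meant to be driven, using only names that appear in this diff; `hasher` and `entries` are placeholders for whatever the caller has at hand.

// Sketch only: ambient declarations stand in for caller-provided inputs.
declare const hasher: TrieHasher;
declare const entries: Array<[InputKey, BytesBlob]>;

// leafComparator keeps leaves ordered by key, which the shared-prefix walk assumes.
const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
for (const [key, value] of entries) {
  // NOTE: large values are not embedded in the leaf and must be stored separately.
  leaves.insert(InMemoryTrie.constructLeaf(hasher, key, value));
}
const root: TrieNodeHash = InMemoryTrie.computeStateRoot(hasher, leaves);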
@@ -8691,7 +8768,7 @@ declare function findNodeToReplace(root: TrieNode, nodes: NodesDb, key: Truncate

  const nextNode = nodes.get(nextHash);
  if (nextNode === null) {
- if (nextHash.isEqualTo(Bytes.zero(HASH_SIZE))) {
+ if (nextHash.isEqualTo(zero)) {
  return traversedPath;
  }

@@ -8827,6 +8904,35 @@ declare function trieStringify(root: TrieNode | null, nodes: NodesDb): string {
  return `\nLeaf('${leaf.getKey().toString()}',${value})`;
  }

+ declare function findSharedPrefix(a: TruncatedStateKey, b: TruncatedStateKey) {
+ for (let i = 0; i < TRUNCATED_HASH_SIZE; i++) {
+ const diff = a.raw[i] ^ b.raw[i];
+ if (diff === 0) {
+ continue;
+ }
+ // check how many bits match
+ for (const [mask, matchingBits] of bitLookup) {
+ if ((mask & diff) !== 0) {
+ return i * 8 + matchingBits;
+ }
+ }
+ return i;
+ }
+ return TRUNCATED_HASH_SIZE * 8;
+ }
+
+ declare const bitLookup = [
+ [0b10000000, 0],
+ [0b01000000, 1],
+ [0b00100000, 2],
+ [0b00010000, 3],
+ [0b00001000, 4],
+ [0b00000100, 5],
+ [0b00000010, 6],
+ [0b00000001, 7],
+ [0b00000000, 8],
+ ];
+
  type index$e_BranchNode = BranchNode;
  declare const index$e_BranchNode: typeof BranchNode;
  type index$e_InMemoryTrie = InMemoryTrie;
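
findSharedPrefix returns the number of leading bits two keys have in common: the first differing byte is found via XOR, and bitLookup maps its highest set bit to the count of matching bits inside that byte. A standalone illustration of the arithmetic on plain byte values (not the library's TruncatedStateKey type):

// Keys that agree on their first three bits and differ on the fourth.
const a = 0b1011_0000;
const b = 0b1010_0000;
const diff = a ^ b; // 0b0001_0000 - the first differing byte
// Equivalent to the bitLookup scan for a non-zero byte:
const matchingBits = Math.clz32(diff) - 24; // leading zero bits within the byte
console.log(matchingBits); // 3, i.e. findSharedPrefix would return 0 * 8 + 3
// Identical keys fall through the byte loop and yield TRUNCATED_HASH_SIZE * 8.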
@@ -8851,15 +8957,18 @@ type index$e_TruncatedStateKey = TruncatedStateKey;
  type index$e_ValueHash = ValueHash;
  type index$e_WriteableNodesDb = WriteableNodesDb;
  declare const index$e_WriteableNodesDb: typeof WriteableNodesDb;
+ declare const index$e_bitLookup: typeof bitLookup;
  declare const index$e_createSubtreeForBothLeaves: typeof createSubtreeForBothLeaves;
  declare const index$e_findNodeToReplace: typeof findNodeToReplace;
+ declare const index$e_findSharedPrefix: typeof findSharedPrefix;
  declare const index$e_getBit: typeof getBit;
  declare const index$e_leafComparator: typeof leafComparator;
  declare const index$e_parseInputKey: typeof parseInputKey;
  declare const index$e_trieInsert: typeof trieInsert;
  declare const index$e_trieStringify: typeof trieStringify;
+ declare const index$e_zero: typeof zero;
  declare namespace index$e {
- export { index$e_BranchNode as BranchNode, index$e_InMemoryTrie as InMemoryTrie, index$e_LeafNode as LeafNode, index$e_NodeType as NodeType, index$e_NodesDb as NodesDb, index$e_TRIE_NODE_BYTES as TRIE_NODE_BYTES, index$e_TRUNCATED_KEY_BITS as TRUNCATED_KEY_BITS, index$e_TraversedPath as TraversedPath, index$e_TrieNode as TrieNode, index$e_WriteableNodesDb as WriteableNodesDb, index$e_createSubtreeForBothLeaves as createSubtreeForBothLeaves, index$e_findNodeToReplace as findNodeToReplace, index$e_getBit as getBit, index$e_leafComparator as leafComparator, index$e_parseInputKey as parseInputKey, index$e_trieInsert as trieInsert, index$e_trieStringify as trieStringify };
+ export { index$e_BranchNode as BranchNode, index$e_InMemoryTrie as InMemoryTrie, index$e_LeafNode as LeafNode, index$e_NodeType as NodeType, index$e_NodesDb as NodesDb, index$e_TRIE_NODE_BYTES as TRIE_NODE_BYTES, index$e_TRUNCATED_KEY_BITS as TRUNCATED_KEY_BITS, index$e_TraversedPath as TraversedPath, index$e_TrieNode as TrieNode, index$e_WriteableNodesDb as WriteableNodesDb, index$e_bitLookup as bitLookup, index$e_createSubtreeForBothLeaves as createSubtreeForBothLeaves, index$e_findNodeToReplace as findNodeToReplace, index$e_findSharedPrefix as findSharedPrefix, index$e_getBit as getBit, index$e_leafComparator as leafComparator, index$e_parseInputKey as parseInputKey, index$e_trieInsert as trieInsert, index$e_trieStringify as trieStringify, index$e_zero as zero };
  export type { index$e_InputKey as InputKey, StateKey$1 as StateKey, index$e_TRUNCATED_KEY_BYTES as TRUNCATED_KEY_BYTES, index$e_TrieHasher as TrieHasher, index$e_TrieNodeHash as TrieNodeHash, index$e_TruncatedStateKey as TruncatedStateKey, index$e_ValueHash as ValueHash };
  }

@@ -11694,8 +11803,6 @@ declare class StateEntries {
  return new StateEntries(TruncatedHashDictionary.fromEntries(entries));
  }

- private trieCache: InMemoryTrie | null = null;
-
  private constructor(private readonly entries: TruncatedHashDictionary<StateKey, BytesBlob>) {}

  /** When comparing, we can safely ignore `trieCache` and just use entries. */
@@ -11707,18 +11814,6 @@ declare class StateEntries {
  return this.entries[Symbol.iterator]();
  }

- /** Construct the trie from given set of state entries. */
- public getTrie(): InMemoryTrie {
- if (this.trieCache === null) {
- const trie = InMemoryTrie.empty(blake2bTrieHasher);
- for (const [key, value] of this.entries) {
- trie.set(key.asOpaque(), value);
- }
- this.trieCache = trie;
- }
- return this.trieCache;
- }
-
  /** Retrieve value of some serialized key (if present). */
  get(key: StateKey): BytesBlob | null {
  return this.entries.get(key) ?? null;
@@ -11726,8 +11821,6 @@ declare class StateEntries {

  /** Modify underlying entries dictionary with given update. */
  applyUpdate(stateEntriesUpdate: Iterable<StateEntryUpdate>) {
- // NOTE since we are altering the structure, we need to reset the cache.
- this.trieCache = null;
  for (const [action, key, value] of stateEntriesUpdate) {
  if (action === StateEntryUpdateAction.Insert) {
  this.entries.set(key, value);
@@ -11741,10 +11834,12 @@ declare class StateEntries {

  /** https://graypaper.fluffylabs.dev/#/68eaa1f/391600391600?v=0.6.4 */
  getRootHash(): StateRootHash {
- // TODO [ToDr] it should be possible to do this more efficiently
- // by converting the state entries into leaf nodes and constructing
- // the trie from the trie nodes.
- return this.getTrie().getRootHash().asOpaque();
+ const leaves: SortedSet<LeafNode> = SortedSet.fromArray(leafComparator);
+ for (const [key, value] of this) {
+ leaves.insert(InMemoryTrie.constructLeaf(blake2bTrieHasher, key.asOpaque(), value));
+ }
+
+ return InMemoryTrie.computeStateRoot(blake2bTrieHasher, leaves).asOpaque();
  }
  }

@@ -12131,13 +12226,13 @@ declare class LeafDb implements SerializedStateBackend {
  );
  }

- const leaves: LeafNode[] = [];
+ const leaves = SortedSet.fromArray(leafComparator, []);
  for (const nodeData of blob.chunks(TRIE_NODE_BYTES)) {
  const node = new TrieNode(nodeData.raw);
  if (node.getNodeType() === NodeType.Branch) {
  return Result.error(LeafDbError.InvalidLeafData, `Branch node detected: ${nodeData}`);
  }
- leaves.push(node.asLeafNode());
+ leaves.insert(node.asLeafNode());
  }

  return Result.ok(new LeafDb(leaves, db));
@@ -12147,11 +12242,11 @@ declare class LeafDb implements SerializedStateBackend {
  private readonly lookup: TruncatedHashDictionary<StateKey, Lookup>;

  private constructor(
- public readonly leaves: readonly LeafNode[],
+ public readonly leaves: SortedSet<LeafNode>,
  public readonly db: ValuesDb,
  ) {
  this.lookup = TruncatedHashDictionary.fromEntries(
- leaves.map((leaf) => {
+ leaves.array.map((leaf) => {
  const key: StateKey = leaf.getKey().asOpaque();
  const value: Lookup = leaf.hasEmbeddedValue()
  ? {
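
LeafDb now keeps its leaves in a SortedSet<LeafNode> rather than a plain array; a minimal sketch of the collection surface this hunk exercises, where `decodedLeaves` is a placeholder for leaves parsed from the serialized blob:

declare const decodedLeaves: LeafNode[]; // placeholder input
const leaves = SortedSet.fromArray(leafComparator, []);
for (const leaf of decodedLeaves) {
  leaves.insert(leaf); // kept ordered by key via leafComparator
}
// The key lookup table is rebuilt from the ordered backing array:
const stateKeys = leaves.array.map((leaf) => leaf.getKey().asOpaque());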
@@ -14348,6 +14443,8 @@ declare enum AccessType {
  WRITE = 1,
  }

+ // const logger = Logger.new(import.meta.filename, "pvm:mem");
+
  declare class Memory {
  static fromInitialMemory(initialMemoryState: InitialMemoryState) {
  return new Memory(
@@ -14384,7 +14481,7 @@ declare class Memory {
  return Result.ok(OK);
  }

- logger.insane(`MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`);
+ // logger.insane(`MEM[${address}] <- ${BytesBlob.blobFrom(bytes)}`);
  const pagesResult = this.getPages(address, bytes.length, AccessType.WRITE);

  if (pagesResult.isError) {
@@ -14473,7 +14570,7 @@ declare class Memory {
  bytesLeft -= bytesToRead;
  }

- logger.insane(`MEM[${startAddress}] => ${BytesBlob.blobFrom(result)}`);
+ // logger.insane(`MEM[${startAddress}] => ${BytesBlob.blobFrom(result)}`);
  return Result.ok(OK);
  }