@typeberry/lib 0.4.1-dae2283 → 0.4.1-f776cce
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.cjs +470 -31
- package/index.d.ts +559 -41
- package/index.js +470 -31
- package/package.json +1 -1
package/index.cjs
CHANGED
@@ -4427,8 +4427,434 @@ class ArrayView {
     }
 }
 
-/** A map which uses
-class
+/** A map which uses byte blobs as keys */
+class BlobDictionary extends WithDebug {
+    mapNodeThreshold;
+    /**
+     * The root node of the dictionary.
+     *
+     * This is the main internal data structure that organizes entries
+     * in a tree-like fashion (array-based nodes up to `mapNodeThreshold`,
+     * map-based nodes beyond it). All insertions, updates, and deletions
+     * operate through this structure.
+     */
+    root = Node.withList();
+    /**
+     * Auxiliary map that stores references to the original keys and their values.
+     *
+     * - Overriding a value in the main structure does not replace the original key reference.
+     * - Used for efficient iteration over `keys()`, `values()`, `entries()`, and computing `size`.
+     */
+    keyvals = new Map();
+    /**
+     * Protected constructor used internally by `BlobDictionary.new`
+     * and `BlobDictionary.fromEntries`.
+     *
+     * This enforces controlled instantiation — users should create instances
+     * through the provided static factory methods instead of calling the
+     * constructor directly.
+     *
+     * @param mapNodeThreshold - The threshold that determines when the dictionary
+     * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
+     */
+    constructor(mapNodeThreshold) {
+        super();
+        this.mapNodeThreshold = mapNodeThreshold;
+    }
+    /**
+     * Returns the number of entries in the dictionary.
+     *
+     * The count is derived from the auxiliary `keyvals` map, which stores
+     * all original key references and their associated values. This ensures
+     * that the `size` reflects the actual number of entries, independent of
+     * internal overrides in the main `root` structure.
+     *
+     * @returns The total number of entries in the dictionary.
+     */
+    get size() {
+        return this.keyvals.size;
+    }
+    [TEST_COMPARE_USING]() {
+        const vals = Array.from(this);
+        vals.sort((a, b) => a[0].compare(b[0]).value);
+        return vals;
+    }
+    /**
+     * Creates an empty `BlobDictionary`.
+     *
+     * @param mapNodeThreshold - The threshold that determines when the dictionary
+     * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
+     * Defaults to `0`.
+     *
+     * @returns A new, empty `BlobDictionary` instance.
+     */
+    static new(mapNodeThreshold = 0) {
+        return new BlobDictionary(mapNodeThreshold);
+    }
+    /**
+     * Creates a new `BlobDictionary` initialized with the given entries.
+     *
+     * @param entries - An array of `[key, value]` pairs used to populate the dictionary.
+     * @param mapNodeThreshold - The threshold that determines when the dictionary
+     * switches from using an array-based (`ListChildren`) node to a map-based (`MapChildren`) node for storing entries.
+     * Defaults to `0`.
+     *
+     * @returns A new `BlobDictionary` containing the provided entries.
+     */
+    static fromEntries(entries, mapNodeThreshold) {
+        const dict = BlobDictionary.new(mapNodeThreshold);
+        for (const [key, value] of entries) {
+            dict.set(key, value);
+        }
+        return dict;
+    }
+    /**
+     * Internal helper that inserts, updates or deletes an entry in the dictionary.
+     *
+     * Behaviour details:
+     * - Passing `undefined` as `value` indicates a deletion. (E.g. `delete` uses `internalSet(key, undefined)`.)
+     * - When an add (new entry) or a delete actually changes the structure, the method returns the affected leaf node.
+     * - When the call only overrides an existing value (no structural add/delete), the method returns `null`.
+     *
+     * This method is intended for internal use by the dictionary implementation and allows `undefined` as a
+     * sentinel value to signal removals.
+     *
+     * @param key - The key to insert, update or remove.
+     * @param value - The value to associate with the key, or `undefined` to remove the key.
+     * @returns The leaf node created or removed on add/delete, or `null` if the operation only overwrote an existing value.
+     */
+    internalSet(key, value) {
+        let node = this.root;
+        const keyChunkGenerator = key.chunks(CHUNK_SIZE);
+        let depth = 0;
+        for (;;) {
+            const maybeKeyChunk = keyChunkGenerator.next().value;
+            if (maybeKeyChunk === undefined) {
+                if (value === undefined) {
+                    return node.remove(key);
+                }
+                return node.set(key, value);
+            }
+            const keyChunk = asOpaqueType(maybeKeyChunk);
+            if (node.children instanceof ListChildren) {
+                const subkey = BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE * depth));
+                const leaf = value !== undefined ? node.children.insert(subkey, { key, value }) : node.children.remove(subkey);
+                if (subkey.length > CHUNK_SIZE && node.children.children.length > this.mapNodeThreshold) {
+                    node.convertListChildrenToMap();
+                }
+                return leaf;
+            }
+            depth += 1;
+            const children = node.children;
+            if (children instanceof ListChildren) {
+                throw new Error("We handle list node earlier. If we fall through, we know it's for the `Map` case.");
+            }
+            if (children instanceof MapChildren) {
+                const maybeNode = children.getChild(keyChunk);
+                if (maybeNode !== undefined) {
+                    // simply go one level deeper
+                    node = maybeNode;
+                }
+                else {
+                    // we are trying to remove an item, but it does not exist
+                    if (value === undefined) {
+                        return null;
+                    }
+                    // no more child nodes, we insert a new one.
+                    const newNode = Node.withList();
+                    children.setChild(keyChunk, newNode);
+                    node = newNode;
+                }
+                continue;
+            }
+            assertNever(children);
+        }
+    }
+    /**
+     * Adds a new entry to the dictionary or updates the value of an existing key.
+     *
+     * If an entry with the given key already exists, its value is replaced
+     * with the new one.
+     *
+     * @param key - The key to add or update in the dictionary.
+     * @param value - The value to associate with the specified key.
+     * @returns Nothing (`void`).
+     */
+    set(key, value) {
+        const leaf = this.internalSet(key, value);
+        if (leaf !== null) {
+            this.keyvals.set(leaf.key, leaf);
+        }
+    }
+    /**
+     * Retrieves the value associated with the given key from the dictionary.
+     *
+     * If the key does not exist, this method returns `undefined`.
+     *
+     * @param key - The key whose associated value should be retrieved.
+     * @returns The value associated with the specified key, or `undefined` if the key is not present.
+     */
+    get(key) {
+        let node = this.root;
+        const pathChunksGenerator = key.chunks(CHUNK_SIZE);
+        let depth = 0;
+        while (node !== undefined) {
+            const maybePathChunk = pathChunksGenerator.next().value;
+            if (node.children instanceof ListChildren) {
+                const subkey = BytesBlob.blobFrom(key.raw.subarray(depth * CHUNK_SIZE));
+                const child = node.children.find(subkey);
+                if (child !== null) {
+                    return child.value;
+                }
+            }
+            if (maybePathChunk === undefined) {
+                return node.getLeaf()?.value;
+            }
+            if (node.children instanceof MapChildren) {
+                const pathChunk = asOpaqueType(maybePathChunk);
+                node = node.children.getChild(pathChunk);
+                depth += 1;
+            }
+        }
+        return undefined;
+    }
+    /**
+     * Checks whether the dictionary contains an entry for the given key.
+     *
+     * ⚠️ **Note:** Avoid using `has(...)` together with `get(...)` in a pattern like this:
+     *
+     * ```ts
+     * if (dict.has(key)) {
+     *   const value = dict.get(key);
+     *   ...
+     * }
+     * ```
+     *
+     * This approach performs two lookups for the same key.
+     *
+     * Instead, prefer the following pattern, which retrieves the value once:
+     *
+     * ```ts
+     * const value = dict.get(key);
+     * if (value !== undefined) {
+     *   ...
+     * }
+     * ```
+     *
+     * @param key - The key to check for.
+     * @returns `true` if the dictionary contains an entry for the given key, otherwise `false`.
+     */
+    has(key) {
+        return this.get(key) !== undefined;
+    }
+    /**
+     * Removes an entry with the specified key from the dictionary.
+     *
+     * Internally, this calls {@link internalSet} with `undefined` to mark the entry as deleted.
+     *
+     * @param key - The key of the entry to remove.
+     * @returns `true` if an entry was removed (i.e. the key existed), otherwise `false`.
+     */
+    delete(key) {
+        const leaf = this.internalSet(key, undefined);
+        if (leaf !== null) {
+            this.keyvals.delete(leaf.key);
+            return true;
+        }
+        return false;
+    }
+    /**
+     * Returns an iterator over the keys in the dictionary.
+     *
+     * The iterator yields each key in insertion order.
+     *
+     * @returns An iterator over all keys in the dictionary.
+     */
+    keys() {
+        return this.keyvals.keys();
+    }
+    /**
+     * Returns an iterator over the values in the dictionary.
+     *
+     * The iterator yields each value in insertion order.
+     *
+     * @returns An iterator over all values in the dictionary.
+     */
+    *values() {
+        for (const leaf of this.keyvals.values()) {
+            yield leaf.value;
+        }
+    }
+    /**
+     * Returns an iterator over the `[key, value]` pairs in the dictionary.
+     *
+     * The iterator yields entries in insertion order.
+     *
+     * @returns An iterator over `[key, value]` tuples for each entry in the dictionary.
+     */
+    *entries() {
+        for (const leaf of this.keyvals.values()) {
+            yield [leaf.key, leaf.value];
+        }
+    }
+    /**
+     * Default iterator for the dictionary.
+     *
+     * Equivalent to calling {@link entries}.
+     * Enables iteration with `for...of`:
+     *
+     * ```ts
+     * for (const [key, value] of dict) {
+     *   ...
+     * }
+     * ```
+     *
+     * @returns An iterator over `[key, value]` pairs.
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * Creates a new sorted array of values, ordered by their corresponding keys.
+     *
+     * Iterates over all entries in the dictionary and sorts them according
+     * to the provided comparator function applied to the keys.
+     *
+     * @param comparator - A comparator function that can compare two keys.
+     *
+     * @returns A new array containing all values from the dictionary,
+     * sorted according to their keys.
+     */
+    toSortedArray(comparator) {
+        const vals = Array.from(this);
+        vals.sort((a, b) => comparator(a[0], b[0]).value);
+        return vals.map((x) => x[1]);
+    }
+}
+const CHUNK_SIZE = 6;
+/**
+ * A function to transform a bytes chunk (up to 6 bytes into U48 number)
+ *
+ * Note that it uses 3 additional bits to store length(`value * 8 + len;`),
+ * It is needed to distinguish shorter chunks that have 0s at the end, for example: [1, 2] and [1, 2, 0]
+ * */
+function bytesAsU48(bytes) {
+    const len = bytes.length;
+    check `${len <= CHUNK_SIZE} Length has to be <= ${CHUNK_SIZE}, got: ${len}`;
+    let value = bytes[3] | (bytes[2] << 8) | (bytes[1] << 16) | (bytes[0] << 24);
+    for (let i = 4; i < bytes.length; i++) {
+        value = value * 256 + bytes[i];
+    }
+    return value * 8 + len;
+}
+class Node {
+    leaf;
+    children;
+    convertListChildrenToMap() {
+        if (!(this.children instanceof ListChildren)) {
+            return;
+        }
+        this.children = MapChildren.fromListNode(this.children);
+    }
+    static withList() {
+        return new Node(undefined, ListChildren.new());
+    }
+    static withMap() {
+        return new Node(undefined, MapChildren.new());
+    }
+    constructor(leaf, children) {
+        this.leaf = leaf;
+        this.children = children;
+    }
+    getLeaf() {
+        return this.leaf;
+    }
+    remove(_key) {
+        if (this.leaf === undefined) {
+            return null;
+        }
+        const removedLeaf = this.leaf;
+        this.leaf = undefined;
+        return removedLeaf;
+    }
+    set(key, value) {
+        if (this.leaf === undefined) {
+            this.leaf = { key, value };
+            return this.leaf;
+        }
+        this.leaf.value = value;
+        return null;
+    }
+}
+class ListChildren {
+    children = [];
+    constructor() { }
+    find(key) {
+        const result = this.children.find((item) => item[0].isEqualTo(key));
+        if (result !== undefined) {
+            return result[1];
+        }
+        return null;
+    }
+    remove(key) {
+        const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
+        if (existingIndex >= 0) {
+            const ret = this.children.splice(existingIndex, 1);
+            return ret[0][1];
+        }
+        return null;
+    }
+    insert(key, leaf) {
+        const existingIndex = this.children.findIndex((item) => item[0].isEqualTo(key));
+        if (existingIndex >= 0) {
+            const existing = this.children[existingIndex];
+            existing[1].value = leaf.value;
+            return null;
+        }
+        this.children.push([key, leaf]);
+        return leaf;
+    }
+    static new() {
+        return new ListChildren();
+    }
+}
+class MapChildren {
+    children = new Map();
+    constructor() { }
+    static new() {
+        return new MapChildren();
+    }
+    static fromListNode(node) {
+        const mapNode = new MapChildren();
+        for (const [key, leaf] of node.children) {
+            const currentKeyChunk = asOpaqueType(BytesBlob.blobFrom(key.raw.subarray(0, CHUNK_SIZE)));
+            const subKey = BytesBlob.blobFrom(key.raw.subarray(CHUNK_SIZE));
+            let child = mapNode.getChild(currentKeyChunk);
+            if (child === undefined) {
+                child = Node.withList();
+                mapNode.setChild(currentKeyChunk, child);
+            }
+            const children = child.children;
+            children.insert(subKey, leaf);
+        }
+        return mapNode;
+    }
+    getChild(keyChunk) {
+        const chunkAsNumber = bytesAsU48(keyChunk.raw);
+        return this.children.get(chunkAsNumber);
+    }
+    setChild(keyChunk, node) {
+        const chunkAsNumber = bytesAsU48(keyChunk.raw);
+        this.children.set(chunkAsNumber, node);
+    }
+}
+
+/**
+ * A map which uses hashes as keys.
+ *
+ * @deprecated
+ * */
+class StringHashDictionary {
     // TODO [ToDr] [crit] We can't use `TrieHash` directly in the map,
     // because of the way it's being compared. Hence having `string` here.
     // This has to be benchmarked and re-written to a custom map most likely.
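For orientation (not part of the published diff): the new `BlobDictionary` exposes a `Map`-like API keyed by byte blobs. The sketch below assumes `BlobDictionary` and `BytesBlob` are reachable through the package's `collections` and `bytes` namespaces; the import path, the threshold value, and the string payloads are illustrative assumptions, not part of this release.

```ts
import { bytes, collections } from "@typeberry/lib"; // assumed namespace exports

const { BlobDictionary } = collections;
const { BytesBlob } = bytes;

// Keys are byte blobs; values can be anything (strings here).
const dict = BlobDictionary.new(5); // mapNodeThreshold chosen arbitrarily for the sketch

const keyA = BytesBlob.blobFrom(new Uint8Array([1, 2, 3, 4, 5, 6, 7]));
const keyB = BytesBlob.blobFrom(new Uint8Array([1, 2, 3, 4, 5, 6, 8]));

dict.set(keyA, "alpha");
dict.set(keyB, "beta");

// Lookup pattern recommended by the JSDoc above: a single `get` instead of `has` + `get`.
const value = dict.get(keyA);
if (value !== undefined) {
  console.log(value); // "alpha"
}

dict.delete(keyB);
console.log(dict.size); // 1

// Iteration follows insertion order of the auxiliary `keyvals` map.
for (const [key, val] of dict) {
  console.log(key.toString(), val);
}
```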
@@ -4493,6 +4919,16 @@ class HashDictionary {
         return this.map.delete(key.toString());
     }
 }
+/**
+ * A value that indicates when `BlobDictionary` transforms Array nodes into Map nodes.
+ * In practice, it doesn't matter much because, in real life, arrays in this structure usually have a length close to 1.
+ */
+const BLOB_DICTIONARY_THRESHOLD$1 = 5;
+class HashDictionary extends BlobDictionary {
+    constructor() {
+        super(BLOB_DICTIONARY_THRESHOLD$1);
+    }
+}
 
 /** A set specialized for storing hashes. */
 class HashSet {
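A note on the chunk encoding introduced earlier in this diff: `bytesAsU48` packs up to six key bytes into a number and folds the chunk length into the low three bits, which is what keeps chunks that differ only by trailing zero bytes distinct. The standalone sketch below re-derives the same arithmetic purely for illustration; it is not the library function.

```ts
// Re-derivation of the chunk encoding for illustration: result = value * 8 + length.
function chunkToNumber(chunk: Uint8Array): number {
  // The first four bytes use bitwise ops (mirroring the library code); remaining bytes
  // are folded in by multiplication, since the result may exceed the 32-bit bitwise range.
  let value =
    (chunk[3] ?? 0) | ((chunk[2] ?? 0) << 8) | ((chunk[1] ?? 0) << 16) | ((chunk[0] ?? 0) << 24);
  for (let i = 4; i < chunk.length; i++) {
    value = value * 256 + (chunk[i] ?? 0);
  }
  return value * 8 + chunk.length;
}

// Without the length bits both chunks would encode to the same value (16908288 * 8);
// the extra `+ len` keeps them apart.
console.log(chunkToNumber(new Uint8Array([1, 2])));    // 135266306
console.log(chunkToNumber(new Uint8Array([1, 2, 0]))); // 135266307
```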
@@ -4942,6 +5378,18 @@ class SortedSet extends SortedArray {
     }
 }
 
+function getTruncatedKey(key) {
+    // Always return exactly TRUNCATED_HASH_SIZE bytes.
+    if (key.length === TRUNCATED_HASH_SIZE) {
+        return key;
+    }
+    return Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE);
+}
+/**
+ * A value that indicates when `BlobDictionary` transforms Array nodes into Map nodes.
+ * In practice, it doesn't matter much because, in real life, arrays in this structure usually have a length close to 1.
+ */
+const BLOB_DICTIONARY_THRESHOLD = 5;
 /**
  * A collection of hash-based keys (likely `StateKey`s) which ignores
  * differences on the last byte.
@@ -4954,48 +5402,37 @@ class TruncatedHashDictionary {
      * Each key will be copied and have the last byte replace with a 0.
      */
     static fromEntries(entries) {
-
-        const mapped = Array.from(entries).map(([key, value]) => {
-            const newKey = Bytes.zero(HASH_SIZE).asOpaque();
-            newKey.raw.set(key.raw.subarray(0, TRUNCATED_HASH_SIZE));
-            return [newKey, value];
-        });
-        return new TruncatedHashDictionary(HashDictionary.fromEntries(mapped));
+        return new TruncatedHashDictionary(BlobDictionary.fromEntries(Array.from(entries).map(([key, value]) => [getTruncatedKey(key), value]), BLOB_DICTIONARY_THRESHOLD));
     }
-    /** A truncated key which we re-use to query the dictionary. */
-    truncatedKey = Bytes.zero(HASH_SIZE).asOpaque();
     constructor(dict) {
         this.dict = dict;
     }
     [TEST_COMPARE_USING]() {
-        return this.dict;
+        return Array.from(this.dict);
     }
     /** Return number of items in the dictionary. */
     get size() {
         return this.dict.size;
     }
     /** Retrieve a value that matches the key on `TRUNCATED_HASH_SIZE`. */
-    get(
-
-        return this.dict.get(
+    get(key) {
+        const truncatedKey = getTruncatedKey(key);
+        return this.dict.get(truncatedKey);
     }
     /** Return true if the key is present in the dictionary */
-    has(
-
-        return this.dict.has(
+    has(key) {
+        const truncatedKey = getTruncatedKey(key);
+        return this.dict.has(truncatedKey);
     }
     /** Set or update a value that matches the key on `TRUNCATED_HASH_SIZE`. */
-    set(
-
-
-        const key = Bytes.zero(HASH_SIZE);
-        key.raw.set(fullKey.raw.subarray(0, TRUNCATED_HASH_SIZE));
-        this.dict.set(key.asOpaque(), value);
+    set(key, value) {
+        const truncatedKey = getTruncatedKey(key);
+        this.dict.set(truncatedKey, value);
     }
     /** Remove a value that matches the key on `TRUNCATED_HASH_SIZE`. */
-    delete(
-
-        this.dict.delete(
+    delete(key) {
+        const truncatedKey = getTruncatedKey(key);
+        this.dict.delete(truncatedKey);
     }
     /** Iterator over values of the dictionary. */
     values() {
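Because every key now passes through `getTruncatedKey`, two hashes that agree on their first `TRUNCATED_HASH_SIZE` bytes address the same entry (per the class docs, differences on the last byte are ignored). The sketch below illustrates that behaviour; the import path, the 32-byte hash length, and the concrete byte values are assumptions made only for this example.

```ts
import { bytes, collections } from "@typeberry/lib"; // assumed namespace exports

const { TruncatedHashDictionary } = collections;
const { Bytes } = bytes;

// Two hashes that differ only in the last byte (a 32-byte hash size is assumed here).
const hashA = Bytes.zero(32).asOpaque();
hashA.raw[0] = 1;
hashA.raw[31] = 7;
const hashB = Bytes.zero(32).asOpaque();
hashB.raw[0] = 1;
hashB.raw[31] = 9;

const dict = TruncatedHashDictionary.fromEntries([[hashA, "stored under the truncated key"]]);

// The dictionary ignores bytes past TRUNCATED_HASH_SIZE, so hashB resolves to hashA's entry.
console.log(dict.get(hashB)); // "stored under the truncated key"
console.log(dict.has(hashB)); // true
```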
@@ -5003,9 +5440,7 @@ class TruncatedHashDictionary {
     }
     /** Iterator over entries of the dictionary (with truncated keys) */
     *entries() {
-
-        yield [Bytes.fromBlob(key.raw.subarray(0, TRUNCATED_HASH_SIZE), TRUNCATED_HASH_SIZE).asOpaque(), value];
-        }
+        yield* this.dict.entries();
     }
     [Symbol.iterator]() {
         return this.entries();
@@ -5015,14 +5450,18 @@ class TruncatedHashDictionary {
 var index$n = /*#__PURE__*/Object.freeze({
     __proto__: null,
     ArrayView: ArrayView,
+    BlobDictionary: BlobDictionary,
     FixedSizeArray: FixedSizeArray,
     HashDictionary: HashDictionary,
     HashSet: HashSet,
+    ListChildren: ListChildren,
     MultiMap: MultiMap,
     SortedArray: SortedArray,
     SortedSet: SortedSet,
+    StringHashDictionary: StringHashDictionary,
     TruncatedHashDictionary: TruncatedHashDictionary,
-    asKnownSize: asKnownSize
+    asKnownSize: asKnownSize,
+    bytesAsU48: bytesAsU48
 });
 
 /**